|
@@ -17,7 +17,7 @@ BEGIN;
|
|
-- ====================================================================
|
|
-- ====================================================================
|
|
CREATE TABLE IF NOT EXISTS data_pipeline_tasks (
|
|
CREATE TABLE IF NOT EXISTS data_pipeline_tasks (
|
|
-- 主键:时间戳格式的任务ID
|
|
-- 主键:时间戳格式的任务ID
|
|
- id VARCHAR(32) PRIMARY KEY, -- 'task_20250627_143052'
|
|
|
|
|
|
+ task_id VARCHAR(32) PRIMARY KEY, -- 'task_20250627_143052'
|
|
|
|
|
|
-- 任务基本信息
|
|
-- 任务基本信息
|
|
task_type VARCHAR(50) NOT NULL DEFAULT 'data_workflow',
|
|
task_type VARCHAR(50) NOT NULL DEFAULT 'data_workflow',
|
|
@@ -30,28 +30,20 @@ CREATE TABLE IF NOT EXISTS data_pipeline_tasks (
|
|
-- 错误处理
|
|
-- 错误处理
|
|
error_message TEXT, -- 错误详细信息
|
|
error_message TEXT, -- 错误详细信息
|
|
|
|
|
|
- -- 步骤状态跟踪
|
|
|
|
- step_status JSONB DEFAULT '{
|
|
|
|
- "ddl_generation": "pending",
|
|
|
|
- "qa_generation": "pending",
|
|
|
|
- "sql_validation": "pending",
|
|
|
|
- "training_load": "pending"
|
|
|
|
- }'::jsonb,
|
|
|
|
-
|
|
|
|
-- 时间戳
|
|
-- 时间戳
|
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
started_at TIMESTAMP,
|
|
started_at TIMESTAMP,
|
|
completed_at TIMESTAMP,
|
|
completed_at TIMESTAMP,
|
|
|
|
|
|
-- 创建者信息
|
|
-- 创建者信息
|
|
- created_by VARCHAR(50) DEFAULT 'api', -- 'api', 'manual', 'system'
|
|
|
|
|
|
+ created_type VARCHAR(50) DEFAULT 'api', -- 'api', 'manual', 'system'
|
|
|
|
+ by_user VARCHAR(50), -- 'guest'或其它user_id
|
|
|
|
|
|
-- 输出目录
|
|
-- 输出目录
|
|
output_directory TEXT, -- 任务输出目录路径
|
|
output_directory TEXT, -- 任务输出目录路径
|
|
|
|
|
|
-- 索引字段
|
|
-- 索引字段
|
|
- db_name VARCHAR(100), -- 数据库名称(便于筛选)
|
|
|
|
- business_context TEXT -- 业务上下文(便于搜索)
|
|
|
|
|
|
+ db_name VARCHAR(100) -- 数据库名称(便于筛选)
|
|
);
|
|
);
|
|
|
|
|
|
-- 添加约束
|
|
-- 添加约束
|
|
@@ -61,98 +53,31 @@ ALTER TABLE data_pipeline_tasks ADD CONSTRAINT chk_task_status
|
|
-- Constraints: restrict task_type and created_type to the known value sets.
ALTER TABLE data_pipeline_tasks ADD CONSTRAINT chk_task_type
    CHECK (task_type IN ('data_workflow', 'complete_workflow'));

ALTER TABLE data_pipeline_tasks ADD CONSTRAINT chk_created_type
    CHECK (created_type IN ('api', 'manual', 'system'));
|
|
|
|
|
-- ====================================================================
-- Task step status table (data_pipeline_task_steps)
-- One row per (task, step); tracks the per-step execution state that
-- previously lived in a JSONB `step_status` column on data_pipeline_tasks.
-- ====================================================================
CREATE TABLE IF NOT EXISTS data_pipeline_task_steps (
    id SERIAL PRIMARY KEY,
    -- FIX: a step row is meaningless without its parent task, so the FK is
    -- now NOT NULL (it was previously nullable, allowing orphan step rows).
    task_id VARCHAR(32) NOT NULL REFERENCES data_pipeline_tasks(task_id) ON DELETE CASCADE,
    execution_id VARCHAR(100),                           -- execution batch id (nullable)
    step_name VARCHAR(50) NOT NULL,                      -- 'ddl_generation', 'qa_generation', 'sql_validation', 'training_load'
    step_status VARCHAR(50) NOT NULL DEFAULT 'pending',  -- 'pending', 'running', 'completed', 'failed'
    started_at TIMESTAMP,                                -- NULL until the step actually starts
    completed_at TIMESTAMP,                              -- NULL until the step finishes (success or failure)
    error_message TEXT                                   -- error details when step_status = 'failed'
);
|
|
|
|
|
|
-- Constraints: restrict step status and step name to the known value sets.
ALTER TABLE data_pipeline_task_steps ADD CONSTRAINT chk_step_status
    CHECK (step_status IN ('pending', 'running', 'completed', 'failed'));

ALTER TABLE data_pipeline_task_steps ADD CONSTRAINT chk_step_name
    CHECK (step_name IN ('ddl_generation', 'qa_generation', 'sql_validation', 'training_load'));
|
|
|
|
|
|
-ALTER TABLE data_pipeline_task_executions ADD CONSTRAINT chk_duration_positive
|
|
|
|
- CHECK (duration_seconds IS NULL OR duration_seconds >= 0);
|
|
|
|
|
|
|
|
--- ====================================================================
|
|
|
|
--- 任务日志表 (data_pipeline_task_logs)
|
|
|
|
--- ====================================================================
|
|
|
|
-CREATE TABLE IF NOT EXISTS data_pipeline_task_logs (
|
|
|
|
- id SERIAL PRIMARY KEY,
|
|
|
|
- task_id VARCHAR(32) REFERENCES data_pipeline_tasks(id) ON DELETE CASCADE,
|
|
|
|
- execution_id VARCHAR(100) REFERENCES data_pipeline_task_executions(execution_id) ON DELETE SET NULL,
|
|
|
|
-
|
|
|
|
- -- 日志内容
|
|
|
|
- log_level VARCHAR(10) NOT NULL, -- 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'
|
|
|
|
- message TEXT NOT NULL, -- 日志消息内容
|
|
|
|
-
|
|
|
|
- -- 上下文信息
|
|
|
|
- step_name VARCHAR(50), -- 执行步骤名称
|
|
|
|
- module_name VARCHAR(100), -- 模块名称
|
|
|
|
- function_name VARCHAR(100), -- 函数名称
|
|
|
|
-
|
|
|
|
- -- 时间戳
|
|
|
|
- timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
|
|
-
|
|
|
|
- -- 额外信息(JSON格式)
|
|
|
|
- extra_data JSONB DEFAULT '{}'::jsonb -- 额外的结构化信息
|
|
|
|
-);
|
|
|
|
-
|
|
|
|
--- 添加约束
|
|
|
|
-ALTER TABLE data_pipeline_task_logs ADD CONSTRAINT chk_log_level
|
|
|
|
- CHECK (log_level IN ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'));
|
|
|
|
-
|
|
|
|
--- ====================================================================
|
|
|
|
--- 任务输出文件表 (data_pipeline_task_outputs)
|
|
|
|
--- ====================================================================
|
|
|
|
-CREATE TABLE IF NOT EXISTS data_pipeline_task_outputs (
|
|
|
|
- id SERIAL PRIMARY KEY,
|
|
|
|
- task_id VARCHAR(32) REFERENCES data_pipeline_tasks(id) ON DELETE CASCADE,
|
|
|
|
- execution_id VARCHAR(100) REFERENCES data_pipeline_task_executions(execution_id) ON DELETE SET NULL,
|
|
|
|
-
|
|
|
|
- -- 文件信息
|
|
|
|
- file_type VARCHAR(50) NOT NULL, -- 'ddl', 'md', 'json', 'log', 'report'
|
|
|
|
- file_name VARCHAR(255) NOT NULL, -- 文件名
|
|
|
|
- file_path TEXT NOT NULL, -- 相对路径
|
|
|
|
- file_size BIGINT DEFAULT 0, -- 文件大小(字节)
|
|
|
|
-
|
|
|
|
- -- 文件内容摘要
|
|
|
|
- content_hash VARCHAR(64), -- 文件内容hash
|
|
|
|
- description TEXT, -- 文件描述
|
|
|
|
-
|
|
|
|
- -- 时间戳
|
|
|
|
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
|
|
- modified_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
|
|
-
|
|
|
|
- -- 状态
|
|
|
|
- is_primary BOOLEAN DEFAULT FALSE, -- 是否为主要输出文件
|
|
|
|
- is_downloadable BOOLEAN DEFAULT TRUE -- 是否可下载
|
|
|
|
-);
|
|
|
|
-
|
|
|
|
--- 添加约束
|
|
|
|
-ALTER TABLE data_pipeline_task_outputs ADD CONSTRAINT chk_file_type
|
|
|
|
- CHECK (file_type IN ('ddl', 'md', 'json', 'log', 'report', 'txt', 'other'));
|
|
|
|
-
|
|
|
|
-ALTER TABLE data_pipeline_task_outputs ADD CONSTRAINT chk_file_size_positive
|
|
|
|
- CHECK (file_size >= 0);
|
|
|
|
|
|
|
|
-- ====================================================================
-- Indexes
-- ====================================================================

-- Task table indexes
CREATE INDEX IF NOT EXISTS idx_tasks_status ON data_pipeline_tasks(status);
CREATE INDEX IF NOT EXISTS idx_tasks_created_at ON data_pipeline_tasks(created_at DESC);
CREATE INDEX IF NOT EXISTS idx_tasks_db_name ON data_pipeline_tasks(db_name);
CREATE INDEX IF NOT EXISTS idx_tasks_created_type ON data_pipeline_tasks(created_type);
CREATE INDEX IF NOT EXISTS idx_tasks_task_type ON data_pipeline_tasks(task_type);

-- Step table indexes
CREATE INDEX IF NOT EXISTS idx_steps_task_id ON data_pipeline_task_steps(task_id);
CREATE INDEX IF NOT EXISTS idx_steps_step_name ON data_pipeline_task_steps(step_name);
CREATE INDEX IF NOT EXISTS idx_steps_step_status ON data_pipeline_task_steps(step_status);
CREATE INDEX IF NOT EXISTS idx_steps_started_at ON data_pipeline_task_steps(started_at DESC);
-- NOTE(review): this composite index also serves task_id-prefix lookups,
-- which makes idx_steps_task_id above technically redundant — consider
-- dropping the single-column index in a follow-up migration.
CREATE INDEX IF NOT EXISTS idx_steps_task_step ON data_pipeline_task_steps(task_id, step_name);
|
|
|
|
|
|
-- ====================================================================
|
|
-- ====================================================================
|
|
-- 创建清理函数
|
|
-- 创建清理函数
|
|
@@ -200,19 +110,13 @@ DECLARE
|
|
BEGIN
|
|
BEGIN
|
|
cutoff_date := NOW() - INTERVAL '1 day' * days_to_keep;
|
|
cutoff_date := NOW() - INTERVAL '1 day' * days_to_keep;
|
|
|
|
|
|
- -- 删除旧任务(级联删除相关日志和文件记录)
|
|
|
|
|
|
+ -- 删除旧任务(级联删除相关步骤记录)
|
|
DELETE FROM data_pipeline_tasks
|
|
DELETE FROM data_pipeline_tasks
|
|
WHERE created_at < cutoff_date
|
|
WHERE created_at < cutoff_date
|
|
AND status IN ('completed', 'failed');
|
|
AND status IN ('completed', 'failed');
|
|
|
|
|
|
GET DIAGNOSTICS deleted_count = ROW_COUNT;
|
|
GET DIAGNOSTICS deleted_count = ROW_COUNT;
|
|
|
|
|
|
- -- 记录清理操作
|
|
|
|
- INSERT INTO data_pipeline_task_logs (task_id, log_level, message, step_name)
|
|
|
|
- VALUES ('system', 'INFO',
|
|
|
|
- FORMAT('清理了 %s 个超过 %s 天的旧任务', deleted_count, days_to_keep),
|
|
|
|
- 'cleanup');
|
|
|
|
-
|
|
|
|
RETURN deleted_count;
|
|
RETURN deleted_count;
|
|
END;
|
|
END;
|
|
$$ LANGUAGE plpgsql;
|
|
$$ LANGUAGE plpgsql;
|
|
@@ -249,12 +153,12 @@ DECLARE
|
|
BEGIN
|
|
BEGIN
|
|
cutoff_time := NOW() - INTERVAL '1 hour' * timeout_hours;
|
|
cutoff_time := NOW() - INTERVAL '1 hour' * timeout_hours;
|
|
|
|
|
|
- -- 查找超时的运行中执行
|
|
|
|
- UPDATE data_pipeline_task_executions
|
|
|
|
- SET status = 'failed',
|
|
|
|
- error_message = FORMAT('执行超时(超过%s小时),可能已停止运行', timeout_hours),
|
|
|
|
|
|
+ -- 查找超时的运行中步骤
|
|
|
|
+ UPDATE data_pipeline_task_steps
|
|
|
|
+ SET step_status = 'failed',
|
|
|
|
+ error_message = FORMAT('步骤执行超时(超过%s小时),可能已停止运行', timeout_hours),
|
|
completed_at = NOW()
|
|
completed_at = NOW()
|
|
- WHERE status = 'running'
|
|
|
|
|
|
+ WHERE step_status = 'running'
|
|
AND started_at < cutoff_time;
|
|
AND started_at < cutoff_time;
|
|
|
|
|
|
GET DIAGNOSTICS zombie_count = ROW_COUNT;
|
|
GET DIAGNOSTICS zombie_count = ROW_COUNT;
|
|
@@ -266,14 +170,6 @@ BEGIN
|
|
WHERE status IN ('in_progress')
|
|
WHERE status IN ('in_progress')
|
|
AND started_at < cutoff_time;
|
|
AND started_at < cutoff_time;
|
|
|
|
|
|
- -- 记录检查操作
|
|
|
|
- IF zombie_count > 0 THEN
|
|
|
|
- INSERT INTO data_pipeline_task_logs (task_id, log_level, message, step_name)
|
|
|
|
- VALUES ('system', 'WARNING',
|
|
|
|
- FORMAT('发现并处理了 %s 个僵尸执行', zombie_count),
|
|
|
|
- 'zombie_check');
|
|
|
|
- END IF;
|
|
|
|
-
|
|
|
|
RETURN zombie_count;
|
|
RETURN zombie_count;
|
|
END;
|
|
END;
|
|
$$ LANGUAGE plpgsql;
|
|
$$ LANGUAGE plpgsql;
|
|
@@ -289,41 +185,33 @@ $$ LANGUAGE plpgsql;
|
|
-- ====================================================================
-- Views (for convenient querying)
-- ====================================================================

-- Task/step overview: one row per (task, step); step columns come back
-- NULL for tasks that have no step rows yet (LEFT JOIN).
CREATE OR REPLACE VIEW v_task_step_overview AS
SELECT
    t.task_id,
    t.task_type,
    t.status        AS task_status,
    t.created_at,
    t.started_at,
    t.completed_at,
    t.created_type,
    t.by_user,
    t.db_name,
    s.step_name,
    s.step_status,
    s.started_at    AS step_started_at,
    s.completed_at  AS step_completed_at,
    s.error_message AS step_error_message
FROM data_pipeline_tasks AS t
LEFT JOIN data_pipeline_task_steps AS s
    ON s.task_id = t.task_id
-- Newest tasks first, then steps in pipeline execution order.
ORDER BY
    t.created_at DESC,
    CASE s.step_name
        WHEN 'ddl_generation' THEN 1
        WHEN 'qa_generation'  THEN 2
        WHEN 'sql_validation' THEN 3
        WHEN 'training_load'  THEN 4
        ELSE 5
    END;
|
|
|
|
|
|
-- Commit the transaction opened at the top of this migration (BEGIN;)
COMMIT;
|
|
@@ -333,9 +221,7 @@ COMMIT;
|
|
-- psql summary output: tables and functions created by this migration
-- (runtime \echo strings intentionally left unchanged)
\echo ''
\echo '已创建的表:'
\echo '- data_pipeline_tasks: 任务主表'
\echo '- data_pipeline_task_steps: 任务步骤状态表'
\echo ''
\echo '已创建的函数:'
\echo '- cleanup_old_data_pipeline_tasks(days): 清理旧任务'
|
@@ -343,4 +229,4 @@ COMMIT;
|
|
-- psql summary output: remaining function and view listing
-- (runtime \echo strings intentionally left unchanged)
\echo '- check_zombie_data_pipeline_tasks(hours): 检查僵尸任务'
\echo ''
\echo '已创建的视图:'
\echo '- v_task_step_overview: 任务步骤概览'