diff --git a/doc/sql/snail_job_dm8.sql b/doc/sql/snail_job_dm8.sql
index 42498f78..e120fee3 100644
--- a/doc/sql/snail_job_dm8.sql
+++ b/doc/sql/snail_job_dm8.sql
@@ -2,7 +2,7 @@
  SnailJob Database Transfer Tool
 
  Source Server Type : MySQL
  Target Server Type : DM8
 
- Date: 2024-06-01 00:26:12
+ Date: 2024-07-06 12:59:11
 */
@@ -357,8 +357,7 @@ COMMENT ON TABLE sj_server_node IS '服务器节点';
 -- sj_distributed_lock
 CREATE TABLE sj_distributed_lock
 (
-    id bigint NOT NULL PRIMARY KEY IDENTITY,
-    name varchar(64) NULL,
+    name varchar(64) NOT NULL PRIMARY KEY IDENTITY,
     lock_until timestamp(3) DEFAULT CURRENT_TIMESTAMP(3) NOT NULL,
     locked_at timestamp(3) DEFAULT CURRENT_TIMESTAMP(3) NOT NULL,
     locked_by varchar(255) NULL,
@@ -366,7 +365,6 @@ CREATE TABLE sj_distributed_lock
     update_dt datetime DEFAULT CURRENT_TIMESTAMP NOT NULL
 );
 
-COMMENT ON COLUMN sj_distributed_lock.id IS '主键';
 COMMENT ON COLUMN sj_distributed_lock.name IS '锁名称';
 COMMENT ON COLUMN sj_distributed_lock.lock_until IS '锁定时长';
 COMMENT ON COLUMN sj_distributed_lock.locked_at IS '锁定时间';
@@ -546,7 +544,11 @@ CREATE TABLE sj_job_task
     parent_id bigint DEFAULT 0 NOT NULL,
     task_status smallint DEFAULT 0 NOT NULL,
     retry_count int DEFAULT 0 NOT NULL,
+    mr_stage smallint DEFAULT NULL NULL,
+    leaf smallint DEFAULT '1' NOT NULL,
+    task_name varchar(255) DEFAULT '' NULL,
     client_info varchar(128) DEFAULT NULL NULL,
+    wf_context text DEFAULT NULL NULL,
     result_message text NULL,
     args_str text DEFAULT NULL NULL,
     args_type smallint DEFAULT 1 NOT NULL,
@@ -567,7 +569,11 @@ COMMENT ON COLUMN sj_job_task.task_batch_id IS '调度任务id';
 COMMENT ON COLUMN sj_job_task.parent_id IS '父执行器id';
 COMMENT ON COLUMN sj_job_task.task_status IS '执行的状态 0、失败 1、成功';
 COMMENT ON COLUMN sj_job_task.retry_count IS '重试次数';
+COMMENT ON COLUMN sj_job_task.mr_stage IS '动态分片所处阶段 1:map 2:reduce 3:mergeReduce';
+COMMENT ON COLUMN sj_job_task.leaf IS '叶子节点';
+COMMENT ON COLUMN sj_job_task.task_name IS '任务名称';
 COMMENT ON COLUMN sj_job_task.client_info IS '客户端地址 clientId#ip:port';
+COMMENT ON COLUMN sj_job_task.wf_context IS '工作流全局上下文';
 COMMENT ON COLUMN sj_job_task.result_message IS '执行结果';
 COMMENT ON COLUMN sj_job_task.args_str IS '执行方法参数';
 COMMENT ON COLUMN sj_job_task.args_type IS '参数类型 ';
@@ -709,6 +715,7 @@ CREATE TABLE sj_workflow
     executor_timeout int DEFAULT 0 NOT NULL,
     description varchar(256) DEFAULT '' NULL,
     flow_info text DEFAULT NULL NULL,
+    wf_context text DEFAULT NULL NULL,
     bucket_index int DEFAULT 0 NOT NULL,
     version int NOT NULL,
     ext_attrs varchar(256) DEFAULT '' NULL,
@@ -732,6 +739,7 @@ COMMENT ON COLUMN sj_workflow.block_strategy IS '阻塞策略 1、丢弃 2、覆
 COMMENT ON COLUMN sj_workflow.executor_timeout IS '任务执行超时时间,单位秒';
 COMMENT ON COLUMN sj_workflow.description IS '描述';
 COMMENT ON COLUMN sj_workflow.flow_info IS '流程信息';
+COMMENT ON COLUMN sj_workflow.wf_context IS '上下文';
 COMMENT ON COLUMN sj_workflow.bucket_index IS 'bucket';
 COMMENT ON COLUMN sj_workflow.version IS '版本号';
 COMMENT ON COLUMN sj_workflow.ext_attrs IS '扩展字段';
@@ -794,8 +802,10 @@ CREATE TABLE sj_workflow_task_batch
     task_batch_status smallint DEFAULT 0 NOT NULL,
     operation_reason smallint DEFAULT 0 NOT NULL,
     flow_info text DEFAULT NULL NULL,
+    wf_context text DEFAULT NULL NULL,
     execution_at bigint DEFAULT 0 NOT NULL,
     ext_attrs varchar(256) DEFAULT '' NULL,
+    version int DEFAULT 1 NOT NULL,
     deleted smallint DEFAULT 0 NOT NULL,
     create_dt datetime DEFAULT CURRENT_TIMESTAMP NOT NULL,
     update_dt datetime DEFAULT CURRENT_TIMESTAMP NOT NULL
@@ -812,9 +822,11 @@ COMMENT ON COLUMN sj_workflow_task_batch.workflow_id IS '工作流任务id';
 COMMENT ON COLUMN sj_workflow_task_batch.task_batch_status IS '任务批次状态 0、失败 1、成功';
 COMMENT ON COLUMN sj_workflow_task_batch.operation_reason IS '操作原因';
 COMMENT ON COLUMN sj_workflow_task_batch.flow_info IS '流程信息';
+COMMENT ON COLUMN sj_workflow_task_batch.wf_context IS '全局上下文';
 COMMENT ON COLUMN sj_workflow_task_batch.execution_at IS '任务执行时间';
 COMMENT ON COLUMN sj_workflow_task_batch.ext_attrs IS '扩展字段';
+COMMENT ON COLUMN sj_workflow_task_batch.version IS '版本号';
 COMMENT ON COLUMN sj_workflow_task_batch.deleted IS '逻辑删除 1、删除';
 COMMENT ON COLUMN sj_workflow_task_batch.create_dt IS '创建时间';
 COMMENT ON COLUMN sj_workflow_task_batch.update_dt IS '修改时间';
 COMMENT ON TABLE sj_workflow_task_batch IS '工作流批次';
diff --git a/doc/sql/snail_job_oracle.sql b/doc/sql/snail_job_oracle.sql
index 1a89e65e..98135575 100644
--- a/doc/sql/snail_job_oracle.sql
+++ b/doc/sql/snail_job_oracle.sql
@@ -2,7 +2,7 @@
  SnailJob Database Transfer Tool
 
  Source Server Type : MySQL
  Target Server Type : Oracle
 
- Date: 2024-05-20 22:01:56
+ Date: 2024-07-06 12:49:36
 */
@@ -387,8 +387,7 @@ COMMENT ON TABLE sj_server_node IS '服务器节点';
 -- sj_distributed_lock
 CREATE TABLE sj_distributed_lock
 (
-    id number GENERATED ALWAYS AS IDENTITY,
-    name varchar2(64) NULL,
+    name varchar2(64) NOT NULL,
     lock_until timestamp(3) DEFAULT CURRENT_TIMESTAMP(3) NOT NULL,
     locked_at timestamp(3) DEFAULT CURRENT_TIMESTAMP(3) NOT NULL,
     locked_by varchar2(255) NULL,
@@ -397,9 +396,8 @@ CREATE TABLE sj_distributed_lock
 );
 
 ALTER TABLE sj_distributed_lock
-    ADD CONSTRAINT pk_sj_distributed_lock PRIMARY KEY (id);
+    ADD CONSTRAINT pk_sj_distributed_lock PRIMARY KEY (name);
 
-COMMENT ON COLUMN sj_distributed_lock.id IS '主键';
 COMMENT ON COLUMN sj_distributed_lock.name IS '锁名称';
 COMMENT ON COLUMN sj_distributed_lock.lock_until IS '锁定时长';
 COMMENT ON COLUMN sj_distributed_lock.locked_at IS '锁定时间';
@@ -594,7 +592,11 @@ CREATE TABLE sj_job_task
     parent_id number DEFAULT 0 NOT NULL,
     task_status smallint DEFAULT 0 NOT NULL,
     retry_count number DEFAULT 0 NOT NULL,
+    mr_stage smallint DEFAULT NULL NULL,
+    leaf smallint DEFAULT '1' NOT NULL,
+    task_name varchar2(255) DEFAULT '' NULL,
     client_info varchar2(128) DEFAULT NULL NULL,
+    wf_context clob DEFAULT NULL NULL,
     result_message clob NULL,
     args_str clob DEFAULT NULL NULL,
     args_type smallint DEFAULT 1 NOT NULL,
@@ -618,7 +620,11 @@ COMMENT ON COLUMN sj_job_task.task_batch_id IS '调度任务id';
 COMMENT ON COLUMN sj_job_task.parent_id IS '父执行器id';
 COMMENT ON COLUMN sj_job_task.task_status IS '执行的状态 0、失败 1、成功';
 COMMENT ON COLUMN sj_job_task.retry_count IS '重试次数';
+COMMENT ON COLUMN sj_job_task.mr_stage IS '动态分片所处阶段 1:map 2:reduce 3:mergeReduce';
+COMMENT ON COLUMN sj_job_task.leaf IS '叶子节点';
+COMMENT ON COLUMN sj_job_task.task_name IS '任务名称';
 COMMENT ON COLUMN sj_job_task.client_info IS '客户端地址 clientId#ip:port';
+COMMENT ON COLUMN sj_job_task.wf_context IS '工作流全局上下文';
 COMMENT ON COLUMN sj_job_task.result_message IS '执行结果';
 COMMENT ON COLUMN sj_job_task.args_str IS '执行方法参数';
 COMMENT ON COLUMN sj_job_task.args_type IS '参数类型 ';
@@ -769,6 +775,7 @@ CREATE TABLE sj_workflow
     executor_timeout number DEFAULT 0 NOT NULL,
     description varchar2(256) DEFAULT '' NULL,
     flow_info clob DEFAULT NULL NULL,
+    wf_context clob DEFAULT NULL NULL,
     bucket_index number DEFAULT 0 NOT NULL,
     version number NOT NULL,
     ext_attrs varchar2(256) DEFAULT '' NULL,
@@ -795,6 +802,7 @@ COMMENT ON COLUMN sj_workflow.block_strategy IS '阻塞策略 1、丢弃 2、覆
 COMMENT ON COLUMN sj_workflow.executor_timeout IS '任务执行超时时间,单位秒';
 COMMENT ON COLUMN sj_workflow.description IS '描述';
 COMMENT ON COLUMN sj_workflow.flow_info IS '流程信息';
+COMMENT ON COLUMN sj_workflow.wf_context IS '上下文';
 COMMENT ON COLUMN sj_workflow.bucket_index IS 'bucket';
 COMMENT ON COLUMN sj_workflow.version IS '版本号';
 COMMENT ON COLUMN sj_workflow.ext_attrs IS '扩展字段';
@@ -860,8 +868,10 @@ CREATE TABLE sj_workflow_task_batch
     task_batch_status smallint DEFAULT 0 NOT NULL,
     operation_reason smallint DEFAULT 0 NOT NULL,
     flow_info clob DEFAULT NULL NULL,
+    wf_context clob DEFAULT NULL NULL,
     execution_at number DEFAULT 0 NOT NULL,
     ext_attrs varchar2(256) DEFAULT '' NULL,
+    version number DEFAULT 1 NOT NULL,
     deleted smallint DEFAULT 0 NOT NULL,
     create_dt date DEFAULT CURRENT_TIMESTAMP NOT NULL,
     update_dt date DEFAULT CURRENT_TIMESTAMP NOT NULL
@@ -881,9 +891,11 @@ COMMENT ON COLUMN sj_workflow_task_batch.workflow_id IS '工作流任务id';
 COMMENT ON COLUMN sj_workflow_task_batch.task_batch_status IS '任务批次状态 0、失败 1、成功';
 COMMENT ON COLUMN sj_workflow_task_batch.operation_reason IS '操作原因';
 COMMENT ON COLUMN sj_workflow_task_batch.flow_info IS '流程信息';
+COMMENT ON COLUMN sj_workflow_task_batch.wf_context IS '全局上下文';
 COMMENT ON COLUMN sj_workflow_task_batch.execution_at IS '任务执行时间';
 COMMENT ON COLUMN sj_workflow_task_batch.ext_attrs IS '扩展字段';
+COMMENT ON COLUMN sj_workflow_task_batch.version IS '版本号';
 COMMENT ON COLUMN sj_workflow_task_batch.deleted IS '逻辑删除 1、删除';
 COMMENT ON COLUMN sj_workflow_task_batch.create_dt IS '创建时间';
 COMMENT ON COLUMN sj_workflow_task_batch.update_dt IS '修改时间';
 COMMENT ON TABLE sj_workflow_task_batch IS '工作流批次';
diff --git a/doc/sql/snail_job_postgre.sql b/doc/sql/snail_job_postgre.sql
index 35cbbba3..88b6b937 100644
--- a/doc/sql/snail_job_postgre.sql
+++ b/doc/sql/snail_job_postgre.sql
@@ -2,7 +2,7 @@
  SnailJob Database Transfer Tool
 
  Source Server Type : MySQL
  Target Server Type : PostgreSQL
 
- Date: 2024-05-20 22:02:23
+ Date: 2024-07-06 11:45:40
 */
@@ -357,8 +357,7 @@ COMMENT ON TABLE sj_server_node IS '服务器节点';
 -- sj_distributed_lock
 CREATE TABLE sj_distributed_lock
 (
-    id bigserial PRIMARY KEY,
-    name varchar(64) NOT NULL,
+    name varchar(64) PRIMARY KEY,
     lock_until timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),
     locked_at timestamp(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),
     locked_by varchar(255) NOT NULL,
@@ -366,7 +365,6 @@ CREATE TABLE sj_distributed_lock
     update_dt timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP
 );
 
-COMMENT ON COLUMN sj_distributed_lock.id IS '主键';
 COMMENT ON COLUMN sj_distributed_lock.name IS '锁名称';
 COMMENT ON COLUMN sj_distributed_lock.lock_until IS '锁定时长';
 COMMENT ON COLUMN sj_distributed_lock.locked_at IS '锁定时间';
@@ -546,7 +544,11 @@ CREATE TABLE sj_job_task
     parent_id bigint NOT NULL DEFAULT 0,
     task_status smallint NOT NULL DEFAULT 0,
     retry_count int NOT NULL DEFAULT 0,
+    mr_stage smallint NULL DEFAULT NULL,
+    leaf smallint NOT NULL DEFAULT '1',
+    task_name varchar(255) NOT NULL DEFAULT '',
     client_info varchar(128) NULL DEFAULT NULL,
+    wf_context text NULL DEFAULT NULL,
     result_message text NOT NULL,
     args_str text NULL DEFAULT NULL,
     args_type smallint NOT NULL DEFAULT 1,
@@ -567,7 +569,11 @@ COMMENT ON COLUMN sj_job_task.task_batch_id IS '调度任务id';
 COMMENT ON COLUMN sj_job_task.parent_id IS '父执行器id';
 COMMENT ON COLUMN sj_job_task.task_status IS '执行的状态 0、失败 1、成功';
 COMMENT ON COLUMN sj_job_task.retry_count IS '重试次数';
+COMMENT ON COLUMN sj_job_task.mr_stage IS '动态分片所处阶段 1:map 2:reduce 3:mergeReduce';
+COMMENT ON COLUMN sj_job_task.leaf IS '叶子节点';
+COMMENT ON COLUMN sj_job_task.task_name IS '任务名称';
 COMMENT ON COLUMN sj_job_task.client_info IS '客户端地址 clientId#ip:port';
+COMMENT ON COLUMN sj_job_task.wf_context IS '工作流全局上下文';
 COMMENT ON COLUMN sj_job_task.result_message IS '执行结果';
 COMMENT ON COLUMN sj_job_task.args_str IS '执行方法参数';
 COMMENT ON COLUMN sj_job_task.args_type IS '参数类型 ';
@@ -709,6 +715,7 @@ CREATE TABLE sj_workflow
     executor_timeout int NOT NULL DEFAULT 0,
     description varchar(256) NOT NULL DEFAULT '',
     flow_info text NULL DEFAULT NULL,
+    wf_context text NULL DEFAULT NULL,
     bucket_index int NOT NULL DEFAULT 0,
     version int NOT NULL,
     ext_attrs varchar(256) NULL DEFAULT '',
@@ -732,6 +739,7 @@ COMMENT ON COLUMN sj_workflow.block_strategy IS '阻塞策略 1、丢弃 2、覆
 COMMENT ON COLUMN sj_workflow.executor_timeout IS '任务执行超时时间,单位秒';
 COMMENT ON COLUMN sj_workflow.description IS '描述';
 COMMENT ON COLUMN sj_workflow.flow_info IS '流程信息';
+COMMENT ON COLUMN sj_workflow.wf_context IS '上下文';
 COMMENT ON COLUMN sj_workflow.bucket_index IS 'bucket';
 COMMENT ON COLUMN sj_workflow.version IS '版本号';
 COMMENT ON COLUMN sj_workflow.ext_attrs IS '扩展字段';
@@ -794,8 +802,10 @@ CREATE TABLE sj_workflow_task_batch
     task_batch_status smallint NOT NULL DEFAULT 0,
     operation_reason smallint NOT NULL DEFAULT 0,
     flow_info text NULL DEFAULT NULL,
+    wf_context text NULL DEFAULT NULL,
     execution_at bigint NOT NULL DEFAULT 0,
     ext_attrs varchar(256) NULL DEFAULT '',
+    version int NOT NULL DEFAULT 1,
     deleted smallint NOT NULL DEFAULT 0,
     create_dt timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
     update_dt timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP
@@ -812,10 +822,11 @@ COMMENT ON COLUMN sj_workflow_task_batch.workflow_id IS '工作流任务id';
 COMMENT ON COLUMN sj_workflow_task_batch.task_batch_status IS '任务批次状态 0、失败 1、成功';
 COMMENT ON COLUMN sj_workflow_task_batch.operation_reason IS '操作原因';
 COMMENT ON COLUMN sj_workflow_task_batch.flow_info IS '流程信息';
+COMMENT ON COLUMN sj_workflow_task_batch.wf_context IS '全局上下文';
 COMMENT ON COLUMN sj_workflow_task_batch.execution_at IS '任务执行时间';
 COMMENT ON COLUMN sj_workflow_task_batch.ext_attrs IS '扩展字段';
+COMMENT ON COLUMN sj_workflow_task_batch.version IS '版本号';
 COMMENT ON COLUMN sj_workflow_task_batch.deleted IS '逻辑删除 1、删除';
 COMMENT ON COLUMN sj_workflow_task_batch.create_dt IS '创建时间';
 COMMENT ON COLUMN sj_workflow_task_batch.update_dt IS '修改时间';
 COMMENT ON TABLE sj_workflow_task_batch IS '工作流批次';
-
diff --git a/doc/sql/snail_job_sqlserver.sql b/doc/sql/snail_job_sqlserver.sql
index b2c1b7aa..d72ecd09 100644
--- a/doc/sql/snail_job_sqlserver.sql
+++ b/doc/sql/snail_job_sqlserver.sql
@@ -2,7 +2,7 @@
  SnailJob Database Transfer Tool
 
  Source Server Type : MySQL
  Target Server Type : Microsoft SQL Server
 
- Date: 2024-05-20 22:03:46
+ Date: 2024-07-06 12:55:47
 */
@@ -1138,8 +1138,7 @@ GO
 -- sj_distributed_lock
 CREATE TABLE sj_distributed_lock
 (
-    id bigint NOT NULL PRIMARY KEY IDENTITY,
-    name nvarchar(64) NOT NULL,
+    name nvarchar(64) NOT NULL PRIMARY KEY IDENTITY,
     lock_until datetime2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
     locked_at datetime2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
     locked_by nvarchar(255) NOT NULL,
@@ -1148,13 +1147,6 @@ CREATE TABLE sj_distributed_lock
 )
 GO
 
-EXEC sp_addextendedproperty
-    'MS_Description', N'主键',
-    'SCHEMA', N'dbo',
-    'TABLE', N'sj_distributed_lock',
-    'COLUMN', N'id'
-GO
-
 EXEC sp_addextendedproperty
     'MS_Description', N'锁名称',
     'SCHEMA', N'dbo',
@@ -1738,7 +1730,11 @@ CREATE TABLE sj_job_task
     parent_id bigint NOT NULL DEFAULT 0,
     task_status tinyint NOT NULL DEFAULT 0,
     retry_count int NOT NULL DEFAULT 0,
+    mr_stage tinyint NULL DEFAULT NULL,
+    leaf tinyint NOT NULL DEFAULT '1',
+    task_name nvarchar(255) NOT NULL DEFAULT '',
     client_info nvarchar(128) NULL DEFAULT NULL,
+    wf_context nvarchar(max) NULL DEFAULT NULL,
     result_message nvarchar(max) NOT NULL,
     args_str nvarchar(max) NULL DEFAULT NULL,
     args_type tinyint NOT NULL DEFAULT 1,
@@ -1811,6 +1807,27 @@ EXEC sp_addextendedproperty
     'COLUMN', N'retry_count'
 GO
 
+EXEC sp_addextendedproperty
+    'MS_Description', N'动态分片所处阶段 1:map 2:reduce 3:mergeReduce',
+    'SCHEMA', N'dbo',
+    'TABLE', N'sj_job_task',
+    'COLUMN', N'mr_stage'
+GO
+
+EXEC sp_addextendedproperty
+    'MS_Description', N'叶子节点',
+    'SCHEMA', N'dbo',
+    'TABLE', N'sj_job_task',
+    'COLUMN', N'leaf'
+GO
+
+EXEC sp_addextendedproperty
+    'MS_Description', N'任务名称',
+    'SCHEMA', N'dbo',
+    'TABLE', N'sj_job_task',
+    'COLUMN', N'task_name'
+GO
+
 EXEC sp_addextendedproperty
     'MS_Description', N'客户端地址 clientId#ip:port',
     'SCHEMA', N'dbo',
@@ -1818,6 +1835,13 @@ EXEC sp_addextendedproperty
     'COLUMN', N'client_info'
 GO
 
+EXEC sp_addextendedproperty
+    'MS_Description', N'工作流全局上下文',
+    'SCHEMA', N'dbo',
+    'TABLE', N'sj_job_task',
+    'COLUMN', N'wf_context'
+GO
+
 EXEC sp_addextendedproperty
     'MS_Description', N'执行结果',
     'SCHEMA', N'dbo',
@@ -2274,6 +2298,7 @@ CREATE TABLE sj_workflow
     executor_timeout int NOT NULL DEFAULT 0,
     description nvarchar(256) NOT NULL DEFAULT '',
     flow_info nvarchar(max) NULL DEFAULT NULL,
+    wf_context nvarchar(max) NULL DEFAULT NULL,
     bucket_index int NOT NULL DEFAULT 0,
     version int NOT NULL,
     ext_attrs nvarchar(256) NULL DEFAULT '',
@@ -2372,6 +2397,13 @@ EXEC sp_addextendedproperty
     'COLUMN', N'flow_info'
 GO
 
+EXEC sp_addextendedproperty
+    'MS_Description', N'上下文',
+    'SCHEMA', N'dbo',
+    'TABLE', N'sj_workflow',
+    'COLUMN', N'wf_context'
+GO
+
 EXEC sp_addextendedproperty
     'MS_Description', N'bucket',
     'SCHEMA', N'dbo',
@@ -2583,8 +2615,10 @@ CREATE TABLE sj_workflow_task_batch
     task_batch_status tinyint NOT NULL DEFAULT 0,
     operation_reason tinyint NOT NULL DEFAULT 0,
     flow_info nvarchar(max) NULL DEFAULT NULL,
+    wf_context nvarchar(max) NULL DEFAULT NULL,
     execution_at bigint NOT NULL DEFAULT 0,
     ext_attrs nvarchar(256) NULL DEFAULT '',
+    version int NOT NULL DEFAULT 1,
     deleted tinyint NOT NULL DEFAULT 0,
     create_dt datetime2 NOT NULL DEFAULT CURRENT_TIMESTAMP,
     update_dt datetime2 NOT NULL DEFAULT CURRENT_TIMESTAMP
@@ -2647,6 +2681,13 @@ EXEC sp_addextendedproperty
     'COLUMN', N'flow_info'
 GO
 
+EXEC sp_addextendedproperty
+    'MS_Description', N'全局上下文',
+    'SCHEMA', N'dbo',
+    'TABLE', N'sj_workflow_task_batch',
+    'COLUMN', N'wf_context'
+GO
+
 EXEC sp_addextendedproperty
     'MS_Description', N'任务执行时间',
     'SCHEMA', N'dbo',
@@ -2661,6 +2702,13 @@ EXEC sp_addextendedproperty
     'COLUMN', N'ext_attrs'
 GO
 
+EXEC sp_addextendedproperty
+    'MS_Description', N'版本号',
+    'SCHEMA', N'dbo',
+    'TABLE', N'sj_workflow_task_batch',
+    'COLUMN', N'version'
+GO
+
 EXEC sp_addextendedproperty
     'MS_Description', N'逻辑删除 1、删除',
     'SCHEMA', N'dbo',
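Note on the schema change shared by all four dialect scripts above: sj_distributed_lock drops its surrogate id column and the lock name itself becomes the primary key (inline in DM8, PostgreSQL and SQL Server, via the pk_sj_distributed_lock constraint in Oracle). One detail worth flagging: the DM8 and SQL Server scripts keep the IDENTITY clause on the now character-typed name column, and SQL Server only allows IDENTITY on numeric columns (DM8 appears to have the same restriction), so that clause probably needs to be dropped there. The sketch below is illustration only, written against Python's stdlib sqlite3 rather than SnailJob's actual lock code, with a placeholder lock name; it shows the usual name-keyed lease pattern such a table serves: every acquire or takeover addresses the row by name, so the unique name alone provides mutual exclusion and a numeric id adds nothing.

# Illustration only (sqlite3 stand-in): not SnailJob's lock code, and the lock
# name "demo-lock" is a placeholder. Acquisition is keyed purely on `name`,
# which is why the surrogate id column can be dropped and `name` made the PK.
import sqlite3
from datetime import datetime, timedelta

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE sj_distributed_lock ("
    " name       TEXT PRIMARY KEY,"
    " lock_until TEXT NOT NULL,"
    " locked_at  TEXT NOT NULL,"
    " locked_by  TEXT NOT NULL)"
)

def try_acquire(name: str, owner: str, hold_seconds: int = 30) -> bool:
    """Insert the lock row, or take over an existing row whose lease expired."""
    now = datetime.now()
    until = (now + timedelta(seconds=hold_seconds)).isoformat()
    try:
        # The PRIMARY KEY constraint on `name` is what enforces mutual exclusion.
        conn.execute(
            "INSERT INTO sj_distributed_lock (name, lock_until, locked_at, locked_by)"
            " VALUES (?, ?, ?, ?)",
            (name, until, now.isoformat(), owner),
        )
        return True
    except sqlite3.IntegrityError:
        # Row already exists: only steal it if the previous holder's lease ran out.
        cur = conn.execute(
            "UPDATE sj_distributed_lock"
            " SET lock_until = ?, locked_at = ?, locked_by = ?"
            " WHERE name = ? AND lock_until <= ?",
            (until, now.isoformat(), owner, name, now.isoformat()),
        )
        return cur.rowcount == 1

print(try_acquire("demo-lock", "node-1"))  # True: row inserted
print(try_acquire("demo-lock", "node-2"))  # False: node-1 still holds the lease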
diff --git a/doc/tool/dbsync.py b/doc/tool/dbsync.py
index 6dd06689..df0dbc03 100755
--- a/doc/tool/dbsync.py
+++ b/doc/tool/dbsync.py
@@ -536,18 +536,18 @@ class SQLServerConvertor(Convertor):
             type = col["type"].lower()
             full_type = self.translate_type(type, col["size"])
             nullable = "NULL" if col["nullable"] else "NOT NULL"
-            default = f"DEFAULT {col["default"]}" if col["default"] is not None else ""
+            default = f"DEFAULT {col['default']}" if col["default"] is not None else ""
             default = re.sub(r"CURRENT_TIMESTAMP\(\d+\)", "CURRENT_TIMESTAMP", default)
             return f"{name} {full_type} {nullable} {default}"
 
         table_name = ddl["table_name"].lower()
         columns = [f"{_generate_column(col).strip()}" for col in ddl["columns"]]
         filed_def_list = ",\n    ".join(columns)
 
-        script = f"""-- {table_name}
-CREATE TABLE {table_name} (
-    {filed_def_list}
-)
-GO"""
+        script = (f"-- {table_name}\n"
+                  f"CREATE TABLE {table_name} (\n"
+                  f"    {filed_def_list}\n"
+                  f")\n"
+                  f"GO")
 
         return script
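Note on the dbsync.py hunk above: it is a compatibility fix rather than a cosmetic one. Before Python 3.12 (which introduced PEP 701), a replacement field inside an f-string could not reuse the enclosing quote character, so f"DEFAULT {col["default"]}" is a SyntaxError on older interpreters; switching the inner subscript to single quotes parses on every supported version. The triple-quoted SQL template is likewise replaced by implicitly concatenated f-strings with explicit \n, presumably so the emitted script no longer depends on a flush-left string literal sitting inside an indented method. A minimal standalone sketch of both patterns follows (the column dict and the sj_demo table name are made-up placeholders, not the project's real Convertor input):

# Toy illustration of the two patterns in the dbsync.py hunk above; the column
# dict and table name are made up and this is not the project's Convertor class.
col = {"name": "version", "type": "int", "nullable": False, "default": 1}

# Before Python 3.12 (PEP 701) the inner subscript may not reuse the outer
# double quotes, so col['default'] is used instead of col["default"] here.
default = f"DEFAULT {col['default']}" if col["default"] is not None else ""
nullable = "NULL" if col["nullable"] else "NOT NULL"
column_def = f"{col['name']} {col['type']} {nullable} {default}".strip()

table_name = "sj_demo"  # placeholder
filed_def_list = ",\n    ".join([column_def])

# Adjacent f-string literals are concatenated at compile time, so the template
# keeps explicit "\n" line breaks instead of relying on a flush-left
# triple-quoted literal inside an indented method.
script = (f"-- {table_name}\n"
          f"CREATE TABLE {table_name} (\n"
          f"    {filed_def_list}\n"
          f")\n"
          f"GO")
print(script)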
[f"{_generate_column(col).strip()}" for col in ddl["columns"]] filed_def_list = ",\n ".join(columns) - script = f"""-- {table_name} -CREATE TABLE {table_name} ( - {filed_def_list} -) -GO""" + script = (f"-- {table_name}\n" + f"CREATE TABLE {table_name} (\n" + f" {filed_def_list}\n" + f")\n" + f"GO") return script diff --git a/snail-job-server/snail-job-server-job-task/src/main/java/com/aizuda/snailjob/server/job/task/support/dispatch/WorkflowExecutorActor.java b/snail-job-server/snail-job-server-job-task/src/main/java/com/aizuda/snailjob/server/job/task/support/dispatch/WorkflowExecutorActor.java index 146b6c09..f24db397 100644 --- a/snail-job-server/snail-job-server-job-task/src/main/java/com/aizuda/snailjob/server/job/task/support/dispatch/WorkflowExecutorActor.java +++ b/snail-job-server/snail-job-server-job-task/src/main/java/com/aizuda/snailjob/server/job/task/support/dispatch/WorkflowExecutorActor.java @@ -146,28 +146,7 @@ public class WorkflowExecutorActor extends AbstractActor { Map workflowNodeMap = StreamUtils.toIdentityMap(workflowNodes, WorkflowNode::getId); List parentJobTaskBatchList = jobTaskBatchMap.get(taskExecute.getParentId()); - // 如果父节点是无需处理则不再继续执行 -// if (CollUtil.isNotEmpty(parentJobTaskBatchList) && -// parentJobTaskBatchList.stream() -// .map(JobTaskBatch::getOperationReason) -// .filter(Objects::nonNull) -// .anyMatch(JobOperationReasonEnum.WORKFLOW_SUCCESSOR_SKIP_EXECUTION::contains)) { -// workflowBatchHandler.complete(taskExecute.getWorkflowTaskBatchId(), workflowTaskBatch); -// return; -// } - WorkflowNode parentWorkflowNode = workflowNodeMap.get(taskExecute.getParentId()); - // 失败策略处理 -// if (CollUtil.isNotEmpty(parentJobTaskBatchList) -// && parentJobTaskBatchList.stream() -// .map(JobTaskBatch::getTaskBatchStatus) -// .anyMatch(i -> i != JobTaskBatchStatusEnum.SUCCESS.getStatus())) { -// -// // 根据失败策略判断是否继续处理 -// if (Objects.equals(parentWorkflowNode.getFailStrategy(), FailStrategyEnum.BLOCK.getCode())) { -// return; -// } -// } // 决策节点 if (Objects.nonNull(parentWorkflowNode) @@ -186,7 +165,6 @@ public class WorkflowExecutorActor extends AbstractActor { .filter(workflowNode -> !workflowNode.getId().equals(taskExecute.getParentId())) .collect(Collectors.toList()); - // TODO 合并job task的结果到全局上下文中 // 此次的并发数与当时父节点的兄弟节点的数量一致 workflowBatchHandler.mergeWorkflowContextAndRetry(workflowTaskBatch, StreamUtils.toSet(allJobTaskBatchList, JobTaskBatch::getId)); @@ -197,7 +175,7 @@ public class WorkflowExecutorActor extends AbstractActor { // 只会条件节点会使用 Object evaluationResult = null; - log.info("待执行的节点为. workflowNodes:[{}]", StreamUtils.toList(workflowNodes, WorkflowNode::getId)); + log.debug("待执行的节点为. workflowNodes:[{}]", StreamUtils.toList(workflowNodes, WorkflowNode::getId)); for (WorkflowNode workflowNode : workflowNodes) { // 批次已经存在就不在重复生成 @@ -299,7 +277,7 @@ public class WorkflowExecutorActor extends AbstractActor { WorkflowNode preWorkflowNode = workflowNodeMap.get(nodeId); // 根据失败策略判断是否继续处理 if (Objects.equals(preWorkflowNode.getFailStrategy(), FailStrategyEnum.BLOCK.getCode())) { - SnailJobLog.LOCAL.warn("此节点执行失败且失败策略配置了【阻塞】中断执行 [{}] 待执行节点:[{}] parentId:[{}]", nodeId, + SnailJobLog.LOCAL.info("此节点执行失败且失败策略配置了【阻塞】中断执行 [{}] 待执行节点:[{}] parentId:[{}]", nodeId, taskExecute.getParentId(), waitExecWorkflowNode.getId() ); return Boolean.FALSE;