feat(sj_1.5.0): OpenApi adds queries for scheduled job and workflow batch status

Srzou 2025-03-23 19:33:14 +08:00 committed by opensnail
parent 166bba34bb
commit f0d8ee92fd
35 changed files with 813 additions and 57 deletions
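
The diff below adds two query entry points to the OpenAPI client: getJobBatchDetail for a scheduled job batch and getWorkflowBatchDetail for a workflow batch. The following is a minimal client-side usage sketch; it assumes the handlers inherit a blocking execute() method from AbstractRequestHandler and that SnailJobOpenApi sits in the client module's openapi package — neither detail appears in this diff, and the IDs are placeholders.

import com.aizuda.snailjob.client.job.core.dto.JobBatchResponseVO;
import com.aizuda.snailjob.client.job.core.dto.WorkflowDetailResponseVO;
// Assumed location of the facade; adjust the import to the actual package.
import com.aizuda.snailjob.client.job.core.openapi.SnailJobOpenApi;

public class BatchQueryExample {
    public static void main(String[] args) {
        // Query one scheduled job batch by its batch ID (placeholder value).
        JobBatchResponseVO jobBatch = SnailJobOpenApi.getJobBatchDetail(1L).execute();
        System.out.println("job batch status: " + jobBatch.getTaskBatchStatus());

        // Query one workflow batch by its batch ID (placeholder value).
        WorkflowDetailResponseVO workflowBatch = SnailJobOpenApi.getWorkflowBatchDetail(1L).execute();
        System.out.println("workflow batch status: " + workflowBatch.getWorkflowBatchStatus());
    }
}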

View File

@ -0,0 +1,29 @@
package com.aizuda.snailjob.client.job.core.dto;
import lombok.Data;
/**
* Callback node configuration
*
* @author xiaowoniu
* @date 2023-12-30 11:18:14
* @since 2.6.0
*/
@Data
public class CallbackConfig {
/**
* webhook
*/
private String webhook;
/**
* Request content type
*/
private Integer contentType;
/**
* Secret key
*/
private String secret;
}

View File

@ -0,0 +1,35 @@
package com.aizuda.snailjob.client.job.core.dto;
import lombok.Data;
/**
* Decision node configuration
*
* @author xiaowoniu
* @date 2023-12-30 11:17:30
* @since 2.6.0
*/
@Data
public class DecisionConfig {
/**
* Expression type: 1 SpEL, 2 Aviator, 3 QL
*/
private Integer expressionType;
/**
* Decision node expression
*/
private String nodeExpression;
// /**
// * Logical condition: and / or
// */
// private Integer logicalCondition;
/**
* Whether this is the default (else) branch
*/
private Integer defaultDecision;
}

View File

@ -0,0 +1,95 @@
package com.aizuda.snailjob.client.job.core.dto;
import lombok.Data;
import java.time.LocalDateTime;
/**
* @author: opensnail
* @date : 2023-10-12 10:18
* @since : 2.4.0
*/
@Data
public class JobBatchResponseVO {
private Long id;
/**
* Group name
*/
private String groupName;
/**
* Job name
*/
private String jobName;
/**
* Task type
*/
private String taskType;
/**
* Workflow node name
*/
private String nodeName;
/**
* Job ID
*/
private Long jobId;
/**
* Task status
*/
private Integer taskBatchStatus;
/**
* Creation time
*/
private LocalDateTime createDt;
/**
* Update time
*/
private LocalDateTime updateDt;
/**
* Task execution time
*/
private LocalDateTime executionAt;
/**
* Operation reason
*/
private Integer operationReason;
/**
* Executor type: 1 Java
*/
private Integer executorType;
/**
* Executor name
*/
private String executorInfo;
/**
* Callback node info of the workflow
*/
private CallbackConfig callback;
/**
* Decision node info of the workflow
*/
private DecisionConfig decision;
/**
* Workflow batch ID
*/
private Long workflowTaskBatchId;
/**
* Workflow node ID
*/
private Long workflowNodeId;
}

View File

@ -0,0 +1,22 @@
package com.aizuda.snailjob.client.job.core.dto;
import lombok.Data;
/**
* @author xiaowoniu
* @date 2023-12-30 21:42:59
* @since 2.6.0
*/
@Data
public class JobTaskConfig {
/**
* Job ID
*/
private Long jobId;
/**
* Job name
*/
private String jobName;
}

View File

@ -0,0 +1,162 @@
package com.aizuda.snailjob.client.job.core.dto;
import lombok.Data;
import java.util.List;
import java.util.Set;
/**
* @author xiaowoniu
* @date 2023-12-14 22:59:33
* @since 2.6.0
*/
@Data
public class WorkflowDetailResponseVO {
/**
* Workflow ID
*/
private Long id;
/**
* Workflow name
*/
private String workflowName;
/**
* Group name
*/
private String groupName;
/**
* Trigger type
*/
private Integer triggerType;
/**
* Block strategy
*/
private Integer blockStrategy;
/**
* Trigger interval
*/
private String triggerInterval;
/**
* Executor timeout
*/
private Integer executorTimeout;
/**
* Workflow status: 0 disabled, 1 enabled
*/
private Integer workflowStatus;
/**
* see: {@link com.aizuda.snailjob.common.core.enums.JobTaskBatchStatusEnum}
*/
private Integer workflowBatchStatus;
/**
* Workflow context
*/
private String wfContext;
/**
* DAG node configuration
*/
private NodeConfig nodeConfig;
@Data
public static class NodeConfig {
/**
* Node type: 1 job node, 2 decision node, 3 callback node
*/
private Integer nodeType;
/**
* Node info
*/
private List<NodeInfo> conditionNodes;
/**
* Child node
*/
private NodeConfig childNode;
}
@Data
public static class NodeInfo {
/**
* Node ID
*/
private Long id;
/**
* Node type: 1 job node, 2 decision node, 3 callback node
*/
private Integer nodeType;
/**
* Node name
*/
private String nodeName;
/**
* Priority level
*/
private Integer priorityLevel;
/**
* Workflow node status: 0 disabled, 1 enabled
*/
private Integer workflowNodeStatus;
/**
* Failure strategy: 1 skip, 2 block
*/
private Integer failStrategy;
/**
* Task batch status
*/
private Integer taskBatchStatus;
/**
* Decision configuration
*/
private DecisionConfig decision;
/**
* Callback configuration
*/
private CallbackConfig callback;
/**
* Job task configuration
*/
private JobTaskConfig jobTask;
/**
* Scheduled job batch info
*/
private List<JobBatchResponseVO> jobBatchList;
/**
* Child node
*/
private NodeConfig childNode;
}
/**
* Notification alarm scene config ID list
*/
private Set<Long> notifyIds;
}
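
Because NodeConfig nests the DAG level by level — each level holds its branch nodes in conditionNodes and points to the next level through childNode, and every NodeInfo may carry its own child subtree — a caller usually walks the structure recursively to read the per-node batch lists. The helper below is a hypothetical sketch built only on the DTOs declared above (the WorkflowWalker name is invented for illustration).

import com.aizuda.snailjob.client.job.core.dto.JobBatchResponseVO;
import com.aizuda.snailjob.client.job.core.dto.WorkflowDetailResponseVO;
import java.util.ArrayList;
import java.util.List;

// Hypothetical helper: flattens the nested NodeConfig tree into one batch list.
public class WorkflowWalker {
    public static List<JobBatchResponseVO> collectBatches(WorkflowDetailResponseVO.NodeConfig config) {
        List<JobBatchResponseVO> batches = new ArrayList<>();
        collect(config, batches);
        return batches;
    }

    private static void collect(WorkflowDetailResponseVO.NodeConfig config, List<JobBatchResponseVO> out) {
        if (config == null || config.getConditionNodes() == null) {
            return;
        }
        for (WorkflowDetailResponseVO.NodeInfo node : config.getConditionNodes()) {
            if (node.getJobBatchList() != null) {
                out.addAll(node.getJobBatchList());
            }
            // Each condition node may carry its own child subtree.
            collect(node.getChildNode(), out);
        }
        // The level itself also points to the next level of the DAG.
        collect(config.getChildNode(), out);
    }
}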

View File

@ -0,0 +1,50 @@
package com.aizuda.snailjob.client.job.core.handler.query;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.lang.Pair;
import com.aizuda.snailjob.client.common.exception.SnailJobClientException;
import com.aizuda.snailjob.client.job.core.dto.JobBatchResponseVO;
import com.aizuda.snailjob.client.job.core.handler.AbstractRequestHandler;
import com.aizuda.snailjob.common.core.enums.StatusEnum;
import com.aizuda.snailjob.common.core.model.Result;
import com.aizuda.snailjob.common.core.util.JsonUtil;
import java.util.Objects;
/**
* @since 1.5.0
*/
public class RequestQueryJobBatchHandler extends AbstractRequestHandler<JobBatchResponseVO> {
private final Long queryJobBatchId;
public RequestQueryJobBatchHandler(Long queryJobBatchId) {
this.queryJobBatchId = queryJobBatchId;
}
@Override
protected void afterExecute(JobBatchResponseVO jobBatchResponseVO) {
}
@Override
protected void beforeExecute() {
}
@Override
protected JobBatchResponseVO doExecute() {
Result<Object> result = client.getJobBatchDetail(queryJobBatchId);
Assert.isTrue(StatusEnum.YES.getStatus() == result.getStatus(),
() -> new SnailJobClientException(result.getMessage()));
Object data = result.getData();
Assert.isTrue(Objects.nonNull(data), () -> new SnailJobClientException("获取[{}]任务批次详情失败", queryJobBatchId));
return JsonUtil.parseObject(JsonUtil.toJsonString(data), JobBatchResponseVO.class);
}
@Override
protected Pair<Boolean, String> checkRequest() {
return Pair.of(queryJobBatchId != null && !Long.valueOf(0).equals(queryJobBatchId), "queryJobBatchId不能为null并且必须大于0");
}
}

View File

@ -0,0 +1,50 @@
package com.aizuda.snailjob.client.job.core.handler.query;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.lang.Pair;
import com.aizuda.snailjob.client.common.exception.SnailJobClientException;
import com.aizuda.snailjob.client.job.core.dto.WorkflowDetailResponseVO;
import com.aizuda.snailjob.client.job.core.handler.AbstractRequestHandler;
import com.aizuda.snailjob.common.core.enums.StatusEnum;
import com.aizuda.snailjob.common.core.model.Result;
import com.aizuda.snailjob.common.core.util.JsonUtil;
import java.util.Objects;
/**
* @since 1.5.0
*/
public class RequestQueryWorkflowBatchHandler extends AbstractRequestHandler<WorkflowDetailResponseVO> {
private final Long workflowBatchId;
public RequestQueryWorkflowBatchHandler(Long workflowBatchId) {
this.workflowBatchId = workflowBatchId;
}
@Override
protected void afterExecute(WorkflowDetailResponseVO workflowDetailResponseVO) {
}
@Override
protected void beforeExecute() {
}
@Override
protected WorkflowDetailResponseVO doExecute() {
Result<Object> result = client.getWorkflowBatchDetail(workflowBatchId);
Assert.isTrue(StatusEnum.YES.getStatus() == result.getStatus(),
() -> new SnailJobClientException(result.getMessage()));
Object data = result.getData();
Assert.isTrue(Objects.nonNull(data), () -> new SnailJobClientException("获取[{}]工作流任务批次详情失败", workflowBatchId));
return JsonUtil.parseObject(JsonUtil.toJsonString(data), WorkflowDetailResponseVO.class);
}
@Override
protected Pair<Boolean, String> checkRequest() {
return Pair.of(workflowBatchId != null && !Long.valueOf(0).equals(workflowBatchId), "workflowBatchId不能为null并且必须大于0");
}
}

View File

@ -19,6 +19,12 @@ public interface OpenApiClient {
@Mapping(method = RequestMethod.POST, path = "/api/job/getJobDetail")
Result<Object> getJobDetail(Long jobId);
@Mapping(method = RequestMethod.POST, path = "/api/job/getJobBatchDetail")
Result<Object> getJobBatchDetail(Long jobBatchId);
@Mapping(method = RequestMethod.POST, path = "/api/job/getWorkflowBatchDetail")
Result<Object> getWorkflowBatchDetail(Long jobBatchId);
@Mapping(method = RequestMethod.POST, path = "/api/job/triggerJob")
Result<Object> triggerJob(JobTriggerDTO jobTriggerDTO);

View File

@ -4,6 +4,8 @@ import com.aizuda.snailjob.client.job.core.handler.add.*;
import com.aizuda.snailjob.client.job.core.handler.delete.DeleteJobHandler;
import com.aizuda.snailjob.client.job.core.handler.delete.DeleteWorkflowHandler;
import com.aizuda.snailjob.client.job.core.handler.query.RequestQueryHandler;
import com.aizuda.snailjob.client.job.core.handler.query.RequestQueryJobBatchHandler;
import com.aizuda.snailjob.client.job.core.handler.query.RequestQueryWorkflowBatchHandler;
import com.aizuda.snailjob.client.job.core.handler.trigger.*;
import com.aizuda.snailjob.client.job.core.handler.update.*;
@ -123,6 +125,26 @@ public final class SnailJobOpenApi {
return new RequestQueryHandler(jobId);
}
/**
* Get job batch detail
*
* @param jobBatchId job batch ID
* @return {@link RequestQueryJobBatchHandler}
*/
public static RequestQueryJobBatchHandler getJobBatchDetail(Long jobBatchId) {
return new RequestQueryJobBatchHandler(jobBatchId);
}
/**
* Get workflow batch detail
*
* @param workflowBatchId workflow batch ID
* @return {@link RequestQueryWorkflowBatchHandler}
*/
public static RequestQueryWorkflowBatchHandler getWorkflowBatchDetail(Long workflowBatchId) {
return new RequestQueryWorkflowBatchHandler(workflowBatchId);
}
/**
* Manually trigger a broadcast scheduled job
*

View File

@ -151,6 +151,10 @@ public interface SystemConstants {
String OPENAPI_GET_JOB_DETAIL = "/api/job/getJobDetail";
String OPENAPI_GET_JOB_BATCH_DETAIL = "/api/job/getJobBatchDetail";
String OPENAPI_GET_WORKFLOW_BATCH_DETAIL = "/api/job/getWorkflowBatchDetail";
String OPENAPI_TRIGGER_JOB = "/api/job/triggerJob";
String OPENAPI_TRIGGER_WORKFLOW = "/api/job/triggerWorkFlow";

View File

@ -1,7 +1,7 @@
package com.aizuda.snailjob.server.web.service.convert;
package com.aizuda.snailjob.server.common.convert;
import com.aizuda.snailjob.server.common.util.DateUtils;
import com.aizuda.snailjob.server.web.model.response.JobBatchResponseVO;
import com.aizuda.snailjob.server.common.vo.JobBatchResponseVO;
import com.aizuda.snailjob.template.datasource.persistence.dataobject.JobBatchResponseDO;
import com.aizuda.snailjob.template.datasource.persistence.po.Job;
import com.aizuda.snailjob.template.datasource.persistence.po.JobTaskBatch;

View File

@ -1,4 +1,4 @@
package com.aizuda.snailjob.server.web.service.convert;
package com.aizuda.snailjob.server.common.convert;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.util.StrUtil;
@ -8,10 +8,10 @@ import com.aizuda.snailjob.server.common.dto.CallbackConfig;
import com.aizuda.snailjob.server.common.dto.DecisionConfig;
import com.aizuda.snailjob.server.common.dto.JobTaskConfig;
import com.aizuda.snailjob.server.common.util.DateUtils;
import com.aizuda.snailjob.server.web.model.request.WorkflowRequestVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowBatchResponseVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowBatchResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowResponseVO;
import com.aizuda.snailjob.server.common.vo.request.WorkflowRequestVO;
import com.aizuda.snailjob.server.common.vo.WorkflowDetailResponseVO;
import com.aizuda.snailjob.template.datasource.persistence.dataobject.WorkflowBatchResponseDO;
import com.aizuda.snailjob.template.datasource.persistence.po.Workflow;
import com.aizuda.snailjob.template.datasource.persistence.po.WorkflowNode;

View File

@ -1,4 +1,4 @@
package com.aizuda.snailjob.server.web.service.handler;
package com.aizuda.snailjob.server.common.handler;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.lang.Assert;
@ -9,11 +9,11 @@ import com.aizuda.snailjob.server.common.dto.CallbackConfig;
import com.aizuda.snailjob.server.common.dto.DecisionConfig;
import com.aizuda.snailjob.server.common.dto.JobTaskConfig;
import com.aizuda.snailjob.server.common.exception.SnailJobServerException;
import com.aizuda.snailjob.server.web.model.request.WorkflowRequestVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO.NodeConfig;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO.NodeInfo;
import com.aizuda.snailjob.server.web.service.convert.WorkflowConverter;
import com.aizuda.snailjob.server.common.vo.request.WorkflowRequestVO;
import com.aizuda.snailjob.server.common.vo.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowDetailResponseVO.NodeConfig;
import com.aizuda.snailjob.server.common.vo.WorkflowDetailResponseVO.NodeInfo;
import com.aizuda.snailjob.server.common.convert.WorkflowConverter;
import com.aizuda.snailjob.template.datasource.persistence.mapper.WorkflowNodeMapper;
import com.aizuda.snailjob.template.datasource.persistence.po.WorkflowNode;
import com.google.common.collect.Lists;

View File

@ -1,4 +1,4 @@
package com.aizuda.snailjob.server.web.model.response;
package com.aizuda.snailjob.server.common.vo;
import com.aizuda.snailjob.server.common.dto.CallbackConfig;
import com.aizuda.snailjob.server.common.dto.DecisionConfig;

View File

@ -1,4 +1,4 @@
package com.aizuda.snailjob.server.web.model.response;
package com.aizuda.snailjob.server.common.vo;
import com.aizuda.snailjob.server.common.dto.CallbackConfig;
import com.aizuda.snailjob.server.common.dto.DecisionConfig;

View File

@ -1,4 +1,4 @@
package com.aizuda.snailjob.server.web.model.request;
package com.aizuda.snailjob.server.common.vo.request;
import com.aizuda.snailjob.server.common.dto.CallbackConfig;
import com.aizuda.snailjob.server.common.dto.DecisionConfig;

View File

@ -0,0 +1,94 @@
package com.aizuda.snailjob.server.job.task.support.request;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.net.url.UrlQuery;
import com.aizuda.snailjob.common.core.constant.SystemConstants;
import com.aizuda.snailjob.common.core.constant.SystemConstants.HTTP_PATH;
import com.aizuda.snailjob.common.core.model.SnailJobRequest;
import com.aizuda.snailjob.common.core.model.SnailJobRpcResult;
import com.aizuda.snailjob.common.core.util.JsonUtil;
import com.aizuda.snailjob.common.log.SnailJobLog;
import com.aizuda.snailjob.server.common.convert.JobBatchResponseVOConverter;
import com.aizuda.snailjob.server.common.dto.CallbackConfig;
import com.aizuda.snailjob.server.common.dto.DecisionConfig;
import com.aizuda.snailjob.server.common.enums.SyetemTaskTypeEnum;
import com.aizuda.snailjob.server.common.exception.SnailJobServerException;
import com.aizuda.snailjob.server.common.handler.PostHttpRequestHandler;
import com.aizuda.snailjob.server.common.vo.JobBatchResponseVO;
import com.aizuda.snailjob.template.datasource.persistence.mapper.JobMapper;
import com.aizuda.snailjob.template.datasource.persistence.mapper.JobTaskBatchMapper;
import com.aizuda.snailjob.template.datasource.persistence.mapper.WorkflowNodeMapper;
import com.aizuda.snailjob.template.datasource.persistence.po.Job;
import com.aizuda.snailjob.template.datasource.persistence.po.JobTaskBatch;
import com.aizuda.snailjob.template.datasource.persistence.po.WorkflowNode;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpMethod;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Component;
import java.util.Objects;
/**
* OPENAPI
* Get scheduled job batch detail
* @since 1.5.0
*/
@Component
@RequiredArgsConstructor
public class OpenApiGetJobBatchDetailRequestHandler extends PostHttpRequestHandler {
private final JobMapper jobMapper;
private final JobTaskBatchMapper jobTaskBatchMapper;
private final WorkflowNodeMapper workflowNodeMapper;
@Override
public boolean supports(String path) {
return HTTP_PATH.OPENAPI_GET_JOB_BATCH_DETAIL.equals(path);
}
@Override
public HttpMethod method() {
return HttpMethod.POST;
}
@Override
public SnailJobRpcResult doHandler(String content, UrlQuery query, HttpHeaders headers) {
SnailJobLog.LOCAL.debug("query job batch content:[{}]", content);
SnailJobRequest jobRequest = JsonUtil.parseObject(content, SnailJobRequest.class);
Object[] args = jobRequest.getArgs();
Long jobBatchId = JsonUtil.parseObject(JsonUtil.toJsonString(args[0]), Long.class);
Assert.notNull(jobBatchId, () -> new SnailJobServerException("id 不能为空"));
JobTaskBatch jobTaskBatch = jobTaskBatchMapper.selectById(jobBatchId);
if (Objects.isNull(jobTaskBatch)) {
return new SnailJobRpcResult(null, jobRequest.getReqId());
}
Job job = jobMapper.selectById(jobTaskBatch.getJobId());
JobBatchResponseVO jobBatchResponseVO = JobBatchResponseVOConverter.INSTANCE.convert(jobTaskBatch, job);
if (jobTaskBatch.getSystemTaskType().equals(SyetemTaskTypeEnum.WORKFLOW.getType())) {
WorkflowNode workflowNode = workflowNodeMapper.selectById(jobTaskBatch.getWorkflowNodeId());
jobBatchResponseVO.setNodeName(workflowNode.getNodeName());
// Callback node
if (SystemConstants.CALLBACK_JOB_ID.equals(jobTaskBatch.getJobId())) {
jobBatchResponseVO.setCallback(JsonUtil.parseObject(workflowNode.getNodeInfo(), CallbackConfig.class));
jobBatchResponseVO.setExecutionAt(jobTaskBatch.getCreateDt());
return new SnailJobRpcResult(jobBatchResponseVO, jobRequest.getReqId());
}
// Decision node
if (SystemConstants.DECISION_JOB_ID.equals(jobTaskBatch.getJobId())) {
jobBatchResponseVO.setDecision(JsonUtil.parseObject(workflowNode.getNodeInfo(), DecisionConfig.class));
jobBatchResponseVO.setExecutionAt(jobTaskBatch.getCreateDt());
return new SnailJobRpcResult(jobBatchResponseVO, jobRequest.getReqId());
}
}
return new SnailJobRpcResult(jobBatchResponseVO, jobRequest.getReqId());
}
}

View File

@ -0,0 +1,197 @@
package com.aizuda.snailjob.server.job.task.support.request;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.net.url.UrlQuery;
import com.aizuda.snailjob.common.core.constant.SystemConstants;
import com.aizuda.snailjob.common.core.constant.SystemConstants.HTTP_PATH;
import com.aizuda.snailjob.common.core.enums.JobOperationReasonEnum;
import com.aizuda.snailjob.common.core.enums.JobTaskBatchStatusEnum;
import com.aizuda.snailjob.common.core.enums.StatusEnum;
import com.aizuda.snailjob.common.core.model.SnailJobRequest;
import com.aizuda.snailjob.common.core.model.SnailJobRpcResult;
import com.aizuda.snailjob.common.core.util.JsonUtil;
import com.aizuda.snailjob.common.core.util.StreamUtils;
import com.aizuda.snailjob.common.log.SnailJobLog;
import com.aizuda.snailjob.server.common.convert.JobBatchResponseVOConverter;
import com.aizuda.snailjob.server.common.convert.WorkflowConverter;
import com.aizuda.snailjob.server.common.dto.JobTaskConfig;
import com.aizuda.snailjob.server.common.exception.SnailJobServerException;
import com.aizuda.snailjob.server.common.handler.PostHttpRequestHandler;
import com.aizuda.snailjob.server.common.handler.WorkflowHandler;
import com.aizuda.snailjob.server.common.vo.JobBatchResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.job.task.support.cache.MutableGraphCache;
import com.aizuda.snailjob.template.datasource.persistence.mapper.*;
import com.aizuda.snailjob.template.datasource.persistence.po.*;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.graph.MutableGraph;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpMethod;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Component;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* OPENAPI
* Get workflow batch detail
* @since 1.5.0
*/
@Component
@RequiredArgsConstructor
public class OpenApiGetWorkflowBatchDetailRequestHandler extends PostHttpRequestHandler {
private static final Integer WORKFLOW_DECISION_FAILED_STATUS = 98;
private final WorkflowTaskBatchMapper workflowTaskBatchMapper;
private final WorkflowMapper workflowMapper;
private final WorkflowNodeMapper workflowNodeMapper;
private final JobTaskBatchMapper jobTaskBatchMapper;
private final WorkflowHandler workflowHandler;
private final JobMapper jobMapper;
@Override
public boolean supports(String path) {
return HTTP_PATH.OPENAPI_GET_WORKFLOW_BATCH_DETAIL.equals(path);
}
@Override
public HttpMethod method() {
return HttpMethod.POST;
}
@Override
public SnailJobRpcResult doHandler(String content, UrlQuery query, HttpHeaders headers) {
SnailJobLog.LOCAL.debug("query workflow batch content:[{}]", content);
SnailJobRequest jobRequest = JsonUtil.parseObject(content, SnailJobRequest.class);
Object[] args = jobRequest.getArgs();
Long workflowBatchId = JsonUtil.parseObject(JsonUtil.toJsonString(args[0]), Long.class);
Assert.notNull(workflowBatchId, () -> new SnailJobServerException("id 不能为空"));
WorkflowTaskBatch workflowTaskBatch = workflowTaskBatchMapper.selectOne(
new LambdaQueryWrapper<WorkflowTaskBatch>()
.eq(WorkflowTaskBatch::getId, workflowBatchId));
if (Objects.isNull(workflowTaskBatch)) {
return new SnailJobRpcResult(null, jobRequest.getReqId());
}
Workflow workflow = workflowMapper.selectById(workflowTaskBatch.getWorkflowId());
WorkflowDetailResponseVO responseVO = WorkflowConverter.INSTANCE.convert(workflow);
responseVO.setWorkflowBatchStatus(workflowTaskBatch.getTaskBatchStatus());
List<WorkflowNode> workflowNodes = workflowNodeMapper.selectList(new LambdaQueryWrapper<WorkflowNode>()
.eq(WorkflowNode::getDeleted, StatusEnum.NO.getStatus())
.eq(WorkflowNode::getWorkflowId, workflow.getId()));
List<Job> jobs = jobMapper.selectList(
new LambdaQueryWrapper<Job>()
.in(Job::getId, StreamUtils.toSet(workflowNodes, WorkflowNode::getJobId)));
Map<Long, Job> jobMap = StreamUtils.toIdentityMap(jobs, Job::getId);
List<JobTaskBatch> alJobTaskBatchList = jobTaskBatchMapper.selectList(
new LambdaQueryWrapper<JobTaskBatch>()
.eq(JobTaskBatch::getWorkflowTaskBatchId, workflowBatchId)
.orderByDesc(JobTaskBatch::getId));
Map<Long, List<JobTaskBatch>> jobTaskBatchMap = StreamUtils.groupByKey(alJobTaskBatchList,
JobTaskBatch::getWorkflowNodeId);
List<WorkflowDetailResponseVO.NodeInfo> nodeInfos = WorkflowConverter.INSTANCE.convertList(workflowNodes);
String flowInfo = workflowTaskBatch.getFlowInfo();
MutableGraph<Long> graph = MutableGraphCache.getOrDefault(workflowBatchId, flowInfo);
Set<Long> allNoOperationNode = Sets.newHashSet();
Map<Long, WorkflowDetailResponseVO.NodeInfo> workflowNodeMap = nodeInfos.stream()
.peek(nodeInfo -> {
JobTaskConfig jobTask = nodeInfo.getJobTask();
if (Objects.nonNull(jobTask)) {
jobTask.setJobName(jobMap.getOrDefault(jobTask.getJobId(), new Job()).getJobName());
}
List<JobTaskBatch> jobTaskBatchList = jobTaskBatchMap.get(nodeInfo.getId());
if (CollUtil.isNotEmpty(jobTaskBatchList)) {
jobTaskBatchList = jobTaskBatchList.stream()
.sorted(Comparator.comparingInt(JobTaskBatch::getTaskBatchStatus))
.collect(Collectors.toList());
nodeInfo.setJobBatchList(
JobBatchResponseVOConverter.INSTANCE.convertListToJobBatchList(jobTaskBatchList));
// Take the status of the latest batch record
JobTaskBatch jobTaskBatch = jobTaskBatchList.get(0);
if (JobOperationReasonEnum.WORKFLOW_DECISION_FAILED.getReason()
== jobTaskBatch.getOperationReason()) {
// Used for front-end display
nodeInfo.setTaskBatchStatus(WORKFLOW_DECISION_FAILED_STATUS);
} else {
nodeInfo.setTaskBatchStatus(jobTaskBatch.getTaskBatchStatus());
}
if (jobTaskBatchList.stream()
.filter(Objects::nonNull)
.anyMatch(OpenApiGetWorkflowBatchDetailRequestHandler::isNoOperation)) {
// All descendant nodes of the current node require no processing
Set<Long> allDescendants = MutableGraphCache.getAllDescendants(graph, nodeInfo.getId());
allNoOperationNode.addAll(allDescendants);
} else {
// Remove nodes that were added by mistake
allNoOperationNode.remove(nodeInfo.getId());
}
} else {
if (JobTaskBatchStatusEnum.NOT_SUCCESS.contains(workflowTaskBatch.getTaskBatchStatus())) {
allNoOperationNode.add(nodeInfo.getId());
}
}
})
.collect(Collectors.toMap(WorkflowDetailResponseVO.NodeInfo::getId, Function.identity()));
for (Long noOperationNodeId : allNoOperationNode) {
WorkflowDetailResponseVO.NodeInfo nodeInfo = workflowNodeMap.get(noOperationNodeId);
List<JobTaskBatch> jobTaskBatches = jobTaskBatchMap.get(nodeInfo.getId());
if (CollUtil.isNotEmpty(jobTaskBatches)) {
jobTaskBatches = jobTaskBatches.stream()
.sorted(Comparator.comparingInt(JobTaskBatch::getTaskBatchStatus))
.collect(Collectors.toList());
nodeInfo.setJobBatchList(
JobBatchResponseVOConverter.INSTANCE.convertListToJobBatchList(jobTaskBatches));
} else {
JobBatchResponseVO jobBatchResponseVO = new JobBatchResponseVO();
JobTaskConfig jobTask = nodeInfo.getJobTask();
if (Objects.nonNull(jobTask)) {
jobBatchResponseVO.setJobId(jobTask.getJobId());
}
// Provided only for front-end display
// nodeInfo.setTaskBatchStatus(NOT_HANDLE_STATUS);
// jobBatchResponseVO.setTaskBatchStatus(NOT_HANDLE_STATUS);
// jobBatchResponseVO.setOperationReason(JobOperationReasonEnum.WORKFLOW_NODE_NO_REQUIRED.getReason());
nodeInfo.setJobBatchList(Lists.newArrayList(jobBatchResponseVO));
}
}
try {
// Deserialize and build the graph
WorkflowDetailResponseVO.NodeConfig config = workflowHandler.buildNodeConfig(graph, SystemConstants.ROOT,
new HashMap<>(), workflowNodeMap);
responseVO.setNodeConfig(config);
} catch (Exception e) {
SnailJobLog.LOCAL.error("反序列化失败. json:[{}]", flowInfo, e);
throw new SnailJobServerException("查询工作流批次详情失败");
}
return new SnailJobRpcResult(responseVO, jobRequest.getReqId());
}
private static boolean isNoOperation(JobTaskBatch i) {
return JobOperationReasonEnum.WORKFLOW_SUCCESSOR_SKIP_EXECUTION.contains(i.getOperationReason())
|| i.getTaskBatchStatus() == JobTaskBatchStatusEnum.STOP.getStatus();
}
}

View File

@ -12,8 +12,6 @@ import com.aizuda.snailjob.server.web.model.response.GroupConfigResponseVO;
import com.aizuda.snailjob.server.web.service.GroupConfigService;
import com.aizuda.snailjob.server.web.util.ExportUtils;
import com.aizuda.snailjob.server.web.util.ImportUtils;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;
import lombok.RequiredArgsConstructor;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
@ -23,7 +21,6 @@ import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.List;
import java.util.Set;
/**
* Retry group API

View File

@ -3,7 +3,7 @@ package com.aizuda.snailjob.server.web.controller;
import com.aizuda.snailjob.server.web.annotation.LoginRequired;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.JobBatchQueryVO;
import com.aizuda.snailjob.server.web.model.response.JobBatchResponseVO;
import com.aizuda.snailjob.server.common.vo.JobBatchResponseVO;
import com.aizuda.snailjob.server.web.service.JobBatchService;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.Size;

View File

@ -4,8 +4,8 @@ import com.aizuda.snailjob.server.web.annotation.LoginRequired;
import com.aizuda.snailjob.server.web.annotation.RoleEnum;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.WorkflowBatchQueryVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowBatchResponseVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowBatchResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.web.service.WorkflowBatchService;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.Size;

View File

@ -2,12 +2,13 @@ package com.aizuda.snailjob.server.web.controller;
import cn.hutool.core.lang.Pair;
import com.aizuda.snailjob.common.core.annotation.OriginalControllerReturnValue;
import com.aizuda.snailjob.server.common.vo.request.WorkflowRequestVO;
import com.aizuda.snailjob.server.web.annotation.LoginRequired;
import com.aizuda.snailjob.server.web.annotation.RoleEnum;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.*;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowResponseVO;
import com.aizuda.snailjob.server.web.service.WorkflowService;
import com.aizuda.snailjob.server.web.util.ExportUtils;
import com.aizuda.snailjob.server.web.util.ImportUtils;

View File

@ -2,7 +2,7 @@ package com.aizuda.snailjob.server.web.service;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.JobBatchQueryVO;
import com.aizuda.snailjob.server.web.model.response.JobBatchResponseVO;
import com.aizuda.snailjob.server.common.vo.JobBatchResponseVO;
import java.util.List;
import java.util.Set;

View File

@ -2,8 +2,8 @@ package com.aizuda.snailjob.server.web.service;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.WorkflowBatchQueryVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowBatchResponseVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowBatchResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowDetailResponseVO;
import java.util.List;
import java.util.Set;

View File

@ -1,11 +1,11 @@
package com.aizuda.snailjob.server.web.service;
import cn.hutool.core.lang.Pair;
import com.aizuda.snailjob.server.common.dto.DecisionConfig;
import com.aizuda.snailjob.server.common.vo.request.WorkflowRequestVO;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.*;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowResponseVO;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotEmpty;

View File

@ -2,7 +2,6 @@ package com.aizuda.snailjob.server.web.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.util.HashUtil;
import cn.hutool.core.util.ReUtil;
import cn.hutool.core.util.StrUtil;
import com.aizuda.snailjob.common.core.enums.StatusEnum;

View File

@ -1,7 +1,6 @@
package com.aizuda.snailjob.server.web.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.util.ObjUtil;
import cn.hutool.core.util.StrUtil;
import com.aizuda.snailjob.common.core.constant.SystemConstants;
@ -9,13 +8,12 @@ import com.aizuda.snailjob.common.core.util.JsonUtil;
import com.aizuda.snailjob.server.common.dto.CallbackConfig;
import com.aizuda.snailjob.server.common.dto.DecisionConfig;
import com.aizuda.snailjob.server.common.enums.SyetemTaskTypeEnum;
import com.aizuda.snailjob.server.common.exception.SnailJobServerException;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.JobBatchQueryVO;
import com.aizuda.snailjob.server.web.model.request.UserSessionVO;
import com.aizuda.snailjob.server.web.model.response.JobBatchResponseVO;
import com.aizuda.snailjob.server.common.vo.JobBatchResponseVO;
import com.aizuda.snailjob.server.web.service.JobBatchService;
import com.aizuda.snailjob.server.web.service.convert.JobBatchResponseVOConverter;
import com.aizuda.snailjob.server.common.convert.JobBatchResponseVOConverter;
import com.aizuda.snailjob.server.web.service.handler.JobHandler;
import com.aizuda.snailjob.server.web.util.UserSessionUtils;
import com.aizuda.snailjob.template.datasource.persistence.dataobject.JobBatchResponseDO;
@ -25,7 +23,6 @@ import com.aizuda.snailjob.template.datasource.persistence.mapper.WorkflowNodeMa
import com.aizuda.snailjob.template.datasource.persistence.po.Job;
import com.aizuda.snailjob.template.datasource.persistence.po.JobTaskBatch;
import com.aizuda.snailjob.template.datasource.persistence.po.WorkflowNode;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.PageDTO;
import lombok.RequiredArgsConstructor;

View File

@ -18,6 +18,7 @@ import com.aizuda.snailjob.server.common.strategy.WaitStrategies;
import com.aizuda.snailjob.server.common.util.CronUtils;
import com.aizuda.snailjob.server.common.util.DateUtils;
import com.aizuda.snailjob.server.common.util.PartitionTaskUtils;
import com.aizuda.snailjob.server.web.model.request.UserSessionVO;
import com.aizuda.snailjob.server.job.task.dto.JobTaskPrepareDTO;
import com.aizuda.snailjob.server.job.task.support.JobPrepareHandler;
import com.aizuda.snailjob.server.job.task.support.JobTaskConverter;
@ -40,7 +41,6 @@ import com.aizuda.snailjob.template.datasource.persistence.po.JobSummary;
import com.aizuda.snailjob.template.datasource.persistence.po.SystemUser;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.PageDTO;
import com.google.common.collect.Lists;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
@ -51,7 +51,6 @@ import org.springframework.transaction.annotation.Transactional;
import org.springframework.validation.annotation.Validated;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author opensnail

View File

@ -1,18 +1,14 @@
package com.aizuda.snailjob.server.web.service.impl;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.lang.Assert;
import com.aizuda.snailjob.common.core.util.StreamUtils;
import com.aizuda.snailjob.server.common.exception.SnailJobServerException;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.JobTaskQueryVO;
import com.aizuda.snailjob.server.web.model.response.JobTaskResponseVO;
import com.aizuda.snailjob.server.web.service.JobTaskService;
import com.aizuda.snailjob.server.web.service.convert.JobTaskResponseVOConverter;
import com.aizuda.snailjob.server.web.util.UserSessionUtils;
import com.aizuda.snailjob.template.datasource.persistence.mapper.JobLogMessageMapper;
import com.aizuda.snailjob.template.datasource.persistence.mapper.JobTaskMapper;
import com.aizuda.snailjob.template.datasource.persistence.po.JobLogMessage;
import com.aizuda.snailjob.template.datasource.persistence.po.JobTask;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.PageDTO;

View File

@ -15,7 +15,6 @@ import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.RetryTaskLogMessageQueryVO;
import com.aizuda.snailjob.server.web.model.request.RetryTaskQueryVO;
import com.aizuda.snailjob.server.web.model.request.UserSessionVO;
import com.aizuda.snailjob.server.web.model.response.RetryResponseVO;
import com.aizuda.snailjob.server.web.model.response.RetryTaskLogMessageResponseVO;
import com.aizuda.snailjob.server.web.model.response.RetryTaskResponseVO;
import com.aizuda.snailjob.server.web.service.RetryTaskService;

View File

@ -7,6 +7,7 @@ import cn.hutool.crypto.SecureUtil;
import com.aizuda.snailjob.common.core.util.JsonUtil;
import com.aizuda.snailjob.common.core.util.StreamUtils;
import com.aizuda.snailjob.server.common.exception.SnailJobServerException;
import com.aizuda.snailjob.server.web.model.request.UserSessionVO;
import com.aizuda.snailjob.server.web.annotation.RoleEnum;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.*;

View File

@ -16,14 +16,14 @@ import com.aizuda.snailjob.server.job.task.support.handler.WorkflowBatchHandler;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.UserSessionVO;
import com.aizuda.snailjob.server.web.model.request.WorkflowBatchQueryVO;
import com.aizuda.snailjob.server.web.model.response.JobBatchResponseVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowBatchResponseVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.common.vo.JobBatchResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowBatchResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.web.service.WorkflowBatchService;
import com.aizuda.snailjob.server.web.service.convert.JobBatchResponseVOConverter;
import com.aizuda.snailjob.server.web.service.convert.WorkflowConverter;
import com.aizuda.snailjob.server.common.convert.JobBatchResponseVOConverter;
import com.aizuda.snailjob.server.common.convert.WorkflowConverter;
import com.aizuda.snailjob.server.web.service.handler.JobHandler;
import com.aizuda.snailjob.server.web.service.handler.WorkflowHandler;
import com.aizuda.snailjob.server.common.handler.WorkflowHandler;
import com.aizuda.snailjob.server.web.util.UserSessionUtils;
import com.aizuda.snailjob.template.datasource.persistence.dataobject.WorkflowBatchResponseDO;
import com.aizuda.snailjob.template.datasource.persistence.mapper.*;

View File

@ -25,19 +25,21 @@ import com.aizuda.snailjob.server.common.util.CronUtils;
import com.aizuda.snailjob.server.common.util.DateUtils;
import com.aizuda.snailjob.server.common.util.GraphUtils;
import com.aizuda.snailjob.server.common.util.PartitionTaskUtils;
import com.aizuda.snailjob.server.web.model.request.UserSessionVO;
import com.aizuda.snailjob.server.common.vo.request.WorkflowRequestVO;
import com.aizuda.snailjob.server.job.task.dto.WorkflowTaskPrepareDTO;
import com.aizuda.snailjob.server.job.task.support.WorkflowPrePareHandler;
import com.aizuda.snailjob.server.job.task.support.WorkflowTaskConverter;
import com.aizuda.snailjob.server.job.task.support.expression.ExpressionInvocationHandler;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.*;
import com.aizuda.snailjob.server.web.model.request.WorkflowRequestVO.NodeConfig;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowResponseVO;
import com.aizuda.snailjob.server.common.vo.request.WorkflowRequestVO.NodeConfig;
import com.aizuda.snailjob.server.common.vo.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.common.vo.WorkflowResponseVO;
import com.aizuda.snailjob.server.web.service.WorkflowService;
import com.aizuda.snailjob.server.web.service.convert.WorkflowConverter;
import com.aizuda.snailjob.server.common.convert.WorkflowConverter;
import com.aizuda.snailjob.server.web.service.handler.GroupHandler;
import com.aizuda.snailjob.server.web.service.handler.WorkflowHandler;
import com.aizuda.snailjob.server.common.handler.WorkflowHandler;
import com.aizuda.snailjob.server.web.util.UserSessionUtils;
import com.aizuda.snailjob.template.datasource.access.AccessTemplate;
import com.aizuda.snailjob.template.datasource.persistence.mapper.JobMapper;
@ -48,7 +50,6 @@ import com.aizuda.snailjob.template.datasource.persistence.po.*;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.PageDTO;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.graph.ElementOrder;
import com.google.common.graph.GraphBuilder;
import com.google.common.graph.MutableGraph;