feat(sj_1.0.0): optimize workflow import/export

opensnail 2024-05-30 17:12:53 +08:00
parent b80da2d616
commit 553379b078
6 changed files with 171 additions and 115 deletions

View File: WorkflowController.java

@ -7,6 +7,7 @@ import com.aizuda.snailjob.server.common.dto.DecisionConfig;
import com.aizuda.snailjob.server.web.annotation.LoginRequired;
import com.aizuda.snailjob.server.web.annotation.RoleEnum;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.ExportWorkflowVO;
import com.aizuda.snailjob.server.web.model.request.WorkflowQueryVO;
import com.aizuda.snailjob.server.web.model.request.WorkflowRequestVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO;
@ -96,10 +97,6 @@ public class WorkflowController {
@LoginRequired
@PostMapping(value = "/import", consumes = MediaType.MULTIPART_FORM_DATA_VALUE)
public void importScene(@RequestPart("file") MultipartFile file) throws IOException {
if (file.isEmpty()) {
throw new SnailJobCommonException("请选择一个文件上传");
}
// Persist the imported data
workflowService.importWorkflowTask(ImportUtils.parseList(file, WorkflowRequestVO.class));
}
@ -107,8 +104,8 @@ public class WorkflowController {
@LoginRequired
@PostMapping("/export")
@OriginalControllerReturnValue
public ResponseEntity<String> export(@RequestBody Set<Long> workflowIds) {
return ExportUtils.doExport(workflowService.exportWorkflowTask(workflowIds));
public ResponseEntity<String> export(@RequestBody ExportWorkflowVO exportWorkflowVO) {
return ExportUtils.doExport(workflowService.exportWorkflowTask(exportWorkflowVO));
}
}
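
For context, a minimal client-side sketch of calling the import endpoint above; the /workflow prefix, host, and file name are assumptions for illustration, not taken from the project.

import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

class WorkflowImportClientSketch {
    // Illustrative only: upload an exported JSON file to the import endpoint.
    void importWorkflows() {
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.MULTIPART_FORM_DATA);
        MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
        // The part name must match @RequestPart("file"); ImportUtils.parseList only accepts *.json uploads.
        body.add("file", new FileSystemResource("workflow-export.json"));
        new RestTemplate().postForEntity(
                "http://localhost:8080/workflow/import",   // assumed base path and host
                new HttpEntity<>(body, headers),
                Void.class);
    }
}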

View File: ExportWorkflowVO.java

@ -0,0 +1,22 @@
package com.aizuda.snailjob.server.web.model.request;
import lombok.Data;
import java.util.Set;
/**
* @author: opensnail
* @date : 2024-05-30
* @since : sj_1.0.0
*/
@Data
public class ExportWorkflowVO {
private Set<Long> workflowIds;
private String groupName;
private String workflowName;
private Integer workflowStatus;
}
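
A short sketch of how an export filter could be populated from this VO, assuming the caller sits in the same package; the group name, name prefix, and status value are placeholders, and the JSON shown in the comment is only the approximate request body.

class ExportRequestExample {
    // Illustrative only: export the workflows of one group whose name starts with "daily_".
    static ExportWorkflowVO filteredExport() {
        ExportWorkflowVO exportVO = new ExportWorkflowVO();
        exportVO.setGroupName("example_group");   // placeholder group name
        exportVO.setWorkflowName("daily_");       // matched as a prefix (likeRight) in the service below
        exportVO.setWorkflowStatus(1);            // assumed "enabled" status value
        // exportVO.setWorkflowIds(Set.of(1L, 2L)); // alternatively, restrict to explicit ids
        // Sent as the JSON body of the export endpoint, roughly:
        // {"groupName":"example_group","workflowName":"daily_","workflowStatus":1}
        return exportVO;
    }
}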

View File: WorkflowService.java

@ -3,6 +3,7 @@ package com.aizuda.snailjob.server.web.service;
import cn.hutool.core.lang.Pair;
import com.aizuda.snailjob.server.common.dto.DecisionConfig;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.ExportWorkflowVO;
import com.aizuda.snailjob.server.web.model.request.WorkflowQueryVO;
import com.aizuda.snailjob.server.web.model.request.WorkflowRequestVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO;
@ -12,7 +13,6 @@ import jakarta.validation.constraints.NotEmpty;
import java.io.IOException;
import java.util.List;
import java.util.Set;
/**
* @author xiaowoniu
@ -41,5 +41,6 @@ public interface WorkflowService {
void importWorkflowTask(@Valid @NotEmpty(message = "导入数据不能为空") List<WorkflowRequestVO> requests);
String exportWorkflowTask(Set<Long> workflowIds);
String exportWorkflowTask(ExportWorkflowVO exportWorkflowVO);
}

View File: WorkflowServiceImpl.java

@ -16,6 +16,7 @@ import com.aizuda.snailjob.server.common.WaitStrategy;
import com.aizuda.snailjob.server.common.config.SystemProperties;
import com.aizuda.snailjob.server.common.dto.DecisionConfig;
import com.aizuda.snailjob.server.common.dto.JobTaskConfig;
import com.aizuda.snailjob.server.common.dto.PartitionTask;
import com.aizuda.snailjob.server.common.enums.ExpressionTypeEnum;
import com.aizuda.snailjob.server.common.enums.JobTaskExecutorSceneEnum;
import com.aizuda.snailjob.server.common.exception.SnailJobServerException;
@ -23,12 +24,13 @@ import com.aizuda.snailjob.server.common.strategy.WaitStrategies;
import com.aizuda.snailjob.server.common.util.CronUtils;
import com.aizuda.snailjob.server.common.util.DateUtils;
import com.aizuda.snailjob.server.common.util.GraphUtils;
import com.aizuda.snailjob.server.common.util.PartitionTaskUtils;
import com.aizuda.snailjob.server.job.task.dto.WorkflowTaskPrepareDTO;
import com.aizuda.snailjob.server.job.task.support.WorkflowPrePareHandler;
import com.aizuda.snailjob.server.job.task.support.WorkflowTaskConverter;
import com.aizuda.snailjob.server.job.task.support.expression.ExpressionInvocationHandler;
import com.aizuda.snailjob.server.web.model.base.PageResult;
import com.aizuda.snailjob.server.web.model.request.SceneConfigRequestVO;
import com.aizuda.snailjob.server.web.model.request.ExportWorkflowVO;
import com.aizuda.snailjob.server.web.model.request.UserSessionVO;
import com.aizuda.snailjob.server.web.model.request.WorkflowQueryVO;
import com.aizuda.snailjob.server.web.model.request.WorkflowRequestVO;
@ -36,12 +38,10 @@ import com.aizuda.snailjob.server.web.model.request.WorkflowRequestVO.NodeConfig
import com.aizuda.snailjob.server.web.model.response.WorkflowDetailResponseVO;
import com.aizuda.snailjob.server.web.model.response.WorkflowResponseVO;
import com.aizuda.snailjob.server.web.service.WorkflowService;
import com.aizuda.snailjob.server.web.service.convert.SceneConfigConverter;
import com.aizuda.snailjob.server.web.service.convert.WorkflowConverter;
import com.aizuda.snailjob.server.web.service.handler.WorkflowHandler;
import com.aizuda.snailjob.server.web.util.UserSessionUtils;
import com.aizuda.snailjob.template.datasource.access.AccessTemplate;
import com.aizuda.snailjob.template.datasource.access.ConfigAccess;
import com.aizuda.snailjob.template.datasource.persistence.mapper.JobMapper;
import com.aizuda.snailjob.template.datasource.persistence.mapper.WorkflowMapper;
import com.aizuda.snailjob.template.datasource.persistence.mapper.WorkflowNodeMapper;
@ -53,13 +53,15 @@ import com.google.common.collect.Sets;
import com.google.common.graph.ElementOrder;
import com.google.common.graph.GraphBuilder;
import com.google.common.graph.MutableGraph;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.jetbrains.annotations.NotNull;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.LocalDateTime;
import java.util.*;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.stream.Collectors;
@ -165,43 +167,7 @@ public class WorkflowServiceImpl implements WorkflowService {
return null;
}
WorkflowDetailResponseVO responseVO = WorkflowConverter.INSTANCE.convert(workflow);
List<WorkflowNode> workflowNodes = workflowNodeMapper.selectList(new LambdaQueryWrapper<WorkflowNode>()
.eq(WorkflowNode::getDeleted, 0)
.eq(WorkflowNode::getVersion, workflow.getVersion())
.eq(WorkflowNode::getWorkflowId, id)
.orderByAsc(WorkflowNode::getPriorityLevel));
List<Long> jobIds = StreamUtils.toList(workflowNodes, WorkflowNode::getJobId);
List<Job> jobs = jobMapper.selectList(new LambdaQueryWrapper<Job>()
.in(Job::getId, new HashSet<>(jobIds)));
Map<Long, Job> jobMap = StreamUtils.toIdentityMap(jobs, Job::getId);
List<WorkflowDetailResponseVO.NodeInfo> nodeInfos = WorkflowConverter.INSTANCE.convertList(workflowNodes);
Map<Long, WorkflowDetailResponseVO.NodeInfo> workflowNodeMap = nodeInfos.stream()
.peek(nodeInfo -> {
JobTaskConfig jobTask = nodeInfo.getJobTask();
if (Objects.nonNull(jobTask)) {
jobTask.setJobName(jobMap.getOrDefault(jobTask.getJobId(), new Job()).getJobName());
}
}).collect(Collectors.toMap(WorkflowDetailResponseVO.NodeInfo::getId, i -> i));
String flowInfo = workflow.getFlowInfo();
try {
MutableGraph<Long> graph = GraphUtils.deserializeJsonToGraph(flowInfo);
// Deserialize the flow info and build the graph
WorkflowDetailResponseVO.NodeConfig config = workflowHandler.buildNodeConfig(graph, SystemConstants.ROOT,
new HashMap<>(),
workflowNodeMap);
responseVO.setNodeConfig(config);
} catch (Exception e) {
log.error("反序列化失败. json:[{}]", flowInfo, e);
throw new SnailJobServerException("查询工作流详情失败");
}
return responseVO;
return doGetWorkflowDetail(workflow);
}
@Override
@ -214,11 +180,14 @@ public class WorkflowServiceImpl implements WorkflowService {
.eq(Workflow::getDeleted, StatusEnum.NO.getStatus())
.eq(Workflow::getNamespaceId, userSessionVO.getNamespaceId())
.eq(StrUtil.isNotBlank(queryVO.getGroupName()), Workflow::getGroupName, queryVO.getGroupName())
.like(StrUtil.isNotBlank(queryVO.getWorkflowName()), Workflow::getWorkflowName, queryVO.getWorkflowName())
.eq(Objects.nonNull(queryVO.getWorkflowStatus()), Workflow::getWorkflowStatus, queryVO.getWorkflowStatus())
.like(StrUtil.isNotBlank(queryVO.getWorkflowName()), Workflow::getWorkflowName,
queryVO.getWorkflowName())
.eq(Objects.nonNull(queryVO.getWorkflowStatus()), Workflow::getWorkflowStatus,
queryVO.getWorkflowStatus())
.orderByDesc(Workflow::getId));
List<WorkflowResponseVO> jobResponseList = WorkflowConverter.INSTANCE.convertListToWorkflowList(page.getRecords());
List<WorkflowResponseVO> jobResponseList = WorkflowConverter.INSTANCE.convertListToWorkflowList(
page.getRecords());
return new PageResult<>(pageDTO, jobResponseList);
}
@ -300,7 +269,8 @@ public class WorkflowServiceImpl implements WorkflowService {
.eq(GroupConfig::getGroupStatus, StatusEnum.YES.getStatus())
);
Assert.isTrue(count > 0, () -> new SnailJobServerException("组:[{}]已经关闭,不支持手动执行.", workflow.getGroupName()));
Assert.isTrue(count > 0,
() -> new SnailJobServerException("组:[{}]已经关闭,不支持手动执行.", workflow.getGroupName()));
WorkflowTaskPrepareDTO prepareDTO = WorkflowTaskConverter.INSTANCE.toWorkflowTaskPrepareDTO(workflow);
// Set the trigger time to now, meaning execute immediately
@ -349,21 +319,36 @@ public class WorkflowServiceImpl implements WorkflowService {
}
@Override
public String exportWorkflowTask(Set<Long> workflowIds) {
List<Workflow> workflowList = workflowMapper.selectList(new LambdaQueryWrapper<Workflow>()
public String exportWorkflowTask(ExportWorkflowVO exportVO) {
List<WorkflowDetailResponseVO> resultList = new ArrayList<>();
PartitionTaskUtils.process(startId -> {
List<Workflow> workflowList = workflowMapper.selectPage(new PageDTO<>(0, 100),
new LambdaQueryWrapper<Workflow>()
.eq(Workflow::getNamespaceId, UserSessionUtils.currentUserSession().getNamespaceId())
.eq(Workflow::getDeleted, StatusEnum.NO.getStatus())
// TODO: when exporting everything, use paged queries to avoid pulling too much data at once
.in(CollUtil.isNotEmpty(workflowIds), Workflow::getId, workflowIds)
);
.eq(StrUtil.isNotBlank(exportVO.getGroupName()), Workflow::getGroupName, exportVO.getGroupName())
.eq(Objects.nonNull(exportVO.getWorkflowStatus()), Workflow::getWorkflowStatus, exportVO.getWorkflowStatus())
.likeRight(StrUtil.isNotBlank(exportVO.getWorkflowName()), Workflow::getWorkflowName, exportVO.getWorkflowName())
.in(CollUtil.isNotEmpty(exportVO.getWorkflowIds()), Workflow::getId, exportVO.getWorkflowIds())
.ge(Workflow::getId, startId)
.orderByAsc(Workflow::getId)
).getRecords();
return workflowList.stream()
.map(this::doGetWorkflowDetail)
.map(WorkflowPartitionTask::new)
.collect(Collectors.toList());
}, partitionTasks -> {
List<WorkflowPartitionTask> workflowPartitionTasks = (List<WorkflowPartitionTask>) partitionTasks;
resultList.addAll(StreamUtils.toList(workflowPartitionTasks, WorkflowPartitionTask::getResponseVO));
}, 0);
List<WorkflowDetailResponseVO> workflowDetailResponseVOList = workflowList.stream().map(i -> getWorkflowDetail(i.getId())).collect(Collectors.toList());
return JsonUtil.toJsonString(workflowDetailResponseVOList);
return JsonUtil.toJsonString(resultList);
}
private void batchSaveWorkflowTask(final List<WorkflowRequestVO> workflowRequestVOList, final String namespaceId) {
Set<String> groupNameSet = workflowRequestVOList.stream().map(i -> i.getGroupName()).collect(Collectors.toSet());
Set<String> groupNameSet = StreamUtils.toSet(workflowRequestVOList, WorkflowRequestVO::getGroupName);
List<GroupConfig> groupConfigs = accessTemplate.getGroupConfigAccess()
.list(new LambdaQueryWrapper<GroupConfig>()
.select(GroupConfig::getGroupName)
@ -383,4 +368,54 @@ public class WorkflowServiceImpl implements WorkflowService {
saveWorkflow(workflowRequestVO);
}
}
private WorkflowDetailResponseVO doGetWorkflowDetail(final Workflow workflow) {
WorkflowDetailResponseVO responseVO = WorkflowConverter.INSTANCE.convert(workflow);
List<WorkflowNode> workflowNodes = workflowNodeMapper.selectList(new LambdaQueryWrapper<WorkflowNode>()
.eq(WorkflowNode::getDeleted, 0)
.eq(WorkflowNode::getVersion, workflow.getVersion())
.eq(WorkflowNode::getWorkflowId, workflow.getId())
.orderByAsc(WorkflowNode::getPriorityLevel));
List<Long> jobIds = StreamUtils.toList(workflowNodes, WorkflowNode::getJobId);
List<Job> jobs = jobMapper.selectList(new LambdaQueryWrapper<Job>()
.in(Job::getId, new HashSet<>(jobIds)));
Map<Long, Job> jobMap = StreamUtils.toIdentityMap(jobs, Job::getId);
List<WorkflowDetailResponseVO.NodeInfo> nodeInfos = WorkflowConverter.INSTANCE.convertList(workflowNodes);
Map<Long, WorkflowDetailResponseVO.NodeInfo> workflowNodeMap = nodeInfos.stream()
.peek(nodeInfo -> {
JobTaskConfig jobTask = nodeInfo.getJobTask();
if (Objects.nonNull(jobTask)) {
jobTask.setJobName(jobMap.getOrDefault(jobTask.getJobId(), new Job()).getJobName());
}
}).collect(Collectors.toMap(WorkflowDetailResponseVO.NodeInfo::getId, i -> i));
String flowInfo = workflow.getFlowInfo();
try {
MutableGraph<Long> graph = GraphUtils.deserializeJsonToGraph(flowInfo);
// Deserialize the flow info and build the graph
WorkflowDetailResponseVO.NodeConfig config = workflowHandler.buildNodeConfig(graph, SystemConstants.ROOT,
new HashMap<>(),
workflowNodeMap);
responseVO.setNodeConfig(config);
} catch (Exception e) {
log.error("反序列化失败. json:[{}]", flowInfo, e);
throw new SnailJobServerException("查询工作流详情失败");
}
return responseVO;
}
@EqualsAndHashCode(callSuper = true)
@Getter
private static class WorkflowPartitionTask extends PartitionTask {
private final WorkflowDetailResponseVO responseVO;
public WorkflowPartitionTask(@NotNull WorkflowDetailResponseVO responseVO) {
this.responseVO = responseVO;
setId(responseVO.getId());
}
}
}
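
The export path above no longer loads every workflow in one query; it pages through them in id order via PartitionTaskUtils.process. Below is a self-contained sketch of that cursor-style loop; it illustrates the pattern only and is not the real PartitionTaskUtils (the idOf parameter is added here for self-containment, whereas the real utility reads the id from PartitionTask).

import java.util.List;
import java.util.function.Consumer;
import java.util.function.LongFunction;
import java.util.function.ToLongFunction;

final class PartitionLoopSketch {

    // Cursor loop: fetch the next id-ordered batch starting at the cursor, hand it to the
    // consumer, then continue just past the largest id seen. Stops on an empty batch.
    static <T> void process(LongFunction<List<T>> producer,
                            Consumer<List<T>> consumer,
                            ToLongFunction<T> idOf,
                            long startId) {
        long cursor = startId;
        while (true) {
            List<T> batch = producer.apply(cursor);
            if (batch == null || batch.isEmpty()) {
                return;
            }
            consumer.accept(batch);
            cursor = batch.stream().mapToLong(idOf).max().orElse(cursor) + 1;
        }
    }
}

In the method above, each batch is capped at 100 rows by new PageDTO<>(0, 100), every row is expanded to a full WorkflowDetailResponseVO through doGetWorkflowDetail, and the consumer accumulates the results before the single JsonUtil.toJsonString call.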

View File: ExportUtils.java

@ -6,7 +6,9 @@ import org.springframework.http.ResponseEntity;
import java.nio.charset.StandardCharsets;
public class ExportUtils {
public final class ExportUtils {
private ExportUtils() {}
public static ResponseEntity<String> doExport(String json) {
HttpHeaders headers = new HttpHeaders();
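
The hunk cuts off before the rest of doExport; below is a minimal sketch of how such a method can finish, assuming a plain JSON attachment download. The filename and header choices are assumptions, not the project's actual implementation.

import org.springframework.http.ContentDisposition;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import java.nio.charset.StandardCharsets;

final class ExportDownloadSketch {
    // Illustrative only: return the JSON string as a downloadable attachment.
    static ResponseEntity<String> doExport(String json) {
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(new MediaType(MediaType.APPLICATION_JSON, StandardCharsets.UTF_8));
        headers.setContentDisposition(ContentDisposition.attachment()
                .filename("snail-job-export.json", StandardCharsets.UTF_8)   // assumed filename
                .build());
        return ResponseEntity.ok().headers(headers).body(json);
    }
}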

View File: ImportUtils.java

@ -10,9 +10,10 @@ import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.List;
public class ImportUtils {
public final class ImportUtils {
private static final List<String> FILE_EXTENSIONS = List.of("json");
private ImportUtils() {}
public static @NotNull <VO> List<VO> parseList(MultipartFile file, Class<VO> clazz) throws IOException {
if (file.isEmpty()) {
@ -26,9 +27,7 @@ public class ImportUtils {
}
JsonNode node = JsonUtil.toJson(file.getBytes());
List<VO> requestList = JsonUtil.parseList(JsonUtil.toJsonString(node), clazz);
return requestList;
return JsonUtil.parseList(JsonUtil.toJsonString(node), clazz);
}
}
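
For readers without the project's JsonUtil, the same validate-and-parse step can be written with plain Jackson; this is an equivalent sketch under that assumption, with placeholder exception types and messages rather than SnailJobCommonException.

import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.List;

final class JsonImportSketch {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    // Reject empty uploads and non-.json files, then bind the JSON array to a typed list.
    static <VO> List<VO> parseList(MultipartFile file, Class<VO> clazz) throws IOException {
        if (file.isEmpty()) {
            throw new IllegalArgumentException("uploaded file is empty");
        }
        String name = file.getOriginalFilename();
        if (name == null || !name.toLowerCase().endsWith(".json")) {
            throw new IllegalArgumentException("only .json files are supported");
        }
        return MAPPER.readValue(file.getBytes(),
                MAPPER.getTypeFactory().constructCollectionType(List.class, clazz));
    }
}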