feat(1.2.0-beta1): 1. map和map reduce支持配置参数全路径传递

This commit is contained in:
opensnail 2024-09-07 12:57:13 +08:00
parent 876f48f1c2
commit 01d24d754a
5 changed files with 98 additions and 130 deletions

View File

@ -15,12 +15,6 @@ import java.util.Objects;
@Data
public class JobArgs {
/**
* 此字段即将废弃，请使用 {@link #jobParams} 替代
*/
@Deprecated
private String argsStr;
private Object jobParams;
private String executorInfo;

View File

@ -101,13 +101,6 @@ public abstract class AbstractJobExecutor implements IJobExecutor {
private static JobArgs buildJobArgs(JobContext jobContext) {
JobArgs jobArgs = new JobArgs();
// 下一个版本即将删除本期兼容此问题
Object jobParams = jobContext.getJobArgsHolder().getJobParams();
if (jobParams instanceof String) {
jobArgs.setArgsStr((String) jobParams);
} else {
jobArgs.setArgsStr(JsonUtil.toJsonString(jobParams));
}
jobArgs.setJobParams(jobContext.getJobArgsHolder().getJobParams());
jobArgs.setExecutorInfo(jobContext.getExecutorInfo());
jobArgs.setTaskBatchId(jobContext.getTaskBatchId());
@ -117,14 +110,6 @@ public abstract class AbstractJobExecutor implements IJobExecutor {
private static JobArgs buildShardingJobArgs(JobContext jobContext) {
ShardingJobArgs jobArgs = new ShardingJobArgs();
jobArgs.setJobParams(jobContext.getJobArgsHolder().getJobParams());
// 下一个版本即将删除本期兼容此问题
Object jobParams = jobContext.getJobArgsHolder().getJobParams();
if (jobParams instanceof String) {
jobArgs.setArgsStr((String) jobParams);
} else {
jobArgs.setArgsStr(JsonUtil.toJsonString(jobParams));
}
jobArgs.setExecutorInfo(jobContext.getExecutorInfo());
jobArgs.setShardingIndex(jobContext.getShardingIndex());
jobArgs.setShardingTotal(jobContext.getShardingTotal());
@ -134,13 +119,6 @@ public abstract class AbstractJobExecutor implements IJobExecutor {
private static JobArgs buildMapJobArgs(JobContext jobContext) {
MapArgs jobArgs = new MapArgs();
JobArgsHolder jobArgsHolder = jobContext.getJobArgsHolder();
// 下一个版本即将删除本期兼容此问题
Object jobParams = jobContext.getJobArgsHolder().getJobParams();
if (jobParams instanceof String) {
jobArgs.setArgsStr((String) jobParams);
} else {
jobArgs.setArgsStr(JsonUtil.toJsonString(jobParams));
}
jobArgs.setJobParams(jobArgsHolder.getJobParams());
jobArgs.setMapResult(jobArgsHolder.getMaps());
jobArgs.setExecutorInfo(jobContext.getExecutorInfo());
@ -152,13 +130,6 @@ public abstract class AbstractJobExecutor implements IJobExecutor {
private static JobArgs buildReduceJobArgs(JobContext jobContext) {
ReduceArgs jobArgs = new ReduceArgs();
JobArgsHolder jobArgsHolder = jobContext.getJobArgsHolder();
// 下一个版本即将删除本期兼容此问题
Object jobParams = jobContext.getJobArgsHolder().getJobParams();
if (jobParams instanceof String) {
jobArgs.setArgsStr((String) jobParams);
} else {
jobArgs.setArgsStr(JsonUtil.toJsonString(jobParams));
}
jobArgs.setJobParams(jobArgsHolder.getJobParams());
Object maps = jobArgsHolder.getMaps();
if (Objects.nonNull(maps)) {

View File

@ -3,11 +3,7 @@ package com.aizuda.snailjob.server.job.task.support.generator.task;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.util.StrUtil;
import com.aizuda.snailjob.common.core.enums.JobArgsTypeEnum;
import com.aizuda.snailjob.common.core.enums.JobTaskStatusEnum;
import com.aizuda.snailjob.common.core.enums.JobTaskTypeEnum;
import com.aizuda.snailjob.common.core.enums.MapReduceStageEnum;
import com.aizuda.snailjob.common.core.enums.StatusEnum;
import com.aizuda.snailjob.common.core.enums.*;
import com.aizuda.snailjob.common.core.exception.SnailJobMapReduceException;
import com.aizuda.snailjob.common.core.model.JobArgsHolder;
import com.aizuda.snailjob.common.core.util.JsonUtil;
@ -26,15 +22,16 @@ import com.aizuda.snailjob.template.datasource.persistence.po.JobTask;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.google.common.collect.Lists;
import lombok.RequiredArgsConstructor;
import org.jetbrains.annotations.NotNull;
import org.springframework.stereotype.Component;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;
import java.time.LocalDateTime;
import java.util.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
/**
* 生成Map Reduce任务
@ -60,14 +57,6 @@ public class MapReduceTaskGenerator extends AbstractJobTaskGenerator {
@Override
protected List<JobTask> doGenerate(final JobTaskGenerateContext context) {
// Set<RegisterNodeInfo> serverNodes = CacheRegisterTable.getServerNodeSet(context.getGroupName(),
// context.getNamespaceId());
// if (CollUtil.isEmpty(serverNodes)) {
// SnailJobLog.LOCAL.error("无可执行的客户端信息. jobId:[{}]", context.getJobId());
// return Lists.newArrayList();
// }
// List<RegisterNodeInfo> nodeInfoList = new ArrayList<>(serverNodes);
MapReduceStageEnum mapReduceStageEnum = MapReduceStageEnum.ofStage(context.getMrStage());
Assert.notNull(mapReduceStageEnum, () -> new SnailJobServerException("Map reduce stage is not existed"));
switch (Objects.requireNonNull(mapReduceStageEnum)) {
@ -96,13 +85,14 @@ public class MapReduceTaskGenerator extends AbstractJobTaskGenerator {
.eq(JobTask::getLeaf, StatusEnum.YES.getStatus())
);
MapReduceArgsStrDTO jobParams = getJobParams(context);
Pair<String, Integer> clientInfo = getClientNodeInfo(context);
// 新增任务实例
JobTask jobTask = JobTaskConverter.INSTANCE.toJobTaskInstance(context);
jobTask.setClientInfo(clientInfo.getKey());
jobTask.setArgsType(context.getArgsType());
JobArgsHolder jobArgsHolder = new JobArgsHolder();
jobArgsHolder.setJobParams(context.getArgsStr());
jobArgsHolder.setJobParams(jobParams.getArgsStr());
jobArgsHolder.setReduces(StreamUtils.toList(jobTasks, JobTask::getResultMessage));
jobTask.setArgsStr(JsonUtil.toJsonString(jobArgsHolder));
jobTask.setTaskStatus(clientInfo.getValue());
@ -117,17 +107,9 @@ public class MapReduceTaskGenerator extends AbstractJobTaskGenerator {
private List<JobTask> createReduceJobTasks(JobTaskGenerateContext context) {
int reduceParallel = 1;
String jobParams = null;
try {
MapReduceArgsStrDTO mapReduceArgsStrDTO = JsonUtil.parseObject(context.getArgsStr(),
MapReduceArgsStrDTO.class);
reduceParallel = Optional.ofNullable(mapReduceArgsStrDTO.getShardNum()).orElse(1);
jobParams = mapReduceArgsStrDTO.getArgsStr();
reduceParallel = Math.max(1, reduceParallel);
} catch (Exception e) {
SnailJobLog.LOCAL.error("map reduce args parse error. argsStr:[{}]", context.getArgsStr());
}
MapReduceArgsStrDTO jobParams = getJobParams(context);
int reduceParallel = Math.max(1,
Optional.ofNullable(jobParams.getShardNum()).orElse(1));
List<JobTask> jobTasks = jobTaskMapper.selectList(new LambdaQueryWrapper<JobTask>()
.select(JobTask::getResultMessage, JobTask::getId)
@ -146,7 +128,6 @@ public class MapReduceTaskGenerator extends AbstractJobTaskGenerator {
jobTasks = new ArrayList<>(partition.size());
final List<JobTask> finalJobTasks = jobTasks;
String finalJobParams = jobParams;
transactionTemplate.execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(final TransactionStatus status) {
@ -158,7 +139,7 @@ public class MapReduceTaskGenerator extends AbstractJobTaskGenerator {
jobTask.setClientInfo(clientInfo.getKey());
jobTask.setArgsType(context.getArgsType());
JobArgsHolder jobArgsHolder = new JobArgsHolder();
jobArgsHolder.setJobParams(finalJobParams);
jobArgsHolder.setJobParams(jobParams.getArgsStr());
jobArgsHolder.setMaps(partition.get(index));
jobTask.setArgsStr(JsonUtil.toJsonString(jobArgsHolder));
jobTask.setTaskStatus(clientInfo.getValue());
@ -187,6 +168,8 @@ public class MapReduceTaskGenerator extends AbstractJobTaskGenerator {
return Lists.newArrayList();
}
MapReduceArgsStrDTO jobParams = getJobParams(context);
// 判定父节点是不是叶子节点若是则不更新否则更新为非叶子节点
JobTask parentJobTask = jobTaskMapper.selectOne(
new LambdaQueryWrapper<JobTask>()
@ -205,7 +188,7 @@ public class MapReduceTaskGenerator extends AbstractJobTaskGenerator {
jobTask.setClientInfo(clientInfo.getKey());
jobTask.setArgsType(context.getArgsType());
JobArgsHolder jobArgsHolder = new JobArgsHolder();
jobArgsHolder.setJobParams(context.getArgsStr());
jobArgsHolder.setJobParams(jobParams.getArgsStr());
jobArgsHolder.setMaps(mapSubTask.get(index));
jobTask.setArgsStr(JsonUtil.toJsonString(jobArgsHolder));
jobTask.setArgsType(JobArgsTypeEnum.JSON.getArgsType());
@ -237,6 +220,16 @@ public class MapReduceTaskGenerator extends AbstractJobTaskGenerator {
}
/**
 * Parses the raw args string carried by the context into a {@link MapReduceArgsStrDTO}.
 * Never returns {@code null}: when the args string cannot be parsed, an empty DTO is
 * returned so downstream callers can read fields without null checks.
 *
 * @param context task-generation context whose {@code argsStr} holds the JSON parameters
 * @return the parsed DTO, or a fresh empty DTO when parsing fails
 */
protected MapReduceArgsStrDTO getJobParams(JobTaskGenerateContext context) {
    try {
        return JsonUtil.parseObject(context.getArgsStr(), MapReduceArgsStrDTO.class);
    } catch (Exception e) {
        // Pass the exception as the last argument so the stack trace is logged
        // instead of being silently dropped (previously only the argsStr was logged).
        SnailJobLog.LOCAL.error("map reduce args parse error. argsStr:[{}]", context.getArgsStr(), e);
    }
    return new MapReduceArgsStrDTO();
}
private Pair<String, Integer> getClientNodeInfo(JobTaskGenerateContext context) {
RegisterNodeInfo serverNode = clientNodeAllocateHandler.getServerNode(
context.getJobId().toString(),

View File

@ -2,6 +2,7 @@ package com.aizuda.snailjob.server.job.task.support.generator.task;
import com.aizuda.snailjob.common.core.enums.JobTaskTypeEnum;
import com.aizuda.snailjob.server.common.handler.ClientNodeAllocateHandler;
import com.aizuda.snailjob.server.job.task.dto.MapReduceArgsStrDTO;
import com.aizuda.snailjob.template.datasource.persistence.mapper.JobTaskMapper;
import com.aizuda.snailjob.template.datasource.persistence.po.JobTask;
import org.springframework.stereotype.Component;
@ -32,4 +33,12 @@ public class MapTaskGenerator extends MapReduceTaskGenerator {
/**
 * Generates map tasks by delegating entirely to the shared map-reduce generation
 * flow implemented in the parent class.
 * NOTE(review): this override only forwards to {@code super}; confirm it is kept
 * as an intentional extension point rather than dead code.
 */
protected List<JobTask> doGenerate(final JobTaskGenerateContext context) {
    return super.doGenerate(context);
}
/**
 * Wraps the context's raw args string into a {@link MapReduceArgsStrDTO} without
 * JSON parsing, reusing the map-reduce parameter handling for plain map tasks.
 *
 * @param context task-generation context providing the raw args string
 * @return a DTO whose args string is taken verbatim from the context
 */
@Override
protected MapReduceArgsStrDTO getJobParams(JobTaskGenerateContext context) {
    // Reuse the map-reduce parameter capability: map tasks carry the args as-is.
    MapReduceArgsStrDTO dto = new MapReduceArgsStrDTO();
    dto.setArgsStr(context.getArgsStr());
    return dto;
}
}

View File

@ -69,21 +69,6 @@ public class MapTaskPostHttpRequestHandler extends PostHttpRequestHandler {
Object[] args = retryRequest.getArgs();
MapTaskRequest mapTaskRequest = JsonUtil.parseObject(JsonUtil.toJsonString(args[0]), MapTaskRequest.class);
// 创建map任务
JobTaskGenerator taskInstance = JobTaskGeneratorFactory.getTaskInstance(JobTaskTypeEnum.MAP_REDUCE.getType());
JobTaskGenerateContext context = JobTaskConverter.INSTANCE.toJobTaskInstanceGenerateContext(mapTaskRequest);
context.setGroupName(HttpHeaderUtil.getGroupName(headers));
context.setNamespaceId(HttpHeaderUtil.getNamespace(headers));
context.setMrStage(MapReduceStageEnum.MAP.getStage());
context.setMapSubTask(mapTaskRequest.getSubTask());
context.setWfContext(mapTaskRequest.getWfContext());
List<JobTask> taskList = taskInstance.generate(context);
if (CollUtil.isEmpty(taskList)) {
return JsonUtil.toJsonString(
new NettyResult(StatusEnum.NO.getStatus(), "Job task is empty", Boolean.FALSE,
retryRequest.getReqId()));
}
Job job = jobMapper.selectOne(new LambdaQueryWrapper<Job>()
.eq(Job::getId, mapTaskRequest.getJobId())
.eq(Job::getGroupName, groupName)
@ -96,6 +81,22 @@ public class MapTaskPostHttpRequestHandler extends PostHttpRequestHandler {
retryRequest.getReqId()));
}
// 创建map任务
JobTaskGenerator taskInstance = JobTaskGeneratorFactory.getTaskInstance(job.getTaskType());
JobTaskGenerateContext context = JobTaskConverter.INSTANCE.toJobTaskInstanceGenerateContext(mapTaskRequest);
context.setGroupName(HttpHeaderUtil.getGroupName(headers));
context.setArgsStr(job.getArgsStr());
context.setNamespaceId(HttpHeaderUtil.getNamespace(headers));
context.setMrStage(MapReduceStageEnum.MAP.getStage());
context.setMapSubTask(mapTaskRequest.getSubTask());
context.setWfContext(mapTaskRequest.getWfContext());
List<JobTask> taskList = taskInstance.generate(context);
if (CollUtil.isEmpty(taskList)) {
return JsonUtil.toJsonString(
new NettyResult(StatusEnum.NO.getStatus(), "Job task is empty", Boolean.FALSE,
retryRequest.getReqId()));
}
String newWfContext = null;
if (Objects.nonNull(mapTaskRequest.getWorkflowTaskBatchId())) {
WorkflowTaskBatch workflowTaskBatch = workflowTaskBatchMapper.selectOne(