feat(1.2.0-beta2): 1. Fix the concurrency issue in server-side channel creation 2. Fix the client-side sharding parameter not taking effect when declared as ShardingJobArgs
parent b308d2692b
commit 399ed8298d
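In short: the client-side executor scanner now recognizes handler methods whose parameter is ShardingJobArgs (previously only JobArgs was probed, so sharded executors silently fell back to the no-arg overload), and NettyChannel.send(...) is serialized so concurrent callers no longer race while creating a channel. As a purely illustrative sketch, assuming the snail-job client annotations and DTOs referenced in the diff below (the @JobExecutor attributes, the ShardingJobArgs getters, and the simplified void return type are assumptions, not taken from this commit), an executor affected by the second fix might look like this:

import com.aizuda.snailjob.client.job.core.annotation.JobExecutor;
import com.aizuda.snailjob.client.job.core.dto.ShardingJobArgs;
import org.springframework.stereotype.Component;

// Hypothetical client-side executor: the handler takes ShardingJobArgs, which is
// exactly the signature the scanner failed to resolve before this commit.
@Component
public class ShardedSyncExecutor {

    @JobExecutor(name = "shardedSyncExecutor")
    public void jobExecute(ShardingJobArgs args) {
        // getShardingIndex()/getShardingTotal() are assumed accessors; the diff only
        // shows the server copying these two values into the JobContext.
        int index = args.getShardingIndex();
        int total = args.getShardingTotal();
        // process only the slice of work that maps to this shard, e.g. id % total == index
    }
}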
@@ -114,6 +114,8 @@ public class JobEndPoint {
     private static JobContext buildJobContext(DispatchJobRequest dispatchJob) {
         JobContext jobContext = new JobContext();
         jobContext.setJobId(dispatchJob.getJobId());
+        jobContext.setShardingTotal(dispatchJob.getShardingTotal());
+        jobContext.setShardingIndex(dispatchJob.getShardingIndex());
         jobContext.setNamespaceId(dispatchJob.getNamespaceId());
         jobContext.setTaskId(dispatchJob.getTaskId());
         jobContext.setTaskBatchId(dispatchJob.getTaskBatchId());
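The hunk above is the server half of the sharding fix: DispatchJobRequest already carries shardingTotal and shardingIndex, but they were never copied into the JobContext, so the executing side never saw them. Below is a minimal, stand-alone sketch of the convention these two fields support; the data source and the modulo key are made up and are not part of the snail-job codebase.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.LongStream;

// Each of shardingTotal workers claims the items whose key maps to its shardingIndex.
public final class ShardSliceDemo {

    static List<Long> slice(List<Long> ids, int shardingIndex, int shardingTotal) {
        return ids.stream()
                .filter(id -> id % shardingTotal == shardingIndex)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<Long> ids = LongStream.rangeClosed(1, 10).boxed().collect(Collectors.toList());
        // shard 1 of 3 takes ids 1, 4, 7, 10
        System.out.println(slice(ids, 1, 3));
    }
}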
@@ -8,12 +8,9 @@ import com.aizuda.snailjob.client.job.core.annotation.MapExecutor;
 import com.aizuda.snailjob.client.job.core.annotation.MergeReduceExecutor;
 import com.aizuda.snailjob.client.job.core.annotation.ReduceExecutor;
 import com.aizuda.snailjob.client.job.core.cache.JobExecutorInfoCache;
-import com.aizuda.snailjob.client.job.core.dto.JobArgs;
-import com.aizuda.snailjob.client.job.core.dto.JobExecutorInfo;
-import com.aizuda.snailjob.client.job.core.dto.MapArgs;
-import com.aizuda.snailjob.client.job.core.dto.MergeReduceArgs;
-import com.aizuda.snailjob.client.job.core.dto.ReduceArgs;
+import com.aizuda.snailjob.client.job.core.dto.*;
 import com.aizuda.snailjob.common.log.SnailJobLog;
+import com.google.common.collect.Lists;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.aop.framework.AopProxyUtils;
 import org.springframework.beans.BeansException;
@@ -25,11 +22,7 @@ import org.springframework.stereotype.Component;
 import org.springframework.util.ReflectionUtils;
 
 import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
+import java.util.*;
 
 /**
  * @author: opensnail
@@ -70,7 +63,7 @@ public class JobExecutorScanner implements Scanner, ApplicationContextAware {
                 if (!JobExecutorInfoCache.isExisted(executorClassName)) {
                     jobExecutorInfoList.add(new JobExecutorInfo(executorClassName,
                             ReflectionUtils.findMethod(bean.getClass(), "jobExecute"),
-                            null,null, null, bean));
+                            null, null, null, bean));
                 }
             }
 
@@ -79,7 +72,15 @@ public class JobExecutorScanner implements Scanner, ApplicationContextAware {
         if (Objects.nonNull(jobExecutor)) {
             String executorName = jobExecutor.name();
             if (!JobExecutorInfoCache.isExisted(executorName)) {
-                Method method = ReflectionUtils.findMethod(bean.getClass(), jobExecutor.method(), JobArgs.class);
+                List<Class<? extends JobArgs>> classes = Lists.newArrayList(ShardingJobArgs.class, JobArgs.class);
+                Method method = null;
+                for (Class<? extends JobArgs> clazz : classes) {
+                    method = ReflectionUtils.findMethod(bean.getClass(), jobExecutor.method(), clazz);
+                    if (Objects.nonNull(method)) {
+                        break;
+                    }
+                }
+
                 if (method == null) {
                     method = ReflectionUtils.findMethod(bean.getClass(), jobExecutor.method());
                 }
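This is the client half of the fix: instead of probing only for a (JobArgs) overload, the scanner now walks a list of candidate parameter types, most specific first, and only then falls back to the no-arg overload. The stand-alone sketch below reproduces that lookup order with plain JDK reflection; ShardArgs, BaseArgs and Handler are invented stand-ins, while the real code uses Spring's ReflectionUtils and the snail-job argument classes.

import java.lang.reflect.Method;
import java.util.List;

public final class MethodResolutionDemo {

    static class BaseArgs { }
    static class ShardArgs extends BaseArgs { }

    static class Handler {
        public void run(ShardArgs args) { }   // picked because ShardArgs is tried first
        public void run() { }                 // fallback when no typed overload exists
    }

    static Method resolve(Class<?> target, String name) {
        // Most specific parameter type first, mirroring the order in the diff above.
        for (Class<?> paramType : List.of(ShardArgs.class, BaseArgs.class)) {
            try {
                return target.getMethod(name, paramType);
            } catch (NoSuchMethodException ignored) {
                // try the next, less specific parameter type
            }
        }
        try {
            return target.getMethod(name);     // no-arg fallback
        } catch (NoSuchMethodException e) {
            return null;
        }
    }

    public static void main(String[] args) {
        System.out.println(resolve(Handler.class, "run")); // resolves run(ShardArgs)
    }
}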
@@ -93,14 +94,14 @@ public class JobExecutorScanner implements Scanner, ApplicationContextAware {
                 Class<?>[] parameterTypes = method1.getParameterTypes();
                 MapExecutor mapExecutor = method1.getAnnotation(MapExecutor.class);
                 if (Objects.nonNull(mapExecutor)
-                        && parameterTypes.length >0
+                        && parameterTypes.length > 0
                         && parameterTypes[0].isAssignableFrom(MapArgs.class)) {
                     mapExecutorMethodMap.put(mapExecutor.taskName(), method1);
                 }
 
                 ReduceExecutor reduceExecutorAnno = method1.getAnnotation(ReduceExecutor.class);
                 if (Objects.nonNull(reduceExecutorAnno)
-                        && parameterTypes.length >0
+                        && parameterTypes.length > 0
                         && parameterTypes[0].isAssignableFrom(ReduceArgs.class)) {
                     reduceExecutor = method1;
                     continue;
@@ -108,7 +109,7 @@ public class JobExecutorScanner implements Scanner, ApplicationContextAware {
 
                 MergeReduceExecutor mergeReduceExecutorAnno = method1.getAnnotation(MergeReduceExecutor.class);
                 if (Objects.nonNull(mergeReduceExecutorAnno)
-                        && parameterTypes.length >0
+                        && parameterTypes.length > 0
                         && parameterTypes[0].isAssignableFrom(MergeReduceArgs.class)) {
                     mergeReduceExecutor = method1;
                 }
@@ -144,7 +145,7 @@ public class JobExecutorScanner implements Scanner, ApplicationContextAware {
                     new JobExecutorInfo(
                             jobExecutor.name(),
                             executeMethod,
-                            null,null, null,
+                            null, null, null,
                             bean
                     );
             jobExecutorInfoList.add(jobExecutorInfo);
@@ -52,7 +52,7 @@ public class NettyChannel {
      * @param body 请求的消息体
      * @throws InterruptedException
      */
-    public static void send(String hostId, String hostIp, Integer port, HttpMethod method, String url, String body, HttpHeaders requestHeaders) throws InterruptedException {
+    public static synchronized void send(String hostId, String hostIp, Integer port, HttpMethod method, String url, String body, HttpHeaders requestHeaders) throws InterruptedException {
 
         Channel channel = CHANNEL_MAP.get(Pair.of(hostId, hostIp));
         if (Objects.isNull(channel) || !channel.isActive()) {
@@ -100,7 +100,8 @@ public class NettyChannel {
             if (notTimeout) {
                 // 连接成功
                 if (channel != null && channel.isActive()) {
-                    SnailJobLog.LOCAL.info("netty client started {} connect to server", channel.localAddress());
+                    SnailJobLog.LOCAL.info("netty client started {} connect to server id:[{}] ip:[{}] channel:[{}]",
+                            channel.localAddress(), hostId, ip, channel);
                     NettyChannel.setChannel(hostId, ip, channel);
                     return channel;
                 }
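The last two hunks address the channel-creation race named in the commit title: send(...) is now synchronized, so checking for a missing or inactive channel and reconnecting can no longer interleave between threads, and the connect log now records the host id, ip and channel that were actually bound. For illustration only, the stand-alone sketch below shows the same check-then-act race closed with a per-key atomic map operation instead of a method-level lock; Connection and open(...) are hypothetical stand-ins for the Netty Channel and bootstrap logic, and this is not the approach the commit takes.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class ConnectionCache {

    private final Map<String, Connection> cache = new ConcurrentHashMap<>();

    // Without a lock, two threads can both observe a missing or dead entry and each
    // open their own connection.  ConcurrentHashMap.compute runs the remapping
    // function atomically per key, which is one alternative to locking send(...).
    Connection acquire(String hostId) {
        return cache.compute(hostId, (id, existing) ->
                (existing != null && existing.isAlive()) ? existing : open(id));
    }

    private Connection open(String hostId) {
        return new Connection(hostId, true);
    }

    record Connection(String hostId, boolean isAlive) { }
}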