Merge branch '1.2.0-beta3' into client-jdk8

# Conflicts:
#	pom.xml
dhb52 2024-11-06 21:40:51 +08:00
commit 8a5278417c
14 changed files with 61 additions and 61 deletions

pom.xml

@ -21,16 +21,19 @@
<java.version>1.8</java.version>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<revision>1.2.0-jdk8-beta2.1</revision>
<netty-all.version>4.1.94.Final</netty-all.version>
<hutool-all.version>5.8.25</hutool-all.version>
<mybatis-plus.version>3.5.7</mybatis-plus.version>
<revision>1.2.0-jdk8-beta3</revision>
<skipTests>true</skipTests>
<netty-all.version>4.1.114.Final</netty-all.version>
<hutool-all.version>5.8.32</hutool-all.version>
<mybatis-plus.version>3.5.9</mybatis-plus.version>
<guava-retrying.version>2.0.0</guava-retrying.version>
<jboss-logging.version>3.6.1.Final</jboss-logging.version>
<tinylog.version>1.3.6</tinylog.version>
<tinylog2.version>2.6.2</tinylog2.version>
<logtube.version>0.45.0</logtube.version>
<log4j.version>1.2.17</log4j.version>
<commons-logging.version>1.2</commons-logging.version>
<commons-logging.version>1.3.4</commons-logging.version>
<jakarta-validation.version>2.0.2</jakarta-validation.version>
<jakarta-mail-api.version>2.1.3</jakarta-mail-api.version>
<jakarta-mail.version>2.0.3</jakarta-mail.version>
@ -148,6 +151,12 @@
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<dependency>
<!-- >= 3.5.9: PaginationInnerInterceptor needs to be introduced separately -->
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-jsqlparser</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<dependency>
<groupId>jakarta.validation</groupId>
<artifactId>jakarta.validation-api</artifactId>
@ -181,7 +190,7 @@
<dependency>
<groupId>org.jboss.logging</groupId>
<artifactId>jboss-logging</artifactId>
<version>3.4.3.Final</version>
<version>${jboss-logging.version}</version>
<optional>true</optional>
</dependency>
<dependency>
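
For context on the new mybatis-plus-jsqlparser dependency above: from MyBatis-Plus 3.5.9 onward the pagination support ships in that separate module, while the interceptor is still registered the usual way. A minimal sketch of such a registration, assuming Spring Java config and a MySQL dialect (both are assumptions, not part of this commit; the import paths follow the familiar extension-package layout and may differ in 3.5.9+):

import com.baomidou.mybatisplus.annotation.DbType;
import com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor;
import com.baomidou.mybatisplus.extension.plugins.inner.PaginationInnerInterceptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class MybatisPlusConfig {

    // Registers the pagination plugin; with mybatis-plus >= 3.5.9 the class is
    // provided by the separate mybatis-plus-jsqlparser artifact added above.
    @Bean
    public MybatisPlusInterceptor mybatisPlusInterceptor() {
        MybatisPlusInterceptor interceptor = new MybatisPlusInterceptor();
        interceptor.addInnerInterceptor(new PaginationInnerInterceptor(DbType.MYSQL)); // DbType.MYSQL is illustrative
        return interceptor;
    }
}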


@ -19,7 +19,6 @@ import com.aizuda.snailjob.client.common.log.support.SnailJobLogManager;
import com.aizuda.snailjob.client.common.rpc.client.NettyChannel;
import com.aizuda.snailjob.common.log.constant.LogFieldConstants;
import com.aizuda.snailjob.common.log.dto.LogContentDTO;
import org.apache.log4j.MDC;
import org.apache.logging.log4j.core.Filter;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LogEvent;
@ -30,6 +29,7 @@ import org.apache.logging.log4j.core.config.plugins.PluginElement;
import org.apache.logging.log4j.core.config.plugins.PluginFactory;
import org.apache.logging.log4j.core.util.Booleans;
import org.apache.logging.log4j.core.util.Throwables;
import org.slf4j.MDC;
import java.io.Serializable;
import java.util.Objects;
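
The import change above swaps org.apache.log4j.MDC for org.slf4j.MDC in the log4j2 appender. A minimal sketch of the SLF4J MDC calls involved; the key name traceId is purely illustrative:

import org.slf4j.MDC;

public class MdcUsageSketch {
    public static void main(String[] args) {
        // Values put into the MDC are thread-local and visible to any logging
        // backend bridged through SLF4J, including log4j2.
        MDC.put("traceId", "abc-123");
        try {
            // log statements issued here can render the value via %X{traceId}
        } finally {
            MDC.remove("traceId"); // always clear thread-local state
        }
    }
}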


@ -11,9 +11,10 @@ import com.aizuda.snailjob.common.log.SnailJobLog;
import lombok.Data;
import org.springframework.util.StringUtils;
import java.util.Iterator;
import java.util.Base64;
import java.util.Map;
import java.util.Objects;
import java.util.regex.Pattern;
public abstract class AbstractHttpExecutor {
@ -27,6 +28,7 @@ public abstract class AbstractHttpExecutor {
private static final String HTTP = "http";
private static final String HTTP_PREFIX = "http://";
private static final int HTTP_SUCCESS_CODE = 200;
private static final Pattern pattern = Pattern.compile("[\\u4e00-\\u9fa5]");
public ExecuteResult process(HttpParams httpParams) {
if (httpParams == null) {
@ -123,6 +125,11 @@ public abstract class AbstractHttpExecutor {
if ( Objects.nonNull(httpParams.getWfContext())) {
httpParams.getWfContext().forEach((key, value) -> {
String headerValue = (value instanceof String) ? (String) value : JsonUtil.toJsonString(value);
// Regular expression match for Chinese characters
if (pattern.matcher(headerValue).find()) {
// If the value contains Chinese characters, Base64-encode it
headerValue = Base64.getEncoder().encodeToString(headerValue.getBytes());
}
request.header(key, headerValue);
});
}
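
The new logic above Base64-encodes a workflow-context header value when it contains Chinese characters, since raw non-ASCII text is not safe in HTTP header values. A minimal sketch of the matching decode step a receiver would need; note that the encoder in this diff calls getBytes() without an explicit charset, so UTF-8 below is an assumption:

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class HeaderDecodeSketch {
    public static void main(String[] args) {
        // What AbstractHttpExecutor would put into the header for a Chinese value
        String encoded = Base64.getEncoder()
                .encodeToString("工作流上下文".getBytes(StandardCharsets.UTF_8));

        // The receiving side reverses the encoding before using the value
        String decoded = new String(Base64.getDecoder().decode(encoded), StandardCharsets.UTF_8);
        System.out.println(decoded); // prints the original Chinese text
    }
}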


@ -58,7 +58,7 @@
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>QLExpress</artifactId>
<version>3.3.1</version>
<version>3.3.4</version>
<scope>provided</scope>
<optional>true</optional>
</dependency>


@ -61,7 +61,7 @@
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>QLExpress</artifactId>
<version>3.3.1</version>
<version>3.3.4</version>
<scope>provided</scope>
<optional>true</optional>
</dependency>


@ -4,11 +4,11 @@ import cn.hutool.core.util.StrUtil;
import com.aizuda.snailjob.common.log.constant.LogFieldConstants;
import com.aizuda.snailjob.common.log.dialect.AbstractLog;
import com.aizuda.snailjob.common.log.factory.LogFactory;
import org.apache.log4j.MDC;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.spi.AbstractLogger;
import org.slf4j.MDC;
//import java.io.Serial;


@ -32,6 +32,10 @@
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-spring-boot3-starter</artifactId>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-jsqlparser</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>


@ -16,7 +16,7 @@
<properties>
<java.version>17</java.version>
<postgresql.version>42.2.24</postgresql.version>
<postgresql.version>42.7.2</postgresql.version>
</properties>
<dependencies>


@ -76,7 +76,7 @@
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>QLExpress</artifactId>
<version>3.3.1</version>
<version>3.3.4</version>
<exclusions>
<exclusion>
<groupId>commons-beanutils</groupId>


@ -20,8 +20,6 @@ import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.google.common.collect.Lists;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
@ -43,8 +41,6 @@ import java.util.List;
@RequiredArgsConstructor
public class JobClearLogSchedule extends AbstractSchedule implements Lifecycle {
// last clean log time
private static Long lastCleanLogTime = 0L;
private final SystemProperties systemProperties;
private final JobTaskBatchMapper jobTaskBatchMapper;
private final JobTaskMapper jobTaskMapper;
@ -58,7 +54,7 @@ public class JobClearLogSchedule extends AbstractSchedule implements Lifecycle {
@Override
public String lockAtMost() {
return "PT1H";
return "PT4H";
}
@Override
@ -69,8 +65,9 @@ public class JobClearLogSchedule extends AbstractSchedule implements Lifecycle {
@Override
protected void doExecute() {
try {
// Clear logs only when the default retention days is greater than zero; keep at least the most recent day of log data
if (systemProperties.getLogStorage() <= 0 || System.currentTimeMillis() - lastCleanLogTime < 24 * 60 * 60 * 1000) {
// Clear logs only when the default retention days is greater than zero; keep at least the most recent day of log data
if (systemProperties.getLogStorage() <= 1) {
SnailJobLog.LOCAL.error("job clear log storage error:[{}]", systemProperties.getLogStorage());
return;
}
// clean job log
@ -82,9 +79,6 @@ public class JobClearLogSchedule extends AbstractSchedule implements Lifecycle {
SnailJobLog.LOCAL.debug("Job clear success total:[{}]", total);
} catch (Exception e) {
SnailJobLog.LOCAL.error("job clear log error", e);
} finally {
// update clean time
lastCleanLogTime = System.currentTimeMillis();
}
}
@ -146,7 +140,7 @@ public class JobClearLogSchedule extends AbstractSchedule implements Lifecycle {
@Override
public void start() {
taskScheduler.scheduleAtFixedRate(this::execute, Duration.parse("PT1H"));
taskScheduler.scheduleAtFixedRate(this::execute, Duration.parse("PT4H"));
}
@Override
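
With the lastCleanLogTime throttle removed, the clear-log schedule above relies only on the fixed-rate interval and the lock window, both widened from PT1H to PT4H. A small sketch of the ISO-8601 durations involved, purely illustrative:

import java.time.Duration;

public class ScheduleWindowSketch {
    public static void main(String[] args) {
        Duration fixedRate = Duration.parse("PT4H");  // interval passed to scheduleAtFixedRate
        Duration lockAtMost = Duration.parse("PT4H"); // upper bound returned by lockAtMost()
        System.out.println(fixedRate.toHours());                  // 4
        System.out.println(fixedRate.compareTo(lockAtMost) == 0); // lock window matches the run interval
    }
}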


@ -53,8 +53,6 @@ public class JobLogMergeSchedule extends AbstractSchedule implements Lifecycle {
private final JobLogMessageMapper jobLogMessageMapper;
private final TransactionTemplate transactionTemplate;
// last merge log time
private static Long lastMergeLogTime = 0L;
@Override
public String lockName() {
@ -74,10 +72,6 @@ public class JobLogMergeSchedule extends AbstractSchedule implements Lifecycle {
@Override
protected void doExecute() {
try {
// When merging log data, keep at least the most recent day of logs
if (System.currentTimeMillis() - lastMergeLogTime < 24 * 60 * 60 * 1000) {
return;
}
// merge job log
long total;
LocalDateTime endTime = LocalDateTime.now().minusDays(systemProperties.getMergeLogDays());
@ -87,9 +81,6 @@ public class JobLogMergeSchedule extends AbstractSchedule implements Lifecycle {
SnailJobLog.LOCAL.debug("job merge success total:[{}]", total);
} catch (Exception e) {
SnailJobLog.LOCAL.error("job merge log error", e);
} finally {
// update merge time
lastMergeLogTime = System.currentTimeMillis();
}
}
@ -192,7 +183,7 @@ public class JobLogMergeSchedule extends AbstractSchedule implements Lifecycle {
@Override
public void start() {
taskScheduler.scheduleAtFixedRate(this::execute, Duration.parse("PT1H"));
taskScheduler.scheduleAtFixedRate(this::execute, Duration.parse("PT1M"));
}
@Override


@ -38,8 +38,6 @@ import java.util.List;
@Slf4j
public class ClearLogSchedule extends AbstractSchedule implements Lifecycle {
// last clean log time
private static Long lastCleanLogTime = 0L;
@Autowired
private RetryTaskLogMapper retryTaskLogMapper;
@Autowired
@ -56,7 +54,7 @@ public class ClearLogSchedule extends AbstractSchedule implements Lifecycle {
@Override
public String lockAtMost() {
return "PT1H";
return "PT4H";
}
@Override
@ -67,7 +65,9 @@ public class ClearLogSchedule extends AbstractSchedule implements Lifecycle {
@Override
protected void doExecute() {
try {
if (systemProperties.getLogStorage() <= 0 || System.currentTimeMillis() - lastCleanLogTime < 24 * 60 * 60 * 1000) {
// Clear logs only when the default retention days is greater than zero; keep at least the most recent day of log data
if (systemProperties.getLogStorage() <= 1) {
SnailJobLog.LOCAL.error("retry clear log storage error:[{}]", systemProperties.getLogStorage());
return;
}
// clean retry log
@ -78,9 +78,6 @@ public class ClearLogSchedule extends AbstractSchedule implements Lifecycle {
SnailJobLog.LOCAL.debug("Retry clear success total:[{}]", total);
} catch (Exception e) {
SnailJobLog.LOCAL.error("clear log error", e);
} finally {
// update clean time
lastCleanLogTime = System.currentTimeMillis();
}
}
@ -137,7 +134,7 @@ public class ClearLogSchedule extends AbstractSchedule implements Lifecycle {
@Override
public void start() {
taskScheduler.scheduleAtFixedRate(this::execute, Duration.parse("PT1H"));
taskScheduler.scheduleAtFixedRate(this::execute, Duration.parse("PT4H"));
}
@Override


@ -49,8 +49,6 @@ import static java.util.stream.Collectors.toList;
@RequiredArgsConstructor
public class RetryLogMergeSchedule extends AbstractSchedule implements Lifecycle {
// last merge log time
private static Long lastMergeLogTime = 0L;
private final SystemProperties systemProperties;
private final RetryTaskLogMapper retryTaskLogMapper;
private final RetryTaskLogMessageMapper retryTaskLogMessageMapper;
@ -74,10 +72,6 @@ public class RetryLogMergeSchedule extends AbstractSchedule implements Lifecycle
@Override
protected void doExecute() {
try {
// When merging log data, keep at least the most recent day of logs
if (System.currentTimeMillis() - lastMergeLogTime < 24 * 60 * 60 * 1000) {
return;
}
// merge job log
long total;
LocalDateTime endTime = LocalDateTime.now().minusDays(systemProperties.getMergeLogDays());
@ -87,9 +81,6 @@ public class RetryLogMergeSchedule extends AbstractSchedule implements Lifecycle
SnailJobLog.LOCAL.debug("job merge success total:[{}]", total);
} catch (Exception e) {
SnailJobLog.LOCAL.error("job merge log error", e);
} finally {
// update merge time
lastMergeLogTime = System.currentTimeMillis();
}
}


@ -1,17 +1,24 @@
FROM --platform=$BUILDPLATFORM amazoncorretto:17 as builder
WORKDIR /
# BellSoft image officially recommended by Spring; solves problems running on ARM chips and domestic Chinese operating systems
FROM bellsoft/liberica-openjdk-debian:17.0.11-cds
ADD ./target/snail-job-server-exec.jar snail-job-server.jar
LABEL maintainer="opensnail"
LABEL app-name="snail-job-server"
LABEL description="SnailJob is a high performance distributed task scheduler and retry management center"
FROM --platform=$BUILDPLATFORM amazoncorretto:17
MAINTAINER opensnail
LABEL server-name=snail-job-server
ENV TZ=Asia/Shanghai LANG=C.UTF-8 LC_ALL=C.UTF-8
ENV JAVA_OPTS="-Djava.security.egd=file:/dev/./urandom -XX:+HeapDumpOnOutOfMemoryError -XX:+UseZGC"
ENV PARAMS=""
# Exposed ports
WORKDIR /snailjob/server
ADD ./target/snail-job-server-exec.jar app.jar
# Exposed ports
EXPOSE 8080
EXPOSE 1788
WORKDIR /
COPY --from=builder /snail-job-server.jar .
# Data volume used to map the log directory
VOLUME /snailjob/server/data/log
ENTRYPOINT ["sh","-c","java -jar $JAVA_OPTS /snail-job-server.jar $PARAMS"]
# https://docs.docker.com/reference/build-checks/json-args-recommended/
SHELL ["/bin/bash", "-c"]
ENTRYPOINT java $JAVA_OPTS -jar app.jar $PARAMS