feat: snail-job_1.0.0 postgres migration

dhb52 2024-05-13 23:36:20 +08:00
parent 76ad7b4e4b
commit a78b1eece4
11 changed files with 808 additions and 778 deletions
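
Every mapper diff below makes the same kind of change: table references pick up the sj_ prefix used by the snail-job 1.0.0 PostgreSQL schema (the suppressed file below is presumably the regenerated DDL script), along with some keyword-casing and layout cleanup. Purely as an illustration of the new naming — not part of this commit — renaming an existing database in place would look roughly like this:

    -- Illustrative sketch only; the release presumably ships a full 1.0.0 schema script.
    ALTER TABLE job               RENAME TO sj_job;
    ALTER TABLE job_summary       RENAME TO sj_job_summary;
    ALTER TABLE job_log_message   RENAME TO sj_job_log_message;
    ALTER TABLE workflow          RENAME TO sj_workflow;
    ALTER TABLE retry_task        RENAME TO sj_retry_task;
    ALTER TABLE retry_dead_letter RENAME TO sj_retry_dead_letter;
    ALTER TABLE scene_config      RENAME TO sj_retry_scene_config;
    ALTER TABLE server_node       RENAME TO sj_server_node;
    -- ...and likewise for the remaining summary, log and partitioned tables.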

File diff suppressed because it is too large.

View File

@@ -17,7 +17,7 @@
     </resultMap>
     <!-- 定义批量新增的 SQL 映射 -->
     <insert id="batchInsert" parameterType="java.util.List">
-        INSERT INTO job_log_message
+        INSERT INTO sj_job_log_message
         (
         namespace_id,
         group_name,

View File

@@ -27,16 +27,16 @@
     </resultMap>
     <update id="updateBatchNextTriggerAtById" parameterType="java.util.List">
-        UPDATE job AS rt
+        UPDATE sj_job AS rt
         SET next_trigger_at = tt.next_trigger_at
         FROM (
         <foreach collection="list" item="item" index="index" separator="UNION ALL">
             select
             #{item.nextTriggerAt} as next_trigger_at,
             #{item.id} as id
         </foreach>
         ) AS tt
         WHERE rt.id = tt.id
     </update>
 </mapper>
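
The batch update above relies on PostgreSQL's UPDATE ... FROM form: the <foreach> builds a derived table of (next_trigger_at, id) pairs with UNION ALL, and the outer statement joins against it. For a two-element list, MyBatis would render roughly the statement below (ids and trigger values are placeholders; an epoch-millisecond next_trigger_at is assumed here, the real column type comes from the 1.0.0 schema):

    UPDATE sj_job AS rt
    SET next_trigger_at = tt.next_trigger_at
    FROM (
        SELECT 1715612180000 AS next_trigger_at, 101 AS id
        UNION ALL
        SELECT 1715612240000 AS next_trigger_at, 102 AS id
    ) AS tt
    WHERE rt.id = tt.id;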

View File

@@ -20,7 +20,7 @@
     </resultMap>
     <insert id="batchInsert" parameterType="java.util.List" useGeneratedKeys="true" keyProperty="id">
-        INSERT INTO job_summary (namespace_id, group_name, business_id, trigger_at, system_task_type,
+        INSERT INTO sj_job_summary (namespace_id, group_name, business_id, trigger_at, system_task_type,
                                  success_num,fail_num,fail_reason,stop_num,stop_reason, cancel_num,cancel_reason)
         VALUES
         <foreach collection="list" item="item" separator=",">
@@ -42,7 +42,7 @@
     </insert>
     <update id="batchUpdate" parameterType="java.util.List">
-        UPDATE job_summary AS rt
+        UPDATE sj_job_summary AS rt
         SET success_num = tt.success_num,
             fail_num = tt.fail_num,
             fail_reason = tt.fail_reason,
@@ -78,7 +78,7 @@
         COALESCE(SUM(cancel_num), 0) AS cancel,
         COALESCE(SUM(fail_num + stop_num + cancel_num), 0) AS fail,
         COALESCE(SUM(success_num + fail_num + stop_num + cancel_num), 0) AS total
-        FROM job_summary
+        FROM sj_job_summary
         ${ew.customSqlSegment}
         GROUP BY createDt
     </select>
@@ -90,7 +90,7 @@
         COALESCE(sum(cancel_num), 0) AS cancelNum,
         COALESCE(sum(fail_num), 0) AS failNum,
         COALESCE(sum(success_num + fail_num + stop_num + cancel_num), 0) AS totalNum
-        FROM job_summary
+        FROM sj_job_summary
         ${ew.customSqlSegment}
     </select>
@@ -98,13 +98,13 @@
             resultType="com.aizuda.snailjob.template.datasource.persistence.dataobject.DashboardRetryLineResponseDO$Rank">
         SELECT
         <if test="systemTaskType == 3">
-            CONCAT(group_name, '/', (SELECT job_name FROM job WHERE id = business_id)) name,
+            CONCAT(group_name, '/', (SELECT job_name FROM sj_job WHERE id = business_id)) name,
         </if>
         <if test="systemTaskType == 4">
-            CONCAT(group_name, '/', (SELECT workflow_name FROM workflow WHERE id = business_id)) name,
+            CONCAT(group_name, '/', (SELECT workflow_name FROM sj_workflow WHERE id = business_id)) name,
         </if>
         SUM(fail_num) AS total
-        FROM job_summary
+        FROM sj_job_summary
         ${ew.customSqlSegment}
         HAVING SUM(fail_num) > 0
         ORDER BY total DESC LIMIT 10
@@ -115,7 +115,7 @@
         SELECT group_name AS groupName,
         SUM(CASE WHEN (job_status = 1) THEN 1 ELSE 0 END) AS run,
         count(*) AS total
-        FROM job
+        FROM sj_job
         ${ew.customSqlSegment}
         GROUP BY namespace_id, group_name
     </select>
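
${ew.customSqlSegment} is the MyBatis-Plus wrapper placeholder; at runtime the caller's query wrapper supplies the WHERE clause at that position. Assuming a wrapper that only filters by namespace (the condition below is illustrative, not from the commit), the task-list query in the last hunk reaches PostgreSQL as roughly:

    SELECT group_name AS groupName,
           SUM(CASE WHEN (job_status = 1) THEN 1 ELSE 0 END) AS run,
           count(*) AS total
    FROM sj_job
    WHERE namespace_id = 'dev_namespace'  -- injected via ew.customSqlSegment (illustrative)
    GROUP BY namespace_id, group_name;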

View File

@@ -19,21 +19,22 @@
         id, namespace_id, unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name, args_str, ext_attrs, create_dt, task_type
     </sql>
     <insert id="insertBatch">
-        insert into retry_dead_letter (namespace_id, unique_id, group_name, scene_name,
+        INSERT INTO sj_retry_dead_letter (namespace_id, unique_id, group_name, scene_name,
                                        idempotent_id, biz_no, executor_name, args_str,
-                                       ext_attrs, create_dt
-        )
-        values
+                                       ext_attrs, create_dt)
+        VALUES
         <foreach collection="retryDeadLetters" item="retryDeadLetter" separator=",">
-            (#{retryDeadLetter.namespaceId,jdbcType=VARCHAR}, #{retryDeadLetter.uniqueId,jdbcType=VARCHAR}, #{retryDeadLetter.groupName,jdbcType=VARCHAR}, #{retryDeadLetter.sceneName,jdbcType=VARCHAR},
+            (
+            #{retryDeadLetter.namespaceId,jdbcType=VARCHAR}, #{retryDeadLetter.uniqueId,jdbcType=VARCHAR},
+            #{retryDeadLetter.groupName,jdbcType=VARCHAR}, #{retryDeadLetter.sceneName,jdbcType=VARCHAR},
             #{retryDeadLetter.idempotentId,jdbcType=VARCHAR}, #{retryDeadLetter.bizNo,jdbcType=VARCHAR}, #{retryDeadLetter.executorName,jdbcType=VARCHAR}, #{retryDeadLetter.argsStr,jdbcType=VARCHAR},
             #{retryDeadLetter.extAttrs,jdbcType=VARCHAR}, #{retryDeadLetter.createDt,jdbcType=TIMESTAMP})
         </foreach>
     </insert>
     <select id="countRetryDeadLetterByCreateAt" resultType="int">
-        select
-        count(*)
-        from retry_dead_letter_${partition} where create_dt>= #{startTime} and create_dt &lt;=#{endTime}
+        SELECT count(*)
+        FROM sj_retry_dead_letter_${partition}
+        WHERE create_dt>= #{startTime} and create_dt &lt;=#{endTime}
     </select>
 </mapper>
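
The ${partition} token is spliced directly into the table name, so dead letters live in numbered tables such as sj_retry_dead_letter_0, sj_retry_dead_letter_1, and so on. With partition 0 and an illustrative time window, the count query above comes out as:

    SELECT count(*)
    FROM sj_retry_dead_letter_0
    WHERE create_dt >= '2024-05-01 00:00:00'
      AND create_dt <= '2024-05-13 23:59:59';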

View File

@@ -16,7 +16,7 @@
     </resultMap>
     <insert id="batchInsert" parameterType="java.util.List" useGeneratedKeys="true" keyProperty="id">
-        INSERT INTO retry_summary (namespace_id, group_name, scene_name, trigger_at,
+        INSERT INTO sj_retry_summary (namespace_id, group_name, scene_name, trigger_at,
                                    running_num, finish_num, max_count_num, suspend_num)
         VALUES
         <foreach collection="list" item="item" separator=",">
@@ -34,7 +34,7 @@
     </insert>
     <update id="batchUpdate" parameterType="java.util.List">
-        UPDATE retry_summary AS rt
+        UPDATE sj_retry_summary AS rt
         SET running_num = tt.running_num,
             finish_num = tt.finish_num,
             max_count_num = tt.max_count_num,
@@ -65,7 +65,7 @@
         COALESCE(sum(max_count_num), 0) AS maxCountNum,
         COALESCE(sum(suspend_num), 0) AS suspendNum,
         COALESCE(sum(running_num + finish_num + max_count_num + suspend_num), 0) AS totalNum
-        FROM retry_summary
+        FROM sj_retry_summary
         ${ew.customSqlSegment}
     </select>
@@ -73,7 +73,7 @@
             resultType="com.aizuda.snailjob.template.datasource.persistence.dataobject.DashboardCardResponseDO$RetryTask">
         SELECT
         trigger_at, running_num, finish_num, max_count_num, suspend_num
-        FROM retry_summary
+        FROM sj_retry_summary
         ${ew.customSqlSegment}
         LIMIT 7
     </select>
@@ -87,7 +87,7 @@
         COALESCE(SUM(max_count_num), 0) AS maxCountNum,
         COALESCE(SUM(suspend_num), 0) AS suspendNum,
         COALESCE(SUM(finish_num + running_num + max_count_num + suspend_num), 0) AS total
-        FROM retry_summary
+        FROM sj_retry_summary
         ${ew.customSqlSegment}
         GROUP BY createDt
     </select>
@@ -97,7 +97,7 @@
         SELECT
         CONCAT(group_name, '/', scene_name) name,
         SUM(running_num + finish_num + max_count_num + suspend_num) AS total
-        FROM retry_summary
+        FROM sj_retry_summary
         ${ew.customSqlSegment}
         HAVING SUM(running_num + finish_num + max_count_num + suspend_num) > 0
         ORDER BY total DESC LIMIT 10
@@ -106,9 +106,9 @@
     <select id="retryTaskList"
             resultType="com.aizuda.snailjob.template.datasource.persistence.dataobject.DashboardRetryLineResponseDO$Task">
         SELECT group_name AS groupName,
-        sum(CASE WHEN (scene_status = 1) THEN 1 ELSE 0 END) AS run,
-        count(*) AS total
-        FROM scene_config
+        SUM(CASE WHEN (scene_status = 1) THEN 1 ELSE 0 END) AS run,
+        COUNT(*) AS total
+        FROM sj_retry_scene_config
         ${ew.customSqlSegment}
         GROUP BY namespace_id, group_name
     </select>

View File

@@ -23,7 +23,7 @@
     </sql>
     <!-- 定义批量新增的 SQL 映射 -->
     <insert id="batchInsert" parameterType="java.util.List">
-        INSERT INTO retry_task_log (unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name,
+        INSERT INTO sj_retry_task_log (unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name,
                                     args_str, ext_attrs, task_type, create_dt, namespace_id)
         VALUES
         <foreach collection="list" item="item" separator=",">
@@ -43,7 +43,7 @@
         SUM(CASE WHEN (retry_status = 1) THEN 1 ELSE 0 END) AS finishNum,
         SUM(CASE WHEN (retry_status = 2) THEN 1 ELSE 0 END) AS maxCountNum,
         SUM(CASE WHEN (retry_status = 3) THEN 1 ELSE 0 END) AS suspendNum
-        FROM retry_task_log
+        FROM sj_retry_task_log
         ${ew.customSqlSegment}
     </select>
 </mapper>

View File

@@ -12,7 +12,7 @@
     </resultMap>
     <insert id="batchInsert" parameterType="java.util.List">
-        INSERT INTO retry_task_log_message (namespace_id, group_name, unique_id, log_num, message,
+        INSERT INTO sj_retry_task_log_message (namespace_id, group_name, unique_id, log_num, message,
                                             create_dt, real_time)
         VALUES
         <foreach collection="list" item="item" separator=",">
@@ -29,7 +29,7 @@
     </insert>
     <update id="batchUpdate" parameterType="java.util.List">
-        UPDATE retry_task_log_message jlm,
+        UPDATE sj_retry_task_log_message jlm,
         (
         <foreach collection="list" item="item" index="index" separator="UNION ALL">
             SELECT
View File

@@ -1,46 +1,56 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
 <mapper namespace="com.aizuda.snailjob.template.datasource.persistence.mapper.RetryTaskMapper">
     <resultMap id="BaseResultMap" type="com.aizuda.snailjob.template.datasource.persistence.po.RetryTask">
         <id column="id" jdbcType="BIGINT" property="id" />
         <result column="namespace_id" jdbcType="VARCHAR" property="namespaceId"/>
         <result column="unique_id" jdbcType="VARCHAR" property="uniqueId"/>
         <result column="group_name" jdbcType="VARCHAR" property="groupName" />
         <result column="scene_name" jdbcType="VARCHAR" property="sceneName" />
         <result column="idempotent_id" jdbcType="VARCHAR" property="idempotentId" />
         <result column="biz_no" jdbcType="VARCHAR" property="bizNo" />
         <result column="executor_name" jdbcType="VARCHAR" property="executorName" />
         <result column="args_str" jdbcType="VARCHAR" property="argsStr" />
         <result column="ext_attrs" jdbcType="VARCHAR" property="extAttrs" />
         <result column="next_trigger_at" jdbcType="TIMESTAMP" property="nextTriggerAt" />
         <result column="retry_count" jdbcType="TINYINT" property="retryCount" />
         <result column="retry_status" jdbcType="TINYINT" property="retryStatus" />
         <result column="task_type" jdbcType="TINYINT" property="taskType"/>
         <result column="create_dt" jdbcType="TIMESTAMP" property="createDt" />
         <result column="update_dt" jdbcType="TIMESTAMP" property="updateDt" />
     </resultMap>
-    <sql id="Base_Column_List">
-        id, namespace_id, unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name, args_str, ext_attrs, next_trigger_at, retry_count, retry_status,
-        create_dt, update_dt, task_type
-    </sql>
-    <!-- 定义批量新增的 SQL 映射 -->
-    <insert id="batchInsert" parameterType="java.util.List">
-        INSERT INTO retry_task (namespace_id, unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name, args_str, ext_attrs, next_trigger_at, task_type, retry_status, create_dt)
-        VALUES
-        <foreach collection="list" item="item" separator=",">
-            (#{item.namespaceId}, #{item.uniqueId}, #{item.groupName}, #{item.sceneName}, #{item.idempotentId}, #{item.bizNo}, #{item.executorName}, #{item.argsStr}, #{item.extAttrs}, #{item.nextTriggerAt}, #{item.taskType}, #{item.retryStatus}, #{item.createDt})
-        </foreach>
-    </insert>
-    <update id="updateBatchNextTriggerAtById" parameterType="java.util.List">
-        UPDATE retry_task_${partition} AS rt
-        SET next_trigger_at = tt.next_trigger_at
-        FROM (
-        <foreach collection="list" item="item" index="index" separator="UNION ALL">
-            select
-            #{item.nextTriggerAt} as next_trigger_at,
-            #{item.id} as id
-        </foreach>
-        ) AS tt
-        WHERE rt.id = tt.id
-    </update>
+
+    <sql id="Base_Column_List">
+        id, namespace_id, unique_id, group_name, scene_name, idempotent_id, biz_no,
+        executor_name, args_str, ext_attrs, next_trigger_at, retry_count, retry_status,
+        create_dt, update_dt, task_type
+    </sql>
+
+    <!-- 定义批量新增的 SQL 映射 -->
+    <insert id="batchInsert" parameterType="java.util.List">
+        INSERT INTO sj_retry_task (namespace_id, unique_id, group_name, scene_name,
+                                   idempotent_id, biz_no, executor_name, args_str, ext_attrs,
+                                   next_trigger_at, task_type, retry_status, create_dt)
+        VALUES
+        <foreach collection="list" item="item" separator=",">
+            (
+            #{item.namespaceId}, #{item.uniqueId}, #{item.groupName},
+            #{item.sceneName}, #{item.idempotentId}, #{item.bizNo}, #{item.executorName}, #{item.argsStr},
+            #{item.extAttrs}, #{item.nextTriggerAt}, #{item.taskType}, #{item.retryStatus}, #{item.createDt}
+            )
+        </foreach>
+    </insert>
+
+    <update id="updateBatchNextTriggerAtById" parameterType="java.util.List">
+        UPDATE sj_retry_task_${partition} AS rt
+        SET next_trigger_at = tt.next_trigger_at
+        FROM (
+        <foreach collection="list" item="item" index="index" separator="UNION ALL">
+            SELECT
+            #{item.nextTriggerAt} AS next_trigger_at,
+            #{item.id} AS id
+        </foreach>
+        ) AS tt
+        WHERE rt.id = tt.id
+    </update>
 </mapper>
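
In the reworked batchInsert, the <foreach separator=","> emits one parenthesised row per list element, so a two-element list arrives at PostgreSQL as a single multi-row INSERT along these lines (every value is a placeholder; next_trigger_at and create_dt are timestamps, matching the resultMap above):

    INSERT INTO sj_retry_task (namespace_id, unique_id, group_name, scene_name,
                               idempotent_id, biz_no, executor_name, args_str, ext_attrs,
                               next_trigger_at, task_type, retry_status, create_dt)
    VALUES
        ('dev_namespace', 'uid-101', 'demo-group', 'demo-scene', 'idem-1', 'biz-1',
         'com.example.DemoRetryExecutor', '{}', '{}', '2024-05-14 00:00:00', 1, 0, '2024-05-13 23:36:20'),
        ('dev_namespace', 'uid-102', 'demo-group', 'demo-scene', 'idem-2', 'biz-2',
         'com.example.DemoRetryExecutor', '{}', '{}', '2024-05-14 00:01:00', 1, 0, '2024-05-13 23:36:20');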

View File

@@ -18,7 +18,7 @@
     </sql>
     <insert id="batchInsert" parameterType="java.util.List" useGeneratedKeys="true" keyProperty="id">
-        INSERT INTO server_node (namespace_id, group_name, host_id, host_ip, host_port,
+        INSERT INTO sj_server_node (namespace_id, group_name, host_id, host_ip, host_port,
                                  expire_at, node_type, ext_attrs, create_dt)
         VALUES
         <foreach collection="records" item="item" index="index" separator=",">
@@ -37,7 +37,7 @@
     </insert>
     <update id="batchUpdateExpireAt" parameterType="java.util.List">
-        UPDATE server_node AS rt
+        UPDATE sj_server_node AS rt
         SET expire_at = tt.expire_at
         FROM (
         <foreach collection="list" item="item" index="index" separator="UNION ALL">
@@ -54,7 +54,7 @@
     <select id="countActivePod"
             resultType="com.aizuda.snailjob.template.datasource.persistence.dataobject.ActivePodQuantityResponseDO">
         SELECT node_type as nodeType, count(*) as total
-        from server_node
+        from sj_server_node
         ${ew.customSqlSegment}
     </select>
 </mapper>

View File

@@ -16,16 +16,16 @@
         <result column="ext_attrs" property="extAttrs" />
     </resultMap>
     <update id="updateBatchNextTriggerAtById" parameterType="java.util.List">
-        UPDATE workflow AS rt
+        UPDATE sj_workflow AS rt
         SET next_trigger_at = tt.next_trigger_at
         FROM (
         <foreach collection="list" item="item" index="index" separator="UNION ALL">
-            select
-            #{item.nextTriggerAt} as next_trigger_at,
-            #{item.id} as id
+            SELECT
+            #{item.nextTriggerAt} AS next_trigger_at,
+            #{item.id} AS id
         </foreach>
         ) AS tt
         WHERE rt.id = tt.id
     </update>