feat: snail-job_1.0.0 postgres 迁移

This commit is contained in:
dhb52 2024-05-13 23:36:20 +08:00
parent 76ad7b4e4b
commit a78b1eece4
11 changed files with 808 additions and 778 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -17,7 +17,7 @@
</resultMap>
<!-- 定义批量新增的 SQL 映射 -->
<insert id="batchInsert" parameterType="java.util.List">
INSERT INTO job_log_message
INSERT INTO sj_job_log_message
(
namespace_id,
group_name,

View File

@@ -27,16 +27,16 @@
</resultMap>
<update id="updateBatchNextTriggerAtById" parameterType="java.util.List">
UPDATE job AS rt
SET next_trigger_at = tt.next_trigger_at
FROM (
<foreach collection="list" item="item" index="index" separator="UNION ALL">
select
#{item.nextTriggerAt} as next_trigger_at,
#{item.id} as id
</foreach>
) AS tt
WHERE rt.id = tt.id
UPDATE sj_job AS rt
SET next_trigger_at = tt.next_trigger_at
FROM (
<foreach collection="list" item="item" index="index" separator="UNION ALL">
select
#{item.nextTriggerAt} as next_trigger_at,
#{item.id} as id
</foreach>
) AS tt
WHERE rt.id = tt.id
</update>
</mapper>

View File

@@ -20,7 +20,7 @@
</resultMap>
<insert id="batchInsert" parameterType="java.util.List" useGeneratedKeys="true" keyProperty="id">
INSERT INTO job_summary (namespace_id, group_name, business_id, trigger_at, system_task_type,
INSERT INTO sj_job_summary (namespace_id, group_name, business_id, trigger_at, system_task_type,
success_num,fail_num,fail_reason,stop_num,stop_reason, cancel_num,cancel_reason)
VALUES
<foreach collection="list" item="item" separator=",">
@@ -42,7 +42,7 @@
</insert>
<update id="batchUpdate" parameterType="java.util.List">
UPDATE job_summary AS rt
UPDATE sj_job_summary AS rt
SET success_num = tt.success_num,
fail_num = tt.fail_num,
fail_reason = tt.fail_reason,
@@ -78,7 +78,7 @@
COALESCE(SUM(cancel_num), 0) AS cancel,
COALESCE(SUM(fail_num + stop_num + cancel_num), 0) AS fail,
COALESCE(SUM(success_num + fail_num + stop_num + cancel_num), 0) AS total
FROM job_summary
FROM sj_job_summary
${ew.customSqlSegment}
GROUP BY createDt
</select>
@@ -90,7 +90,7 @@
COALESCE(sum(cancel_num), 0) AS cancelNum,
COALESCE(sum(fail_num), 0) AS failNum,
COALESCE(sum(success_num + fail_num + stop_num + cancel_num), 0) AS totalNum
FROM job_summary
FROM sj_job_summary
${ew.customSqlSegment}
</select>
@@ -98,13 +98,13 @@
resultType="com.aizuda.snailjob.template.datasource.persistence.dataobject.DashboardRetryLineResponseDO$Rank">
SELECT
<if test="systemTaskType == 3">
CONCAT(group_name, '/', (SELECT job_name FROM job WHERE id = business_id)) name,
CONCAT(group_name, '/', (SELECT job_name FROM sj_job WHERE id = business_id)) name,
</if>
<if test="systemTaskType == 4">
CONCAT(group_name, '/', (SELECT workflow_name FROM workflow WHERE id = business_id)) name,
CONCAT(group_name, '/', (SELECT workflow_name FROM sj_workflow WHERE id = business_id)) name,
</if>
SUM(fail_num) AS total
FROM job_summary
FROM sj_job_summary
${ew.customSqlSegment}
HAVING SUM(fail_num) > 0
ORDER BY total DESC LIMIT 10
@@ -115,7 +115,7 @@
SELECT group_name AS groupName,
SUM(CASE WHEN (job_status = 1) THEN 1 ELSE 0 END) AS run,
count(*) AS total
FROM job
FROM sj_job
${ew.customSqlSegment}
GROUP BY namespace_id, group_name
</select>

View File

@@ -19,21 +19,22 @@
id, namespace_id, unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name, args_str, ext_attrs, create_dt, task_type
</sql>
<insert id="insertBatch">
insert into retry_dead_letter (namespace_id, unique_id, group_name, scene_name,
idempotent_id, biz_no, executor_name, args_str,
ext_attrs, create_dt
)
values
INSERT INTO sj_retry_dead_letter (namespace_id, unique_id, group_name, scene_name,
idempotent_id, biz_no, executor_name, args_str,
ext_attrs, create_dt)
VALUES
<foreach collection="retryDeadLetters" item="retryDeadLetter" separator=",">
(#{retryDeadLetter.namespaceId,jdbcType=VARCHAR}, #{retryDeadLetter.uniqueId,jdbcType=VARCHAR}, #{retryDeadLetter.groupName,jdbcType=VARCHAR}, #{retryDeadLetter.sceneName,jdbcType=VARCHAR},
(
#{retryDeadLetter.namespaceId,jdbcType=VARCHAR}, #{retryDeadLetter.uniqueId,jdbcType=VARCHAR},
#{retryDeadLetter.groupName,jdbcType=VARCHAR}, #{retryDeadLetter.sceneName,jdbcType=VARCHAR},
#{retryDeadLetter.idempotentId,jdbcType=VARCHAR}, #{retryDeadLetter.bizNo,jdbcType=VARCHAR}, #{retryDeadLetter.executorName,jdbcType=VARCHAR}, #{retryDeadLetter.argsStr,jdbcType=VARCHAR},
#{retryDeadLetter.extAttrs,jdbcType=VARCHAR}, #{retryDeadLetter.createDt,jdbcType=TIMESTAMP})
</foreach>
</insert>
<select id="countRetryDeadLetterByCreateAt" resultType="int">
select
count(*)
from retry_dead_letter_${partition} where create_dt>= #{startTime} and create_dt &lt;=#{endTime}
SELECT count(*)
FROM sj_retry_dead_letter_${partition}
WHERE create_dt>= #{startTime} and create_dt &lt;=#{endTime}
</select>
</mapper>

View File

@@ -16,7 +16,7 @@
</resultMap>
<insert id="batchInsert" parameterType="java.util.List" useGeneratedKeys="true" keyProperty="id">
INSERT INTO retry_summary (namespace_id, group_name, scene_name, trigger_at,
INSERT INTO sj_retry_summary (namespace_id, group_name, scene_name, trigger_at,
running_num, finish_num, max_count_num, suspend_num)
VALUES
<foreach collection="list" item="item" separator=",">
@@ -34,7 +34,7 @@
</insert>
<update id="batchUpdate" parameterType="java.util.List">
UPDATE retry_summary AS rt
UPDATE sj_retry_summary AS rt
SET running_num = tt.running_num,
finish_num = tt.finish_num,
max_count_num = tt.max_count_num,
@@ -65,7 +65,7 @@
COALESCE(sum(max_count_num), 0) AS maxCountNum,
COALESCE(sum(suspend_num), 0) AS suspendNum,
COALESCE(sum(running_num + finish_num + max_count_num + suspend_num), 0) AS totalNum
FROM retry_summary
FROM sj_retry_summary
${ew.customSqlSegment}
</select>
@@ -73,7 +73,7 @@
resultType="com.aizuda.snailjob.template.datasource.persistence.dataobject.DashboardCardResponseDO$RetryTask">
SELECT
trigger_at, running_num, finish_num, max_count_num, suspend_num
FROM retry_summary
FROM sj_retry_summary
${ew.customSqlSegment}
LIMIT 7
</select>
@@ -87,7 +87,7 @@
COALESCE(SUM(max_count_num), 0) AS maxCountNum,
COALESCE(SUM(suspend_num), 0) AS suspendNum,
COALESCE(SUM(finish_num + running_num + max_count_num + suspend_num), 0) AS total
FROM retry_summary
FROM sj_retry_summary
${ew.customSqlSegment}
GROUP BY createDt
</select>
@@ -97,7 +97,7 @@
SELECT
CONCAT(group_name, '/', scene_name) name,
SUM(running_num + finish_num + max_count_num + suspend_num) AS total
FROM retry_summary
FROM sj_retry_summary
${ew.customSqlSegment}
HAVING SUM(running_num + finish_num + max_count_num + suspend_num) > 0
ORDER BY total DESC LIMIT 10
@@ -106,9 +106,9 @@
<select id="retryTaskList"
resultType="com.aizuda.snailjob.template.datasource.persistence.dataobject.DashboardRetryLineResponseDO$Task">
SELECT group_name AS groupName,
sum(CASE WHEN (scene_status = 1) THEN 1 ELSE 0 END) AS run,
count(*) AS total
FROM scene_config
SUM(CASE WHEN (scene_status = 1) THEN 1 ELSE 0 END) AS run,
COUNT(*) AS total
FROM sj_retry_scene_config
${ew.customSqlSegment}
GROUP BY namespace_id, group_name
</select>

View File

@@ -23,7 +23,7 @@
</sql>
<!-- 定义批量新增的 SQL 映射 -->
<insert id="batchInsert" parameterType="java.util.List">
INSERT INTO retry_task_log (unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name,
INSERT INTO sj_retry_task_log (unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name,
args_str, ext_attrs, task_type, create_dt, namespace_id)
VALUES
<foreach collection="list" item="item" separator=",">
@@ -43,7 +43,7 @@
SUM(CASE WHEN (retry_status = 1) THEN 1 ELSE 0 END) AS finishNum,
SUM(CASE WHEN (retry_status = 2) THEN 1 ELSE 0 END) AS maxCountNum,
SUM(CASE WHEN (retry_status = 3) THEN 1 ELSE 0 END) AS suspendNum
FROM retry_task_log
FROM sj_retry_task_log
${ew.customSqlSegment}
</select>
</mapper>

View File

@@ -12,7 +12,7 @@
</resultMap>
<insert id="batchInsert" parameterType="java.util.List">
INSERT INTO retry_task_log_message (namespace_id, group_name, unique_id, log_num, message,
INSERT INTO sj_retry_task_log_message (namespace_id, group_name, unique_id, log_num, message,
create_dt, real_time)
VALUES
<foreach collection="list" item="item" separator=",">
@@ -29,7 +29,7 @@
</insert>
<update id="batchUpdate" parameterType="java.util.List">
UPDATE retry_task_log_message jlm,
UPDATE sj_retry_task_log_message jlm,
(
<foreach collection="list" item="item" index="index" separator="UNION ALL">
SELECT

View File

@@ -1,46 +1,56 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.aizuda.snailjob.template.datasource.persistence.mapper.RetryTaskMapper">
<resultMap id="BaseResultMap" type="com.aizuda.snailjob.template.datasource.persistence.po.RetryTask">
<id column="id" jdbcType="BIGINT" property="id" />
<result column="namespace_id" jdbcType="VARCHAR" property="namespaceId"/>
<result column="unique_id" jdbcType="VARCHAR" property="uniqueId"/>
<result column="group_name" jdbcType="VARCHAR" property="groupName" />
<result column="scene_name" jdbcType="VARCHAR" property="sceneName" />
<result column="idempotent_id" jdbcType="VARCHAR" property="idempotentId" />
<result column="biz_no" jdbcType="VARCHAR" property="bizNo" />
<result column="executor_name" jdbcType="VARCHAR" property="executorName" />
<result column="args_str" jdbcType="VARCHAR" property="argsStr" />
<result column="ext_attrs" jdbcType="VARCHAR" property="extAttrs" />
<result column="next_trigger_at" jdbcType="TIMESTAMP" property="nextTriggerAt" />
<result column="retry_count" jdbcType="TINYINT" property="retryCount" />
<result column="retry_status" jdbcType="TINYINT" property="retryStatus" />
<result column="task_type" jdbcType="TINYINT" property="taskType"/>
<result column="create_dt" jdbcType="TIMESTAMP" property="createDt" />
<result column="update_dt" jdbcType="TIMESTAMP" property="updateDt" />
</resultMap>
<sql id="Base_Column_List">
id, namespace_id, unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name, args_str, ext_attrs, next_trigger_at, retry_count, retry_status,
create_dt, update_dt, task_type
</sql>
<!-- 定义批量新增的 SQL 映射 -->
<insert id="batchInsert" parameterType="java.util.List">
INSERT INTO retry_task (namespace_id, unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name, args_str, ext_attrs, next_trigger_at, task_type, retry_status, create_dt)
VALUES
<foreach collection="list" item="item" separator=",">
(#{item.namespaceId}, #{item.uniqueId}, #{item.groupName}, #{item.sceneName}, #{item.idempotentId}, #{item.bizNo}, #{item.executorName}, #{item.argsStr}, #{item.extAttrs}, #{item.nextTriggerAt}, #{item.taskType}, #{item.retryStatus}, #{item.createDt})
</foreach>
</insert>
<update id="updateBatchNextTriggerAtById" parameterType="java.util.List">
UPDATE retry_task_${partition} AS rt
SET next_trigger_at = tt.next_trigger_at
FROM (
<foreach collection="list" item="item" index="index" separator="UNION ALL">
select
#{item.nextTriggerAt} as next_trigger_at,
#{item.id} as id
</foreach>
) AS tt
WHERE rt.id = tt.id
</update>
<resultMap id="BaseResultMap" type="com.aizuda.snailjob.template.datasource.persistence.po.RetryTask">
<id column="id" jdbcType="BIGINT" property="id" />
<result column="namespace_id" jdbcType="VARCHAR" property="namespaceId"/>
<result column="unique_id" jdbcType="VARCHAR" property="uniqueId"/>
<result column="group_name" jdbcType="VARCHAR" property="groupName" />
<result column="scene_name" jdbcType="VARCHAR" property="sceneName" />
<result column="idempotent_id" jdbcType="VARCHAR" property="idempotentId" />
<result column="biz_no" jdbcType="VARCHAR" property="bizNo" />
<result column="executor_name" jdbcType="VARCHAR" property="executorName" />
<result column="args_str" jdbcType="VARCHAR" property="argsStr" />
<result column="ext_attrs" jdbcType="VARCHAR" property="extAttrs" />
<result column="next_trigger_at" jdbcType="TIMESTAMP" property="nextTriggerAt" />
<result column="retry_count" jdbcType="TINYINT" property="retryCount" />
<result column="retry_status" jdbcType="TINYINT" property="retryStatus" />
<result column="task_type" jdbcType="TINYINT" property="taskType"/>
<result column="create_dt" jdbcType="TIMESTAMP" property="createDt" />
<result column="update_dt" jdbcType="TIMESTAMP" property="updateDt" />
</resultMap>
<sql id="Base_Column_List">
id, namespace_id, unique_id, group_name, scene_name, idempotent_id, biz_no,
executor_name, args_str, ext_attrs, next_trigger_at, retry_count, retry_status,
create_dt, update_dt, task_type
</sql>
<!-- 定义批量新增的 SQL 映射 -->
<insert id="batchInsert" parameterType="java.util.List">
INSERT INTO sj_retry_task (namespace_id, unique_id, group_name, scene_name,
idempotent_id, biz_no, executor_name, args_str, ext_attrs,
next_trigger_at, task_type, retry_status, create_dt)
VALUES
<foreach collection="list" item="item" separator=",">
(
#{item.namespaceId}, #{item.uniqueId}, #{item.groupName},
#{item.sceneName}, #{item.idempotentId}, #{item.bizNo}, #{item.executorName}, #{item.argsStr},
#{item.extAttrs}, #{item.nextTriggerAt}, #{item.taskType}, #{item.retryStatus}, #{item.createDt}
)
</foreach>
</insert>
<update id="updateBatchNextTriggerAtById" parameterType="java.util.List">
UPDATE sj_retry_task_${partition} AS rt
SET next_trigger_at = tt.next_trigger_at
FROM (
<foreach collection="list" item="item" index="index" separator="UNION ALL">
SELECT
#{item.nextTriggerAt} AS next_trigger_at,
#{item.id} AS id
</foreach>
) AS tt
WHERE rt.id = tt.id
</update>
</mapper>

View File

@@ -18,7 +18,7 @@
</sql>
<insert id="batchInsert" parameterType="java.util.List" useGeneratedKeys="true" keyProperty="id">
INSERT INTO server_node (namespace_id, group_name, host_id, host_ip, host_port,
INSERT INTO sj_server_node (namespace_id, group_name, host_id, host_ip, host_port,
expire_at, node_type, ext_attrs, create_dt)
VALUES
<foreach collection="records" item="item" index="index" separator=",">
@@ -37,7 +37,7 @@
</insert>
<update id="batchUpdateExpireAt" parameterType="java.util.List">
UPDATE server_node AS rt
UPDATE sj_server_node AS rt
SET expire_at = tt.expire_at
FROM (
<foreach collection="list" item="item" index="index" separator="UNION ALL">
@@ -54,7 +54,7 @@
<select id="countActivePod"
resultType="com.aizuda.snailjob.template.datasource.persistence.dataobject.ActivePodQuantityResponseDO">
SELECT node_type as nodeType, count(*) as total
from server_node
from sj_server_node
${ew.customSqlSegment}
</select>
</mapper>

View File

@@ -16,16 +16,16 @@
<result column="ext_attrs" property="extAttrs" />
</resultMap>
<update id="updateBatchNextTriggerAtById" parameterType="java.util.List">
UPDATE workflow AS rt
SET next_trigger_at = tt.next_trigger_at
FROM (
<foreach collection="list" item="item" index="index" separator="UNION ALL">
select
#{item.nextTriggerAt} as next_trigger_at,
#{item.id} as id
</foreach>
) AS tt
WHERE rt.id = tt.id
UPDATE sj_workflow AS rt
SET next_trigger_at = tt.next_trigger_at
FROM (
<foreach collection="list" item="item" index="index" separator="UNION ALL">
SELECT
#{item.nextTriggerAt} AS next_trigger_at,
#{item.id} AS id
</foreach>
) AS tt
WHERE rt.id = tt.id
</update>