feat(3.2.0): add MariaDB support to docker

* fix: overwrite the MariaDB mapper.xml files with the latest MySQL mapper.xml
* feat: add a MariaDB connection configuration to application.yml
* feat: add MariaDB support to docker
dhb52 2024-04-01 00:46:24 +00:00 committed by byteblogs168
parent effbe9b0c1
commit 78312b6ee8
12 changed files with 273 additions and 200 deletions

View File

@@ -7,6 +7,7 @@ volumes:
postgres: { }
oracle: { }
sqlserver: { }
mariadb: { }
services:
mysql:
@@ -69,3 +70,17 @@ services:
- ../sql/easy_retry_sqlserver.sql:/tmp/schema.sql:ro
# docker compose exec sqlserver bash /tmp/create_schema.sh
- ./sqlserver/create_schema.sh:/tmp/create_schema.sh:ro
mariadb:
image: mariadb:10.11.7
restart: unless-stopped
environment:
TZ: Asia/Shanghai
MARIADB_ROOT_PASSWORD: root
MARIADB_DATABASE: easy_retry
ports:
- "3308:3306"
volumes:
- mariadb:/var/lib/mysql/
# Inject the initialization script (reusing the MySQL schema)
- ../sql/easy_retry_mysql.sql:/docker-entrypoint-initdb.d/init.sql:ro
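
For reference, a minimal sanity check after the container first initializes, assuming the credentials and 3308:3306 port mapping declared above and that easy_retry_mysql.sql was applied by the entrypoint; the client invocation and expected output are illustrative and not part of the commit:

-- e.g. docker compose exec mariadb mariadb -uroot -proot easy_retry
SELECT VERSION();   -- should report a 10.11.7-MariaDB build
SHOW TABLES;        -- should list the tables created by easy_retry_mysql.sql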

View File

@@ -19,30 +19,30 @@
<!-- Defines the SQL mapping for batch insert -->
<insert id="batchInsert" parameterType="java.util.List">
INSERT INTO job_log_message
(
namespace_id,
group_name,
job_id,
task_batch_id,
task_id,
log_num,
message,
create_dt,
real_time
)
VALUES
<foreach collection="list" item="item" separator=",">
(
#{item.namespaceId},
#{item.groupName},
#{item.jobId},
#{item.taskBatchId},
#{item.taskId},
#{item.logNum},
#{item.message},
#{item.createDt},
#{item.realTime}
namespace_id,
group_name,
job_id,
task_batch_id,
task_id,
log_num,
message,
create_dt,
real_time
)
</foreach>
VALUES
<foreach collection="list" item="item" separator=",">
(
#{item.namespaceId},
#{item.groupName},
#{item.jobId},
#{item.taskBatchId},
#{item.taskId},
#{item.logNum},
#{item.message},
#{item.createDt},
#{item.realTime}
)
</foreach>
</insert>
</mapper>
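
For a two-element list, the foreach above renders to a single multi-row insert along the following lines; MyBatis actually binds the values as prepared-statement parameters, so the literals here are placeholders for readability only:

INSERT INTO job_log_message
    (namespace_id, group_name, job_id, task_batch_id, task_id, log_num, message, create_dt, real_time)
VALUES
    ('dev', 'example_group', 1, 10, 100, 1, '[{"level":"INFO"}]', '2024-04-01 00:00:00', 1711929600000),
    ('dev', 'example_group', 1, 10, 101, 1, '[{"level":"INFO"}]', '2024-04-01 00:00:01', 1711929601000);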

View File

@@ -27,17 +27,16 @@
</resultMap>
<update id="updateBatchNextTriggerAtById" parameterType="java.util.List">
update job rt,
(
<foreach collection="list" item="item" index="index" separator=" union all ">
select
#{item.nextTriggerAt} as next_trigger_at,
#{item.id} as id
</foreach>
) tt
set
rt.next_trigger_at = tt.next_trigger_at
where rt.id = tt.id
UPDATE job rt,
(
<foreach collection="list" item="item" index="index" separator=" UNION ALL ">
SELECT
#{item.nextTriggerAt} AS next_trigger_at,
#{item.id} AS id
</foreach>
) tt
SET rt.next_trigger_at = tt.next_trigger_at
WHERE rt.id = tt.id
</update>
</mapper>
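
The statement joins job against an inline derived table built from the list with UNION ALL, the MySQL/MariaDB-compatible way to batch the per-id updates in one round trip. For two items it expands to roughly the following (ids and trigger times are made up, and the real statement uses bound parameters):

UPDATE job rt,
       (SELECT 1711929600000 AS next_trigger_at, 1 AS id
        UNION ALL
        SELECT 1711929660000 AS next_trigger_at, 2 AS id) tt
SET rt.next_trigger_at = tt.next_trigger_at
WHERE rt.id = tt.id;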

View File

@@ -23,50 +23,49 @@
INSERT INTO job_summary (namespace_id, group_name, business_id, trigger_at, system_task_type,
success_num,fail_num,fail_reason,stop_num,stop_reason, cancel_num,cancel_reason)
VALUES
<foreach collection="list" item="item" separator=",">
(
#{item.namespaceId},
#{item.groupName},
#{item.businessId},
#{item.triggerAt},
#{item.systemTaskType},
#{item.successNum},
#{item.failNum},
#{item.failReason},
#{item.stopNum},
#{item.stopReason},
#{item.cancelNum},
#{item.cancelReason}
)
</foreach>
<foreach collection="list" item="item" separator=",">
(
#{item.namespaceId},
#{item.groupName},
#{item.businessId},
#{item.triggerAt},
#{item.systemTaskType},
#{item.successNum},
#{item.failNum},
#{item.failReason},
#{item.stopNum},
#{item.stopReason},
#{item.cancelNum},
#{item.cancelReason}
)
</foreach>
</insert>
<update id="batchUpdate" parameterType="java.util.List">
UPDATE job_summary rt,
(
<foreach collection="list" item="item" index="index" separator=" UNION ALL ">
SELECT
#{item.successNum} AS success_num,
#{item.failNum} AS fail_num,
#{item.failReason} AS fail_reason,
#{item.stopNum} AS stop_num,
#{item.stopReason} AS stop_reason,
#{item.cancelNum} AS cancel_num,
#{item.cancelReason} AS cancel_reason,
#{item.triggerAt} AS trigger_at,
#{item.businessId} AS business_id
</foreach>
) tt
SET
rt.success_num = tt.success_num,
rt.fail_num = tt.fail_num,
rt.fail_reason = tt.fail_reason,
rt.stop_num = tt.stop_num,
rt.stop_reason = tt.stop_reason,
rt.cancel_num = tt.cancel_num,
rt.cancel_reason = tt.cancel_reason
WHERE rt.trigger_at = tt.trigger_at
AND rt.business_id = tt.business_id
(
<foreach collection="list" item="item" index="index" separator=" UNION ALL ">
SELECT
#{item.successNum} AS success_num,
#{item.failNum} AS fail_num,
#{item.failReason} AS fail_reason,
#{item.stopNum} AS stop_num,
#{item.stopReason} AS stop_reason,
#{item.cancelNum} AS cancel_num,
#{item.cancelReason} AS cancel_reason,
#{item.triggerAt} AS trigger_at,
#{item.businessId} AS business_id
</foreach>
) tt
SET rt.success_num = tt.success_num,
rt.fail_num = tt.fail_num,
rt.fail_reason = tt.fail_reason,
rt.stop_num = tt.stop_num,
rt.stop_reason = tt.stop_reason,
rt.cancel_num = tt.cancel_num,
rt.cancel_reason = tt.cancel_reason
WHERE rt.trigger_at = tt.trigger_at
AND rt.business_id = tt.business_id
</update>
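
Same derived-table pattern as in the job mapper, except that rows are matched on the composite key (trigger_at, business_id) rather than on id; a single-item list expands to approximately the following (all values are illustrative):

UPDATE job_summary rt,
       (SELECT 5 AS success_num, 1 AS fail_num, '' AS fail_reason,
               0 AS stop_num, '' AS stop_reason,
               0 AS cancel_num, '' AS cancel_reason,
               '2024-04-01 00:00:00' AS trigger_at, 42 AS business_id) tt
SET rt.success_num = tt.success_num,
    rt.fail_num = tt.fail_num,
    rt.fail_reason = tt.fail_reason,
    rt.stop_num = tt.stop_num,
    rt.stop_reason = tt.stop_reason,
    rt.cancel_num = tt.cancel_num,
    rt.cancel_reason = tt.cancel_reason
WHERE rt.trigger_at = tt.trigger_at
  AND rt.business_id = tt.business_id;
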
<select id="jobLineList"
@@ -74,10 +73,9 @@
SELECT
DATE_FORMAT(trigger_at, #{dateFormat}) AS createDt,
IFNULL(SUM(success_num), 0) AS success,
IFNULL(SUM(fail_num), 0) AS failNum,
IFNULL(SUM(stop_num), 0) AS stop,
IFNULL(SUM(cancel_num), 0) AS cancel,
IFNULL(SUM(fail_num + stop_num + cancel_num), 0) AS fail,
IFNULL(SUM(fail_num), 0) AS fail,
IFNULL(SUM(success_num + fail_num + stop_num + cancel_num), 0) AS total
FROM job_summary
${ew.customSqlSegment}
@@ -86,11 +84,12 @@
<select id="toJobTask"
resultType="com.aizuda.easy.retry.template.datasource.persistence.dataobject.DashboardCardResponseDO$JobTask">
SELECT IFNULL(sum(success_num), 0) AS successNum,
IFNULL(sum(stop_num), 0) AS stopNum,
IFNULL(sum(cancel_num), 0) AS cancelNum,
IFNULL(sum(fail_num), 0) AS failNum,
IFNULL(sum(success_num + fail_num + stop_num + cancel_num), 0) AS totalNum
SELECT
IFNULL(SUM(success_num), 0) AS successNum,
IFNULL(SUM(stop_num), 0) AS stopNum,
IFNULL(SUM(cancel_num), 0) AS cancelNum,
IFNULL(SUM(fail_num), 0) AS failNum,
IFNULL(SUM(success_num + fail_num + stop_num + cancel_num), 0) AS totalNum
FROM job_summary
${ew.customSqlSegment}
</select>
@@ -104,10 +103,12 @@
<if test="systemTaskType == 4">
CONCAT(group_name, '/', (SELECT workflow_name FROM workflow WHERE id = business_id)) name,
</if>
SUM(fail_num) AS total FROM job_summary
SUM(fail_num) AS total
FROM job_summary
${ew.customSqlSegment}
HAVING total > 0
ORDER BY total DESC LIMIT 10
ORDER BY total DESC
LIMIT 10
</select>
<select id="jobTaskList"

View File

@@ -15,19 +15,30 @@
<result column="task_type" jdbcType="TINYINT" property="taskType"/>
<result column="create_dt" jdbcType="TIMESTAMP" property="createDt" />
</resultMap>
<sql id="Base_Column_List">
id, namespace_id, unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name, args_str, ext_attrs, create_dt, task_type
id, unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name, args_str, ext_attrs, create_dt, task_type
</sql>
<insert id="insertBatch">
insert into retry_dead_letter (namespace_id, unique_id, group_name, scene_name,
idempotent_id, biz_no, executor_name, args_str,
ext_attrs, create_dt
)
values
INSERT INTO retry_dead_letter (namespace_id, unique_id, group_name, scene_name,
idempotent_id, biz_no, executor_name, args_str,
ext_attrs, create_dt
)
VALUES
<foreach collection="retryDeadLetters" item="retryDeadLetter" separator=",">
(#{retryDeadLetter.namespaceId,jdbcType=VARCHAR}, #{retryDeadLetter.uniqueId,jdbcType=VARCHAR}, #{retryDeadLetter.groupName,jdbcType=VARCHAR}, #{retryDeadLetter.sceneName,jdbcType=VARCHAR},
#{retryDeadLetter.idempotentId,jdbcType=VARCHAR}, #{retryDeadLetter.bizNo,jdbcType=VARCHAR}, #{retryDeadLetter.executorName,jdbcType=VARCHAR}, #{retryDeadLetter.argsStr,jdbcType=VARCHAR},
#{retryDeadLetter.extAttrs,jdbcType=VARCHAR}, #{retryDeadLetter.createDt,jdbcType=TIMESTAMP})
(
#{retryDeadLetter.namespaceId,jdbcType=VARCHAR},
#{retryDeadLetter.uniqueId,jdbcType=VARCHAR},
#{retryDeadLetter.groupName,jdbcType=VARCHAR},
#{retryDeadLetter.sceneName,jdbcType=VARCHAR},
#{retryDeadLetter.idempotentId,jdbcType=VARCHAR},
#{retryDeadLetter.bizNo,jdbcType=VARCHAR},
#{retryDeadLetter.executorName,jdbcType=VARCHAR},
#{retryDeadLetter.argsStr,jdbcType=VARCHAR},
#{retryDeadLetter.extAttrs,jdbcType=VARCHAR},
#{retryDeadLetter.createDt,jdbcType=TIMESTAMP}
)
</foreach>
</insert>

View File

@@ -16,8 +16,9 @@
</resultMap>
<insert id="batchInsert" parameterType="java.util.List" useGeneratedKeys="true" keyProperty="id">
INSERT INTO retry_summary (namespace_id, group_name, scene_name, trigger_at,
running_num, finish_num, max_count_num, suspend_num)
INSERT INTO
retry_summary (namespace_id, group_name, scene_name, trigger_at, running_num, finish_num, max_count_num,
suspend_num)
VALUES
<foreach collection="list" item="item" separator=",">
(
@@ -35,44 +36,47 @@
<update id="batchUpdate" parameterType="java.util.List">
UPDATE retry_summary rt,
(
<foreach collection="list" item="item" index="index" separator=" UNION ALL ">
SELECT
#{item.runningNum} AS running_num,
#{item.finishNum} AS finish_num,
#{item.maxCountNum} AS max_count_num,
#{item.suspendNum} AS suspend_num,
#{item.triggerAt} AS trigger_at,
#{item.sceneName} AS scene_name,
#{item.namespaceId} AS namespace_id,
#{item.groupName} AS group_name
</foreach>
) tt
SET rt.running_num = tt.running_num,
rt.finish_num = tt.finish_num,
rt.max_count_num = tt.max_count_num,
rt.suspend_num = tt.suspend_num
WHERE rt.trigger_at = tt.trigger_at
AND rt.group_name = tt.group_name
AND rt.namespace_id = tt.namespace_id
AND rt.scene_name = tt.scene_name
(
<foreach collection="list" item="item" index="index" separator=" UNION ALL ">
SELECT
#{item.runningNum} AS running_num,
#{item.finishNum} AS finish_num,
#{item.maxCountNum} AS max_count_num,
#{item.suspendNum} AS suspend_num,
#{item.triggerAt} AS trigger_at,
#{item.sceneName} AS scene_name,
#{item.namespaceId} AS namespace_id,
#{item.groupName} AS group_name
</foreach>
) tt
SET rt.running_num = tt.running_num,
rt.finish_num = tt.finish_num,
rt.max_count_num = tt.max_count_num,
rt.suspend_num = tt.suspend_num
WHERE rt.trigger_at = tt.trigger_at
AND rt.group_name = tt.group_name
AND rt.namespace_id = tt.namespace_id
AND rt.scene_name = tt.scene_name
</update>
<select id="retryTask"
resultType="com.aizuda.easy.retry.template.datasource.persistence.dataobject.DashboardCardResponseDO$RetryTask">
SELECT ifnull(sum(running_num), 0) AS runningNum,
ifnull(sum(finish_num), 0) AS finishNum,
ifnull(sum(max_count_num), 0) AS maxCountNum,
ifnull(sum(suspend_num), 0) AS suspendNum,
ifnull(sum(running_num + finish_num + max_count_num + suspend_num), 0) AS totalNum
SELECT IFNULL(SUM(running_num), 0) AS runningNum,
IFNULL(SUM(finish_num), 0) AS finishNum,
IFNULL(SUM(max_count_num), 0) AS maxCountNum,
IFNULL(SUM(suspend_num), 0) AS suspendNum,
IFNULL(SUM(running_num + finish_num + max_count_num + suspend_num), 0) AS totalNum
FROM retry_summary
${ew.customSqlSegment}
</select>
<select id="retryTaskBarList"
resultType="com.aizuda.easy.retry.template.datasource.persistence.dataobject.DashboardCardResponseDO$RetryTask">
SELECT
trigger_at, running_num, finish_num, max_count_num, suspend_num
SELECT trigger_at,
running_num,
finish_num,
max_count_num,
suspend_num
FROM retry_summary
${ew.customSqlSegment}
LIMIT 7
@@ -81,12 +85,12 @@
<select id="retryLineList"
resultType="com.aizuda.easy.retry.template.datasource.persistence.dataobject.DashboardLineResponseDO">
SELECT
DATE_FORMAT(create_dt, #{dateFormat}) AS createDt,
IFNULL(SUM(finish_num), 0) AS successNum,
IFNULL(SUM(running_num), 0) AS runningNum,
IFNULL(SUM(max_count_num), 0) AS maxCountNum,
IFNULL(SUM(suspend_num), 0) AS suspendNum,
IFNULL(SUM(finish_num + running_num + max_count_num + suspend_num), 0) AS total
DATE_FORMAT(create_dt, #{dateFormat}) AS createDt,
IFNULL(SUM(finish_num), 0) AS successNum,
IFNULL(SUM(running_num), 0) AS runningNum,
IFNULL(SUM(max_count_num), 0) AS maxCountNum,
IFNULL(SUM(suspend_num), 0) AS suspendNum,
IFNULL(SUM(finish_num + running_num + max_count_num + suspend_num), 0) AS total
FROM retry_summary
${ew.customSqlSegment}
GROUP BY createDt
@@ -94,22 +98,21 @@
<select id="dashboardRank"
resultType="com.aizuda.easy.retry.template.datasource.persistence.dataobject.DashboardRetryLineResponseDO$Rank">
SELECT
CONCAT(group_name, '/', scene_name) `name`,
SUM(running_num + finish_num + max_count_num + suspend_num) AS total
SELECT CONCAT(group_name, '/', scene_name) `name`,
SUM(running_num + finish_num + max_count_num + suspend_num) AS total
FROM retry_summary
${ew.customSqlSegment}
HAVING total > 0
ORDER BY total DESC LIMIT 10
ORDER BY total DESC
LIMIT 10
</select>
<select id="retryTaskList"
resultType="com.aizuda.easy.retry.template.datasource.persistence.dataobject.DashboardRetryLineResponseDO$Task">
SELECT group_name AS groupName,
sum(CASE WHEN (scene_status = 1) THEN 1 ELSE 0 END) AS run,
count(*) AS total
SUM(CASE WHEN (scene_status = 1) THEN 1 ELSE 0 END) AS run,
COUNT(*) AS total
FROM scene_config
WHERE namespace_id = #{namespaceId}
${ew.customSqlSegment}
GROUP BY namespace_id, group_name
</select>

View File

@@ -17,24 +17,25 @@
<result column="create_dt" jdbcType="TIMESTAMP" property="createDt"/>
</resultMap>
<sql id="Base_Column_List">
id
, unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name, args_str, ext_attrs, retry_status,
create_dt, task_type, namespace_id
id, unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name, args_str, ext_attrs, retry_status,
create_dt, task_type, namespace_id
</sql>
<!-- Defines the SQL mapping for batch insert -->
<insert id="batchInsert" parameterType="java.util.List">
INSERT INTO retry_task_log (unique_id, group_name, scene_name, idempotent_id, biz_no, executor_name,
args_str, ext_attrs, task_type, create_dt, namespace_id)
args_str, ext_attrs, task_type, create_dt, namespace_id)
VALUES
<foreach collection="list" item="item" separator=",">
(#{item.uniqueId}, #{item.groupName}, #{item.sceneName}, #{item.idempotentId},
#{item.bizNo}, #{item.executorName}, #{item.argsStr}, #{item.extAttrs},
#{item.taskType}, #{item.createDt}, #{item.namespaceId})
#{item.bizNo}, #{item.executorName}, #{item.argsStr}, #{item.extAttrs},
#{item.taskType}, #{item.createDt}, #{item.namespaceId})
</foreach>
</insert>
<!-- Retry statistics -->
<select id="retrySummaryRetryTaskLogList"
resultType="com.aizuda.easy.retry.template.datasource.persistence.dataobject.DashboardRetryResponseDO">
resultType="com.aizuda.easy.retry.template.datasource.persistence.dataobject.DashboardRetryResponseDO">
SELECT namespace_id AS namespaceId,
group_name AS groupName,
scene_name AS sceneName,

View File

@@ -8,7 +8,42 @@
<result column="group_name" property="groupName" />
<result column="unique_id" property="uniqueId" />
<result column="create_dt" property="createDt" />
<result column="message" property="message" />
<result column="log_num" property="logNum"/>
<result column="message" property="message"/>
<result column="real_time" property="realTime"/>
<result column="client_info" property="clientInfo"/>
</resultMap>
<!-- Defines the SQL mapping for batch insert -->
<insert id="batchInsert" parameterType="java.util.List">
INSERT INTO retry_task_log_message (namespace_id, group_name, unique_id, log_num, message,
create_dt, real_time, client_info)
VALUES
<foreach collection="list" item="item" separator=",">
(
#{item.namespaceId},
#{item.groupName},
#{item.uniqueId},
#{item.logNum},
#{item.message},
#{item.createDt},
#{item.realTime},
#{item.clientInfo}
)
</foreach>
</insert>
<update id="batchUpdate" parameterType="java.util.List">
UPDATE retry_task_log_message jlm,
(
<foreach collection="list" item="item" index="index" separator=" UNION ALL ">
SELECT
#{item.message} AS message,
#{item.logNum} AS log_num,
#{item.id} AS id
</foreach>
) tt
SET jlm.message = tt.message, jlm.log_num = tt.log_num
WHERE jlm.id = tt.id
</update>
</mapper>

View File

@@ -31,18 +31,16 @@
(#{item.namespaceId}, #{item.uniqueId}, #{item.groupName}, #{item.sceneName}, #{item.idempotentId}, #{item.bizNo}, #{item.executorName}, #{item.argsStr}, #{item.extAttrs}, #{item.nextTriggerAt}, #{item.taskType}, #{item.retryStatus}, #{item.createDt})
</foreach>
</insert>
<update id="updateBatchNextTriggerAtById" parameterType="java.util.List">
update retry_task_${partition} rt,
(
<foreach collection="list" item="item" index="index" separator=" union all ">
select
#{item.nextTriggerAt} as next_trigger_at,
#{item.id} as id
</foreach>
) tt
set
rt.next_trigger_at = tt.next_trigger_at
where rt.id = tt.id
UPDATE retry_task_${partition} rt,
(
<foreach collection="list" item="item" index="index" separator=" UNION ALL ">
SELECT
#{item.nextTriggerAt} AS next_trigger_at,
#{item.id} AS id
</foreach>
) tt
SET rt.next_trigger_at = tt.next_trigger_at
WHERE rt.id = tt.id
</update>
</mapper>

View File

@@ -1,22 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.aizuda.easy.retry.template.datasource.persistence.mapper.ServerNodeMapper">
<resultMap id="BaseResultMap" type="com.aizuda.easy.retry.template.datasource.persistence.po.ServerNode">
<id column="id" jdbcType="BIGINT" property="id" />
<result column="group_name" jdbcType="VARCHAR" property="groupName" />
<result column="host_id" jdbcType="VARCHAR" property="hostId" />
<result column="host_ip" jdbcType="VARCHAR" property="hostIp" />
<result column="host_port" jdbcType="INTEGER" property="hostPort" />
<result column="expire_at" jdbcType="TIMESTAMP" property="expireAt" />
<result column="node_type" jdbcType="TINYINT" property="nodeType" />
<result column="context_path" jdbcType="VARCHAR" property="contextPath" />
<result column="ext_attrs" jdbcType="VARCHAR" property="extAttrs" />
<result column="create_dt" jdbcType="TIMESTAMP" property="createDt" />
<result column="update_dt" jdbcType="TIMESTAMP" property="updateDt" />
</resultMap>
<sql id="Base_Column_List">
id, group_name, context_path, host_id, host_ip, host_port, expire_at, node_type,create_dt,update_dt
</sql>
<resultMap id="BaseResultMap" type="com.aizuda.easy.retry.template.datasource.persistence.po.ServerNode">
<id column="id" jdbcType="BIGINT" property="id"/>
<result column="namespace_id" jdbcType="VARCHAR" property="namespaceId"/>
<result column="group_name" jdbcType="VARCHAR" property="groupName"/>
<result column="host_id" jdbcType="VARCHAR" property="hostId"/>
<result column="host_ip" jdbcType="VARCHAR" property="hostIp"/>
<result column="host_port" jdbcType="INTEGER" property="hostPort"/>
<result column="expire_at" jdbcType="TIMESTAMP" property="expireAt"/>
<result column="node_type" jdbcType="TINYINT" property="nodeType"/>
<result column="context_path" jdbcType="VARCHAR" property="contextPath"/>
<result column="ext_attrs" jdbcType="VARCHAR" property="extAttrs"/>
<result column="create_dt" jdbcType="TIMESTAMP" property="createDt"/>
<result column="update_dt" jdbcType="TIMESTAMP" property="updateDt"/>
</resultMap>
<sql id="Base_Column_List">
id, namespace_id, group_name, context_path, host_id, host_ip, host_port,
expire_at, node_type,create_dt,update_dt
</sql>
<insert id="batchInsert" parameterType="java.util.List" useGeneratedKeys="true" keyProperty="id">
INSERT INTO server_node (namespace_id, group_name, host_id, host_ip, host_port,
@@ -40,23 +43,26 @@
<update id="batchUpdateExpireAt" parameterType="java.util.List">
UPDATE server_node rt,
(
<foreach collection="list" item="item" index="index" separator=" UNION ALL ">
SELECT
#{item.expireAt} AS expire_at,
#{item.contextPath} AS context_path,
#{item.hostId} AS host_id,
#{item.hostIp} AS host_ip
</foreach>
) tt
SET rt.expire_at = tt.expire_at,
rt.context_path = tt.context_path
WHERE rt.host_id = tt.host_id and rt.host_ip = tt.host_ip
(
<foreach collection="list" item="item" index="index" separator=" UNION ALL ">
SELECT
#{item.expireAt} AS expire_at,
#{item.contextPath} AS context_path,
#{item.hostId} AS host_id,
#{item.hostIp} AS host_ip
</foreach>
) tt
SET rt.expire_at = tt.expire_at,
rt.context_path = tt.context_path
WHERE rt.host_id = tt.host_id
AND rt.host_ip = tt.host_ip
</update>
<select id="countActivePod" resultType="com.aizuda.easy.retry.template.datasource.persistence.dataobject.ActivePodQuantityResponseDO">
SELECT node_type as nodeType, count(*) as total
from server_node
${ew.customSqlSegment}
</select>
<select id="countActivePod"
resultType="com.aizuda.easy.retry.template.datasource.persistence.dataobject.ActivePodQuantityResponseDO">
SELECT node_type AS nodeType,
COUNT(*) AS total
FROM server_node
${ew.customSqlSegment}
</select>
</mapper>

View File

@@ -15,19 +15,18 @@
<result column="deleted" property="deleted" />
<result column="ext_attrs" property="extAttrs" />
</resultMap>
<update id="updateBatchNextTriggerAtById" parameterType="java.util.List">
update workflow rt,
(
<foreach collection="list" item="item" index="index" separator=" union all ">
select
#{item.nextTriggerAt} as next_trigger_at,
#{item.id} as id
</foreach>
) tt
set
rt.next_trigger_at = tt.next_trigger_at
where rt.id = tt.id
UPDATE workflow rt,
(
<foreach collection="list" item="item" index="index" separator=" UNION ALL ">
SELECT
#{item.nextTriggerAt} AS next_trigger_at,
#{item.id} AS id
</foreach>
) tt
SET rt.next_trigger_at = tt.next_trigger_at
WHERE rt.id = tt.id
</update>
</mapper>

View File

@@ -27,6 +27,11 @@ spring:
# url: jdbc:sqlserver://localhost:1433;DatabaseName=easy_retry;SelectMethod=cursor;encrypt=false;rewriteBatchedStatements=true
# username: SA
# password: EasyRetry@24
## mariadb
# driver-class-name: org.mariadb.jdbc.Driver
# url: jdbc:mariadb://localhost:3308/easy_retry?useSSL=false&characterEncoding=utf8&useUnicode=true
# username: root
# password: root
type: com.zaxxer.hikari.HikariDataSource
hikari:
connection-timeout: 30000