diff --git a/others/powerjob-mysql.sql b/others/powerjob-mysql.sql
index fec1d2ff..27568b18 100644
--- a/others/powerjob-mysql.sql
+++ b/others/powerjob-mysql.sql
@@ -1,3 +1,22 @@
+/*
+ 官方 SQL 仅基于特定版本(MySQL8)导出,不一定兼容其他数据库,也不一定兼容其他版本。此 SQL 仅供参考。
+ 如果您的数据库无法使用此 SQL,建议使用 SpringDataJPA 自带的建表能力,先在开发环境直连测试库自动建表,然后自行导出相关的 SQL 即可
+
+ Navicat Premium Data Transfer
+
+ Source Server : Local@3306
+ Source Server Type : MySQL
+ Source Server Version : 80300 (8.3.0)
+ Source Host : localhost:3306
+ Source Schema : powerjob4
+
+ Target Server Type : MySQL
+ Target Server Version : 80300 (8.3.0)
+ File Encoding : 65001
+
+ Date: 02/03/2024 18:51:36
+*/
+
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
@@ -5,260 +24,224 @@ SET FOREIGN_KEY_CHECKS = 0;
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
-CREATE TABLE `app_info`
-(
- `id` bigint NOT NULL AUTO_INCREMENT COMMENT '应用ID',
- `app_name` varchar(128) not NULL COMMENT '应用名称',
- `current_server` varchar(255) default null COMMENT 'Server地址,用于负责调度应用的ActorSystem地址',
- `gmt_create` datetime not null COMMENT '创建时间',
- `gmt_modified` datetime not null COMMENT '更新时间',
- `password` varchar(255) not null COMMENT '应用密码',
- PRIMARY KEY (`id`),
- UNIQUE KEY `uidx01_app_info` (`app_name`)
-) ENGINE = InnoDB
- AUTO_INCREMENT = 1
- DEFAULT CHARSET = utf8mb4
- COLLATE = utf8mb4_general_ci COMMENT ='应用表';
+CREATE TABLE `app_info` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `app_name` varchar(255) DEFAULT NULL,
+ `current_server` varchar(255) DEFAULT NULL,
+ `gmt_create` datetime(6) DEFAULT NULL,
+ `gmt_modified` datetime(6) DEFAULT NULL,
+ `password` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `uidx01_app_info` (`app_name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
-
DROP TABLE IF EXISTS `container_info`;
-CREATE TABLE `container_info`
-(
- `id` bigint NOT NULL AUTO_INCREMENT COMMENT '容器ID',
- `app_id` bigint not null COMMENT '应用ID',
- `container_name` varchar(128) not null COMMENT '容器名称',
- `gmt_create` datetime not null COMMENT '创建时间',
- `gmt_modified` datetime not null COMMENT '更新时间',
- `last_deploy_time` datetime DEFAULT NULL COMMENT '上次部署时间',
- `source_info` varchar(255) DEFAULT NULL COMMENT '资源信息,内容取决于source_type\n1、FatJar -> String\n2、Git -> JSON,{"repo”:””仓库,”branch”:”分支”,”username”:”账号,”password”:”密码”}',
- `source_type` int not null COMMENT '资源类型,1:FatJar/2:Git',
- `status` int not null COMMENT '状态,1:正常ENABLE/2:已禁用DISABLE/99:已删除DELETED',
- `version` varchar(255) default null COMMENT '版本',
- PRIMARY KEY (`id`),
- KEY `idx01_container_info` (`app_id`)
-) ENGINE = InnoDB
- DEFAULT CHARSET = utf8mb4
- COLLATE = utf8mb4_general_ci COMMENT ='容器表';
+CREATE TABLE `container_info` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `app_id` bigint DEFAULT NULL,
+ `container_name` varchar(255) DEFAULT NULL,
+ `gmt_create` datetime(6) DEFAULT NULL,
+ `gmt_modified` datetime(6) DEFAULT NULL,
+ `last_deploy_time` datetime(6) DEFAULT NULL,
+ `source_info` varchar(255) DEFAULT NULL,
+ `source_type` int DEFAULT NULL,
+ `status` int DEFAULT NULL,
+ `version` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `idx01_container_info` (`app_id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
DROP TABLE IF EXISTS `instance_info`;
-CREATE TABLE `instance_info`
-(
- `id` bigint NOT NULL AUTO_INCREMENT COMMENT '任务实例ID',
- `app_id` bigint not null COMMENT '应用ID',
- `instance_id` bigint not null COMMENT '任务实例ID',
- `type` int not NULL COMMENT '任务实例类型,1:普通NORMAL/2:工作流WORKFLOW',
- `job_id` bigint not NULL COMMENT '任务ID',
- `instance_params` longtext COMMENT '任务动态参数',
- `job_params` longtext COMMENT '任务静态参数',
- `actual_trigger_time` bigint default NULL COMMENT '实际触发时间',
- `expected_trigger_time` bigint DEFAULT NULL COMMENT '计划触发时间',
- `finished_time` bigint DEFAULT NULL COMMENT '执行结束时间',
- `last_report_time` bigint DEFAULT NULL COMMENT '最后上报时间',
- `result` longtext COMMENT '执行结果',
- `running_times` bigint DEFAULT NULL COMMENT '总执行次数,用于重试判断',
- `status` int not NULL COMMENT '任务状态,1:等待派发WAITING_DISPATCH/2:等待Worker接收WAITING_WORKER_RECEIVE/3:运行中RUNNING/4:失败FAILED/5:成功SUCCEED/9:取消CANCELED/10:手动停止STOPPED',
- `task_tracker_address` varchar(255) DEFAULT NULL COMMENT 'TaskTracker地址',
- `wf_instance_id` bigint DEFAULT NULL COMMENT '工作流实例ID',
- `additional_data` longtext comment '附加信息 (JSON)',
- `gmt_create` datetime not NULL COMMENT '创建时间',
- `gmt_modified` datetime not NULL COMMENT '更新时间',
- PRIMARY KEY (`id`),
- KEY `idx01_instance_info` (`job_id`, `status`),
- KEY `idx02_instance_info` (`app_id`, `status`),
- KEY `idx03_instance_info` (`instance_id`, `status`)
-) ENGINE = InnoDB
- AUTO_INCREMENT = 1
- DEFAULT CHARSET = utf8mb4
- COLLATE = utf8mb4_general_ci COMMENT ='任务实例表';
+CREATE TABLE `instance_info` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `actual_trigger_time` bigint DEFAULT NULL,
+ `app_id` bigint DEFAULT NULL,
+ `expected_trigger_time` bigint DEFAULT NULL,
+ `finished_time` bigint DEFAULT NULL,
+ `gmt_create` datetime(6) DEFAULT NULL,
+ `gmt_modified` datetime(6) DEFAULT NULL,
+ `instance_id` bigint DEFAULT NULL,
+ `instance_params` longtext,
+ `job_id` bigint DEFAULT NULL,
+ `job_params` longtext,
+ `last_report_time` bigint DEFAULT NULL,
+ `result` longtext,
+ `running_times` bigint DEFAULT NULL,
+ `status` int DEFAULT NULL,
+ `task_tracker_address` varchar(255) DEFAULT NULL,
+ `type` int DEFAULT NULL,
+ `wf_instance_id` bigint DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `idx01_instance_info` (`job_id`,`status`),
+ KEY `idx02_instance_info` (`app_id`,`status`),
+ KEY `idx03_instance_info` (`instance_id`,`status`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
-CREATE TABLE `job_info`
-(
- `id` bigint NOT NULL AUTO_INCREMENT,
- `app_id` bigint DEFAULT NULL COMMENT '应用ID',
- `job_name` varchar(128) DEFAULT NULL COMMENT '任务名称',
- `job_description` varchar(255) DEFAULT NULL COMMENT '任务描述',
- `job_params` text COMMENT '任务默认参数',
- `concurrency` int DEFAULT NULL COMMENT '并发度,同时执行某个任务的最大线程数量',
- `designated_workers` varchar(255) DEFAULT NULL COMMENT '运行节点,空:不限(多值逗号分割)',
- `dispatch_strategy` int DEFAULT NULL COMMENT '投递策略,1:健康优先/2:随机',
- `execute_type` int not NULL COMMENT '执行类型,1:单机STANDALONE/2:广播BROADCAST/3:MAP_REDUCE/4:MAP',
- `instance_retry_num` int not null DEFAULT 0 COMMENT 'Instance重试次数',
- `instance_time_limit` bigint not null DEFAULT 0 COMMENT '任务整体超时时间',
- `lifecycle` varchar(255) DEFAULT NULL COMMENT '生命周期',
- `max_instance_num` int not null DEFAULT 1 COMMENT '最大同时运行任务数,默认 1',
- `max_worker_count` int not null DEFAULT 0 COMMENT '最大运行节点数量',
- `min_cpu_cores` double NOT NULL default 0 COMMENT '最低CPU核心数量,0:不限',
- `min_disk_space` double NOT NULL default 0 COMMENT '最低磁盘空间(GB),0:不限',
- `min_memory_space` double NOT NULL default 0 COMMENT '最低内存空间(GB),0:不限',
- `next_trigger_time` bigint DEFAULT NULL COMMENT '下一次调度时间',
- `notify_user_ids` varchar(255) DEFAULT NULL COMMENT '报警用户(多值逗号分割)',
- `processor_info` varchar(255) DEFAULT NULL COMMENT '执行器信息',
- `processor_type` int not NULL COMMENT '执行器类型,1:内建处理器BUILT_IN/2:SHELL/3:PYTHON/4:外部处理器(动态加载)EXTERNAL',
- `status` int not NULL COMMENT '状态,1:正常ENABLE/2:已禁用DISABLE/99:已删除DELETED',
- `task_retry_num` int not NULL default 0 COMMENT 'Task重试次数',
- `time_expression` varchar(255) default NULL COMMENT '时间表达式,内容取决于time_expression_type,1:CRON/2:NULL/3:LONG/4:LONG',
- `time_expression_type` int not NULL COMMENT '时间表达式类型,1:CRON/2:API/3:FIX_RATE/4:FIX_DELAY,5:WORKFLOW\n)',
- `tag` varchar(255) DEFAULT NULL COMMENT 'TAG',
- `log_config` varchar(255) DEFAULT NULL COMMENT '日志配置',
- `extra` varchar(255) DEFAULT NULL COMMENT '扩展字段',
- `gmt_create` datetime not NULL COMMENT '创建时间',
- `gmt_modified` datetime not NULL COMMENT '更新时间',
- PRIMARY KEY (`id`),
- KEY `idx01_job_info` (`app_id`, `status`, `time_expression_type`, `next_trigger_time`)
-) ENGINE = InnoDB
- AUTO_INCREMENT = 1
- DEFAULT CHARSET = utf8mb4
- COLLATE = utf8mb4_general_ci COMMENT ='任务表';
+CREATE TABLE `job_info` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `advanced_runtime_config` varchar(255) DEFAULT NULL,
+ `alarm_config` varchar(255) DEFAULT NULL,
+ `app_id` bigint DEFAULT NULL,
+ `concurrency` int DEFAULT NULL,
+ `designated_workers` varchar(255) DEFAULT NULL,
+ `dispatch_strategy` int DEFAULT NULL,
+ `dispatch_strategy_config` varchar(255) DEFAULT NULL,
+ `execute_type` int DEFAULT NULL,
+ `extra` varchar(255) DEFAULT NULL,
+ `gmt_create` datetime(6) DEFAULT NULL,
+ `gmt_modified` datetime(6) DEFAULT NULL,
+ `instance_retry_num` int DEFAULT NULL,
+ `instance_time_limit` bigint DEFAULT NULL,
+ `job_description` varchar(255) DEFAULT NULL,
+ `job_name` varchar(255) DEFAULT NULL,
+ `job_params` longtext,
+ `lifecycle` varchar(255) DEFAULT NULL,
+ `log_config` varchar(255) DEFAULT NULL,
+ `max_instance_num` int DEFAULT NULL,
+ `max_worker_count` int DEFAULT NULL,
+ `min_cpu_cores` double NOT NULL,
+ `min_disk_space` double NOT NULL,
+ `min_memory_space` double NOT NULL,
+ `next_trigger_time` bigint DEFAULT NULL,
+ `notify_user_ids` varchar(255) DEFAULT NULL,
+ `processor_info` varchar(255) DEFAULT NULL,
+ `processor_type` int DEFAULT NULL,
+ `status` int DEFAULT NULL,
+ `tag` varchar(255) DEFAULT NULL,
+ `task_retry_num` int DEFAULT NULL,
+ `time_expression` varchar(255) DEFAULT NULL,
+ `time_expression_type` int DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
DROP TABLE IF EXISTS `oms_lock`;
-CREATE TABLE `oms_lock`
-(
- `id` bigint NOT NULL AUTO_INCREMENT COMMENT '序号ID',
- `lock_name` varchar(128) DEFAULT NULL COMMENT '名称',
- `max_lock_time` bigint DEFAULT NULL COMMENT '最长持锁时间',
- `ownerip` varchar(255) DEFAULT NULL COMMENT '拥有者IP',
- `gmt_create` datetime not NULL COMMENT '创建时间',
- `gmt_modified` datetime not NULL COMMENT '更新时间',
- PRIMARY KEY (`id`),
- UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
-) ENGINE = InnoDB
- AUTO_INCREMENT = 1
- DEFAULT CHARSET = utf8mb4
- COLLATE = utf8mb4_general_ci COMMENT ='数据库锁';
+CREATE TABLE `oms_lock` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `gmt_create` datetime(6) DEFAULT NULL,
+ `gmt_modified` datetime(6) DEFAULT NULL,
+ `lock_name` varchar(255) DEFAULT NULL,
+ `max_lock_time` bigint DEFAULT NULL,
+ `ownerip` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
+) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
-CREATE TABLE `server_info`
-(
- `id` bigint NOT NULL AUTO_INCREMENT COMMENT '服务器ID',
- `gmt_create` datetime DEFAULT NULL COMMENT '创建时间',
- `gmt_modified` datetime DEFAULT NULL COMMENT '更新时间',
- `ip` varchar(128) DEFAULT NULL COMMENT '服务器IP地址',
- PRIMARY KEY (`id`),
- UNIQUE KEY `uidx01_server_info` (`ip`),
- KEY `idx01_server_info` (`gmt_modified`)
-) ENGINE = InnoDB
- AUTO_INCREMENT = 1
- DEFAULT CHARSET = utf8mb4
- COLLATE = utf8mb4_general_ci COMMENT ='服务器表';
+CREATE TABLE `server_info` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `gmt_create` datetime(6) DEFAULT NULL,
+ `gmt_modified` datetime(6) DEFAULT NULL,
+ `ip` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `uidx01_server_info` (`ip`),
+ KEY `idx01_server_info` (`gmt_modified`)
+) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
-CREATE TABLE `user_info`
-(
- `id` bigint NOT NULL AUTO_INCREMENT COMMENT '用户ID',
- `username` varchar(128) not NULL COMMENT '用户名',
- `password` varchar(255) default NULL COMMENT '密码',
- `phone` varchar(255) DEFAULT NULL COMMENT '手机号',
- `email` varchar(128) not NULL COMMENT '邮箱',
- `extra` varchar(255) DEFAULT NULL COMMENT '扩展字段',
- `web_hook` varchar(255) DEFAULT NULL COMMENT 'webhook地址',
- `gmt_create` datetime not NULL COMMENT '创建时间',
- `gmt_modified` datetime not NULL COMMENT '更新时间',
- PRIMARY KEY (`id`),
- unique index uidx01_user_info (username),
- unique index uidx02_user_info (email)
-) ENGINE = InnoDB
- DEFAULT CHARSET = utf8mb4
- COLLATE = utf8mb4_general_ci COMMENT ='用户表';
+CREATE TABLE `user_info` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `email` varchar(255) DEFAULT NULL,
+ `extra` varchar(255) DEFAULT NULL,
+ `gmt_create` datetime(6) DEFAULT NULL,
+ `gmt_modified` datetime(6) DEFAULT NULL,
+ `password` varchar(255) DEFAULT NULL,
+ `phone` varchar(255) DEFAULT NULL,
+ `username` varchar(255) DEFAULT NULL,
+ `web_hook` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `uidx01_user_info` (`username`),
+ KEY `uidx02_user_info` (`email`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
-CREATE TABLE `workflow_info`
-(
- `id` bigint NOT NULL AUTO_INCREMENT COMMENT '工作流ID',
- `app_id` bigint not NULL COMMENT '应用ID',
- `wf_name` varchar(128) not NULL COMMENT '工作流名称',
- `wf_description` varchar(255) default NULL COMMENT '工作流描述',
- `extra` varchar(255) DEFAULT NULL COMMENT '扩展字段',
- `lifecycle` varchar(255) DEFAULT NULL COMMENT '生命周期',
- `max_wf_instance_num` int not null DEFAULT 1 COMMENT '最大运行工作流数量,默认 1',
- `next_trigger_time` bigint DEFAULT NULL COMMENT '下次调度时间',
- `notify_user_ids` varchar(255) DEFAULT NULL COMMENT '报警用户(多值逗号分割)',
- `pedag` text COMMENT 'DAG信息(JSON)',
- `status` int not NULL COMMENT '状态,1:正常ENABLE/2:已禁用DISABLE/99:已删除DELETED',
- `time_expression` varchar(255) DEFAULT NULL COMMENT '时间表达式,内容取决于time_expression_type,1:CRON/2:NULL/3:LONG/4:LONG',
- `time_expression_type` int not NULL COMMENT '时间表达式类型,1:CRON/2:API/3:FIX_RATE/4:FIX_DELAY,5:WORKFLOW\n)',
- `gmt_create` datetime DEFAULT NULL COMMENT '创建时间',
- `gmt_modified` datetime DEFAULT NULL COMMENT '更新时间',
- PRIMARY KEY (`id`),
- KEY `idx01_workflow_info` (`app_id`, `status`, `time_expression_type`, next_trigger_time)
-) ENGINE = InnoDB
- AUTO_INCREMENT = 1
- DEFAULT CHARSET = utf8mb4
- COLLATE = utf8mb4_general_ci COMMENT ='工作流表';
+CREATE TABLE `workflow_info` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `app_id` bigint DEFAULT NULL,
+ `extra` varchar(255) DEFAULT NULL,
+ `gmt_create` datetime(6) DEFAULT NULL,
+ `gmt_modified` datetime(6) DEFAULT NULL,
+ `lifecycle` varchar(255) DEFAULT NULL,
+ `max_wf_instance_num` int DEFAULT NULL,
+ `next_trigger_time` bigint DEFAULT NULL,
+ `notify_user_ids` varchar(255) DEFAULT NULL,
+ `pedag` longtext,
+ `status` int DEFAULT NULL,
+ `time_expression` varchar(255) DEFAULT NULL,
+ `time_expression_type` int DEFAULT NULL,
+ `wf_description` varchar(255) DEFAULT NULL,
+ `wf_name` varchar(255) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
-CREATE TABLE `workflow_instance_info`
-(
- `id` bigint NOT NULL AUTO_INCREMENT COMMENT '工作流实例ID',
- `wf_instance_id` bigint DEFAULT NULL COMMENT '工作流实例ID',
- `workflow_id` bigint DEFAULT NULL COMMENT '工作流ID',
- `actual_trigger_time` bigint DEFAULT NULL COMMENT '实际触发时间',
- `app_id` bigint DEFAULT NULL COMMENT '应用ID',
- `dag` text COMMENT 'DAG信息(JSON)',
- `expected_trigger_time` bigint DEFAULT NULL COMMENT '计划触发时间',
- `finished_time` bigint DEFAULT NULL COMMENT '执行结束时间',
- `result` text COMMENT '执行结果',
- `status` int DEFAULT NULL COMMENT '工作流实例状态,1:等待调度WAITING/2:运行中RUNNING/3:失败FAILED/4:成功SUCCEED/10:手动停止STOPPED',
- `wf_context` text COMMENT '工作流上下文',
- `wf_init_params` text COMMENT '工作流启动参数',
- `gmt_create` datetime DEFAULT NULL COMMENT '创建时间',
- `gmt_modified` datetime DEFAULT NULL COMMENT '更新时间',
- PRIMARY KEY (`id`),
- unique index uidx01_wf_instance (`wf_instance_id`),
- index idx01_wf_instance (`workflow_id`, `status`),
- index idx02_wf_instance (`app_id`, `status`, `expected_trigger_time`)
-) ENGINE = InnoDB
- AUTO_INCREMENT = 1
- DEFAULT CHARSET = utf8mb4
- COLLATE = utf8mb4_general_ci COMMENT ='工作流实例表';
+CREATE TABLE `workflow_instance_info` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `actual_trigger_time` bigint DEFAULT NULL,
+ `app_id` bigint DEFAULT NULL,
+ `dag` longtext,
+ `expected_trigger_time` bigint DEFAULT NULL,
+ `finished_time` bigint DEFAULT NULL,
+ `gmt_create` datetime(6) DEFAULT NULL,
+ `gmt_modified` datetime(6) DEFAULT NULL,
+ `parent_wf_instance_id` bigint DEFAULT NULL,
+ `result` longtext,
+ `status` int DEFAULT NULL,
+ `wf_context` longtext,
+ `wf_init_params` longtext,
+ `wf_instance_id` bigint DEFAULT NULL,
+ `workflow_id` bigint DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
+ KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
-CREATE TABLE `workflow_node_info`
-(
- `id` bigint NOT NULL AUTO_INCREMENT COMMENT '节点ID',
- `app_id` bigint NOT NULL COMMENT '应用ID',
- `enable` bit(1) NOT NULL COMMENT '是否启动,0:否/1:是',
- `extra` text COMMENT '扩展字段',
- `gmt_create` datetime NOT NULL COMMENT '创建时间',
- `gmt_modified` datetime NOT NULL COMMENT '更新时间',
- `job_id` bigint default NULL COMMENT '任务ID',
- `node_name` varchar(255) DEFAULT NULL COMMENT '节点名称',
- `node_params` text COMMENT '节点参数',
- `skip_when_failed` bit(1) NOT NULL COMMENT '是否允许失败跳过,0:否/1:是',
- `type` int DEFAULT NULL COMMENT '节点类型,1:任务JOB',
- `workflow_id` bigint DEFAULT NULL COMMENT '工作流ID',
- PRIMARY KEY (`id`),
- KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
-) ENGINE = InnoDB
- AUTO_INCREMENT = 1
- DEFAULT CHARSET = utf8mb4
- COLLATE = utf8mb4_general_ci COMMENT ='工作流节点表';
+CREATE TABLE `workflow_node_info` (
+ `id` bigint NOT NULL AUTO_INCREMENT,
+ `app_id` bigint NOT NULL,
+ `enable` bit(1) NOT NULL,
+ `extra` longtext,
+ `gmt_create` datetime(6) NOT NULL,
+ `gmt_modified` datetime(6) NOT NULL,
+ `job_id` bigint DEFAULT NULL,
+ `node_name` varchar(255) DEFAULT NULL,
+ `node_params` longtext,
+ `skip_when_failed` bit(1) NOT NULL,
+ `type` int DEFAULT NULL,
+ `workflow_id` bigint DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;
+
diff --git a/others/sql/upgrade/v4.3.7-v4.3.8.sql b/others/sql/upgrade/v4.3.7-v4.3.8.sql
new file mode 100644
index 00000000..e2ab1e46
--- /dev/null
+++ b/others/sql/upgrade/v4.3.7-v4.3.8.sql
@@ -0,0 +1,6 @@
+-- Upgrade SQL from v4.3.7 to v4.3.8
+-- ----------------------------
+-- Table change for job_info
+-- ----------------------------
+alter table job_info add dispatch_strategy_config varchar(255) comment 'dispatch_strategy_config' default null;
+alter table job_info add advanced_runtime_config varchar(255) comment 'advanced_runtime_config' default null;
diff --git a/pom.xml b/pom.xml
index fc36f9e6..f9bf1b28 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
tech.powerjob
powerjob
- 4.3.8
+ 4.3.9
pom
powerjob
http://www.powerjob.tech
diff --git a/powerjob-client/pom.xml b/powerjob-client/pom.xml
index b1b6a14f..ad852573 100644
--- a/powerjob-client/pom.xml
+++ b/powerjob-client/pom.xml
@@ -5,18 +5,18 @@
powerjob
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
powerjob-client
- 4.3.8
+ 4.3.9
jar
5.9.1
1.2.83
- 4.3.8
+ 4.3.9
3.2.4
diff --git a/powerjob-common/pom.xml b/powerjob-common/pom.xml
index 588cdbdb..16765564 100644
--- a/powerjob-common/pom.xml
+++ b/powerjob-common/pom.xml
@@ -5,12 +5,12 @@
powerjob
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
powerjob-common
- 4.3.8
+ 4.3.9
jar
diff --git a/powerjob-official-processors/pom.xml b/powerjob-official-processors/pom.xml
index ad812d52..7a8d5550 100644
--- a/powerjob-official-processors/pom.xml
+++ b/powerjob-official-processors/pom.xml
@@ -5,12 +5,12 @@
powerjob
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
powerjob-official-processors
- 4.3.8
+ 4.3.9
jar
@@ -20,7 +20,7 @@
5.9.1
1.2.13
- 4.3.8
+ 4.3.9
2.2.224
8.0.28
5.3.31
diff --git a/powerjob-remote/pom.xml b/powerjob-remote/pom.xml
index fdb86441..c1629f5d 100644
--- a/powerjob-remote/pom.xml
+++ b/powerjob-remote/pom.xml
@@ -5,7 +5,7 @@
powerjob
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
pom
diff --git a/powerjob-remote/powerjob-remote-benchmark/pom.xml b/powerjob-remote/powerjob-remote-benchmark/pom.xml
index b9002884..230a6e27 100644
--- a/powerjob-remote/powerjob-remote-benchmark/pom.xml
+++ b/powerjob-remote/powerjob-remote-benchmark/pom.xml
@@ -5,7 +5,7 @@
powerjob-remote
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
@@ -21,8 +21,8 @@
1.2.13
2.7.18
- 4.3.8
- 4.3.8
+ 4.3.9
+ 4.3.9
3.9.0
4.2.9
diff --git a/powerjob-remote/powerjob-remote-framework/pom.xml b/powerjob-remote/powerjob-remote-framework/pom.xml
index f55fe02c..a244e551 100644
--- a/powerjob-remote/powerjob-remote-framework/pom.xml
+++ b/powerjob-remote/powerjob-remote-framework/pom.xml
@@ -5,11 +5,11 @@
powerjob-remote
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
- 4.3.8
+ 4.3.9
powerjob-remote-framework
@@ -17,7 +17,7 @@
8
UTF-8
- 4.3.8
+ 4.3.9
0.10.2
diff --git a/powerjob-remote/powerjob-remote-impl-akka/pom.xml b/powerjob-remote/powerjob-remote-impl-akka/pom.xml
index c6beade5..4a9b4a23 100644
--- a/powerjob-remote/powerjob-remote-impl-akka/pom.xml
+++ b/powerjob-remote/powerjob-remote-impl-akka/pom.xml
@@ -5,19 +5,19 @@
powerjob-remote
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
powerjob-remote-impl-akka
- 4.3.8
+ 4.3.9
8
8
UTF-8
- 4.3.8
+ 4.3.9
2.6.13
diff --git a/powerjob-remote/powerjob-remote-impl-http/pom.xml b/powerjob-remote/powerjob-remote-impl-http/pom.xml
index a43c9c57..19e3cce4 100644
--- a/powerjob-remote/powerjob-remote-impl-http/pom.xml
+++ b/powerjob-remote/powerjob-remote-impl-http/pom.xml
@@ -5,12 +5,12 @@
powerjob-remote
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
powerjob-remote-impl-http
- 4.3.8
+ 4.3.9
8
@@ -18,7 +18,7 @@
UTF-8
4.3.7
- 4.3.8
+ 4.3.9
diff --git a/powerjob-server/pom.xml b/powerjob-server/pom.xml
index d086dc01..70bf2ee4 100644
--- a/powerjob-server/pom.xml
+++ b/powerjob-server/pom.xml
@@ -5,12 +5,12 @@
powerjob
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
powerjob-server
- 4.3.8
+ 4.3.9
pom
@@ -50,12 +50,12 @@
3.0.10
9.2.1
- 4.3.8
- 4.3.8
- 4.3.8
+ 4.3.9
+ 4.3.9
+ 4.3.9
1.6.14
3.17.1
- 8.5.2
+ 1.12.665
4.4
@@ -114,12 +114,13 @@
aliyun-sdk-oss
${aliyun-sdk-oss.version}
-
+
- io.minio
- minio
- ${minio.version}
+ com.amazonaws
+ aws-java-sdk-s3
+ ${aws-java-sdk-s3.version}
+
org.apache.commons
diff --git a/powerjob-server/powerjob-server-common/pom.xml b/powerjob-server/powerjob-server-common/pom.xml
index 6496e51f..72d44c41 100644
--- a/powerjob-server/powerjob-server-common/pom.xml
+++ b/powerjob-server/powerjob-server-common/pom.xml
@@ -5,7 +5,7 @@
powerjob-server
tech.powerjob
- 4.3.8
+ 4.3.9
../pom.xml
4.0.0
diff --git a/powerjob-server/powerjob-server-core/pom.xml b/powerjob-server/powerjob-server-core/pom.xml
index eba74eea..426bd20b 100644
--- a/powerjob-server/powerjob-server-core/pom.xml
+++ b/powerjob-server/powerjob-server-core/pom.xml
@@ -5,7 +5,7 @@
powerjob-server
tech.powerjob
- 4.3.8
+ 4.3.9
../pom.xml
4.0.0
diff --git a/powerjob-server/powerjob-server-core/src/main/java/tech/powerjob/server/core/container/ContainerService.java b/powerjob-server/powerjob-server-core/src/main/java/tech/powerjob/server/core/container/ContainerService.java
index acb2fd0b..58a636d1 100644
--- a/powerjob-server/powerjob-server-core/src/main/java/tech/powerjob/server/core/container/ContainerService.java
+++ b/powerjob-server/powerjob-server-core/src/main/java/tech/powerjob/server/core/container/ContainerService.java
@@ -317,40 +317,39 @@ public class ContainerService {
.collect(Collectors.toSet());
Set deployedList = Sets.newLinkedHashSet();
- List unDeployedList = Lists.newLinkedList();
- Multimap version2Address = ArrayListMultimap.create();
+ Multimap version2DeployedContainerInfoList = ArrayListMultimap.create();
infoList.forEach(info -> {
String targetWorkerAddress = info.getWorkerAddress();
if (aliveWorkers.contains(targetWorkerAddress)) {
deployedList.add(targetWorkerAddress);
- version2Address.put(info.getVersion(), targetWorkerAddress);
- }else {
- unDeployedList.add(targetWorkerAddress);
+ version2DeployedContainerInfoList.put(info.getVersion(), info);
}
});
+ Set unDeployedList = Sets.newHashSet(aliveWorkers);
+ unDeployedList.removeAll(deployedList);
+
StringBuilder sb = new StringBuilder("========== DeployedInfo ==========").append(System.lineSeparator());
+
// 集群分裂,各worker版本不统一,问题很大
- if (version2Address.keySet().size() > 1) {
+ if (version2DeployedContainerInfoList.keySet().size() > 1) {
sb.append("WARN: there exists multi version container now, please redeploy to fix this problem").append(System.lineSeparator());
- sb.append("divisive version ==> ").append(System.lineSeparator());
- version2Address.forEach((v, addressList) -> {
- sb.append("version: ").append(v).append(System.lineSeparator());
- sb.append(addressList);
- });
- sb.append(System.lineSeparator());
}
+
+ sb.append("divisive version ==> ").append(System.lineSeparator());
+ version2DeployedContainerInfoList.asMap().forEach((version, deployedContainerInfos) -> {
+ sb.append("[version] ").append(version).append(System.lineSeparator());
+ deployedContainerInfos.forEach(deployedContainerInfo -> sb.append(String.format("Address: %s, DeployedTime: %s", deployedContainerInfo.getWorkerAddress(), CommonUtils.formatTime(deployedContainerInfo.getDeployedTime()))).append(System.lineSeparator()));
+ });
+
// 当前在线未部署机器
if (!CollectionUtils.isEmpty(unDeployedList)) {
- sb.append("WARN: there exists unDeployed worker(OhMyScheduler will auto fix when some job need to process)").append(System.lineSeparator());
- sb.append("unDeployed worker list ==> ").append(System.lineSeparator());
+ sb.append("WARN: there exists unDeployed worker(PowerJob will auto fix when some job need to process)").append(System.lineSeparator());
+ sb.append("unDeployed worker list ==> ").append(unDeployedList).append(System.lineSeparator());
}
- // 当前部署机器
- sb.append("deployed worker list ==> ").append(System.lineSeparator());
+
if (CollectionUtils.isEmpty(deployedList)) {
- sb.append("no worker deployed now~");
- }else {
- sb.append(deployedList);
+ sb.append("no worker deployed this container now~");
}
return sb.toString();
diff --git a/powerjob-server/powerjob-server-core/src/main/java/tech/powerjob/server/core/scheduler/InstanceStatusCheckService.java b/powerjob-server/powerjob-server-core/src/main/java/tech/powerjob/server/core/scheduler/InstanceStatusCheckService.java
index 4652904f..a6674c6d 100644
--- a/powerjob-server/powerjob-server-core/src/main/java/tech/powerjob/server/core/scheduler/InstanceStatusCheckService.java
+++ b/powerjob-server/powerjob-server-core/src/main/java/tech/powerjob/server/core/scheduler/InstanceStatusCheckService.java
@@ -145,7 +145,11 @@ public class InstanceStatusCheckService {
log.info("[InstanceStatusChecker] RunningInstance status check using {}.", stopwatch.stop());
}
- private void handleWaitingDispatchInstance(List partAppIds) {
+ private void handleWaitingDispatchInstance(List appIds) {
+
+ // 存在移除操作,需要重新创建集合,否则会导致外层抛出 NoSuchElementException: null
+ List partAppIds = Lists.newArrayList(appIds);
+
// 1. 检查等待 WAITING_DISPATCH 状态的任务
long threshold = System.currentTimeMillis() - DISPATCH_TIMEOUT_MS;
List waitingDispatchInstances = instanceInfoRepository.findAllByAppIdInAndStatusAndExpectedTriggerTimeLessThan(partAppIds, InstanceStatus.WAITING_DISPATCH.getV(), threshold, PageRequest.of(0, MAX_BATCH_NUM_INSTANCE));
diff --git a/powerjob-server/powerjob-server-extension/pom.xml b/powerjob-server/powerjob-server-extension/pom.xml
index a13ce7b3..ad4d772f 100644
--- a/powerjob-server/powerjob-server-extension/pom.xml
+++ b/powerjob-server/powerjob-server-extension/pom.xml
@@ -5,7 +5,7 @@
powerjob-server
tech.powerjob
- 4.3.8
+ 4.3.9
../pom.xml
4.0.0
diff --git a/powerjob-server/powerjob-server-migrate/pom.xml b/powerjob-server/powerjob-server-migrate/pom.xml
index f7fc9607..a8bf4916 100644
--- a/powerjob-server/powerjob-server-migrate/pom.xml
+++ b/powerjob-server/powerjob-server-migrate/pom.xml
@@ -5,7 +5,7 @@
powerjob-server
tech.powerjob
- 4.3.8
+ 4.3.9
../pom.xml
4.0.0
diff --git a/powerjob-server/powerjob-server-monitor/pom.xml b/powerjob-server/powerjob-server-monitor/pom.xml
index 9b6d088d..012e8121 100644
--- a/powerjob-server/powerjob-server-monitor/pom.xml
+++ b/powerjob-server/powerjob-server-monitor/pom.xml
@@ -5,7 +5,7 @@
powerjob-server
tech.powerjob
- 4.3.8
+ 4.3.9
../pom.xml
4.0.0
diff --git a/powerjob-server/powerjob-server-persistence/pom.xml b/powerjob-server/powerjob-server-persistence/pom.xml
index 54cb0459..5956e0ee 100644
--- a/powerjob-server/powerjob-server-persistence/pom.xml
+++ b/powerjob-server/powerjob-server-persistence/pom.xml
@@ -5,7 +5,7 @@
powerjob-server
tech.powerjob
- 4.3.8
+ 4.3.9
../pom.xml
4.0.0
@@ -41,8 +41,8 @@
aliyun-sdk-oss
- io.minio
- minio
+ com.amazonaws
+ aws-java-sdk-s3
diff --git a/powerjob-server/powerjob-server-persistence/src/main/java/tech/powerjob/server/persistence/storage/impl/MinioOssService.java b/powerjob-server/powerjob-server-persistence/src/main/java/tech/powerjob/server/persistence/storage/impl/MinioOssService.java
index 5ee30ec2..cc14de9c 100644
--- a/powerjob-server/powerjob-server-persistence/src/main/java/tech/powerjob/server/persistence/storage/impl/MinioOssService.java
+++ b/powerjob-server/powerjob-server-persistence/src/main/java/tech/powerjob/server/persistence/storage/impl/MinioOssService.java
@@ -1,8 +1,13 @@
package tech.powerjob.server.persistence.storage.impl;
+import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.client.builder.AwsClientBuilder;
+import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.AmazonS3ClientBuilder;
+import com.amazonaws.services.s3.model.*;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
-import io.minio.*;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
@@ -16,8 +21,6 @@ import tech.powerjob.server.extension.dfs.*;
import tech.powerjob.server.persistence.storage.AbstractDFsService;
import javax.annotation.Priority;
-import java.nio.file.Files;
-import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -44,19 +47,19 @@ public class MinioOssService extends AbstractDFsService {
private static final String KEY_BUCKET_NAME = "bucketName";
private static final String ACCESS_KEY = "accessKey";
private static final String SECRET_KEY = "secretKey";
- private MinioClient minioClient;
+ private AmazonS3 amazonS3;
private String bucket;
- private static final String NO_SUCH_KEY = "NoSuchKey";
+ private static final String NOT_FOUNT = "404 Not Found";
@Override
public void store(StoreRequest storeRequest) {
try {
- minioClient.uploadObject(UploadObjectArgs.builder()
- .bucket(this.bucket)
- .object(parseFileName(storeRequest.getFileLocation()))
- .filename(storeRequest.getLocalFile().getPath())
- .contentType(Files.probeContentType(storeRequest.getLocalFile().toPath()))
- .build());
+
+ String fileName = parseFileName(storeRequest.getFileLocation());
+ // 创建 PutObjectRequest 对象
+ PutObjectRequest request = new PutObjectRequest(this.bucket, fileName, storeRequest.getLocalFile());
+
+ amazonS3.putObject(request);
} catch (Throwable t) {
ExceptionUtils.rethrow(t);
}
@@ -66,13 +69,11 @@ public class MinioOssService extends AbstractDFsService {
public void download(DownloadRequest downloadRequest) {
try {
FileUtils.forceMkdirParent(downloadRequest.getTarget());
- // 下载文件
- minioClient.downloadObject(
- DownloadObjectArgs.builder()
- .bucket(this.bucket)
- .object(parseFileName(downloadRequest.getFileLocation()))
- .filename(downloadRequest.getTarget().getAbsolutePath())
- .build());
+
+ String fileName = parseFileName(downloadRequest.getFileLocation());
+ GetObjectRequest getObjectRequest = new GetObjectRequest(this.bucket, fileName);
+ amazonS3.getObject(getObjectRequest, downloadRequest.getTarget());
+
} catch (Throwable t) {
ExceptionUtils.rethrow(t);
}
@@ -86,26 +87,28 @@ public class MinioOssService extends AbstractDFsService {
@Override
public Optional fetchFileMeta(FileLocation fileLocation) {
try {
- StatObjectResponse stat = minioClient.statObject(StatObjectArgs.builder()
- .bucket(this.bucket)
- .object(parseFileName(fileLocation))
- .build());
- return Optional.ofNullable(stat).map(minioStat -> {
+
+ String fileName = parseFileName(fileLocation);
+ ObjectMetadata objectMetadata = amazonS3.getObjectMetadata(this.bucket, fileName);
+
+ return Optional.ofNullable(objectMetadata).map(minioStat -> {
Map metaInfo = Maps.newHashMap();
- if (stat.userMetadata() != null) {
- metaInfo.putAll(stat.userMetadata());
+
+ if (objectMetadata.getRawMetadata() != null) {
+ metaInfo.putAll(objectMetadata.getRawMetadata());
}
return new FileMeta()
- .setLastModifiedTime(Date.from(stat.lastModified().toInstant()))
- .setLength(stat.size())
+ .setLastModifiedTime(objectMetadata.getLastModified())
+ .setLength(objectMetadata.getContentLength())
.setMetaInfo(metaInfo);
});
- } catch (Exception oe) {
- String errorCode = oe.getMessage();
- if (NO_SUCH_KEY.equalsIgnoreCase(errorCode)) {
+ } catch (AmazonS3Exception s3Exception) {
+ String errorCode = s3Exception.getErrorCode();
+ if (NOT_FOUNT.equalsIgnoreCase(errorCode)) {
return Optional.empty();
}
+ } catch (Exception oe) {
ExceptionUtils.rethrow(oe);
}
return Optional.empty();
@@ -170,8 +173,18 @@ public class MinioOssService extends AbstractDFsService {
if (StringUtils.isEmpty(bucketName)) {
throw new IllegalArgumentException("'oms.storage.dfs.minio.bucketName' can't be empty, please creat a bucket in minio oss console then config it to powerjob");
}
+
+ // 创建凭证对象
+ BasicAWSCredentials awsCreds = new BasicAWSCredentials(accessKey, secretKey);
+
+ // 创建AmazonS3客户端并指定终端节点和凭证
+ this.amazonS3 = AmazonS3ClientBuilder.standard()
+            // The AWS SDK client builder requires a region even for non-AWS, S3-compatible services such as MinIO. "us-east-1" is a conventional placeholder; it has no effect on connectivity because the actual service address is determined by the endpoint URL configured below.
+ .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, "us-east-1"))
+ .withCredentials(new AWSStaticCredentialsProvider(awsCreds))
+ .withPathStyleAccessEnabled(true) // 重要:启用路径样式访问
+ .build();
this.bucket = bucketName;
- minioClient = MinioClient.builder().endpoint(endpoint).credentials(accessKey, secretKey).build();
createBucket(bucketName);
log.info("[Minio] initialize OSS successfully!");
}
@@ -183,10 +196,15 @@ public class MinioOssService extends AbstractDFsService {
*/
@SneakyThrows(Exception.class)
public void createBucket(String bucketName) {
- if (!bucketExists(bucketName)) {
- minioClient.makeBucket(MakeBucketArgs.builder()
- .bucket(bucketName).build());
+
+ // 建议自行创建 bucket,设置好相关的策略
+ if (bucketExists(bucketName)) {
+ return;
}
+
+ Bucket createBucketResult = amazonS3.createBucket(bucketName);
+ log.info("[Minio] createBucket successfully, bucketName: {}, createResult: {}", bucketName, createBucketResult);
+
String policy = "{\n" +
" \"Version\": \"2012-10-17\",\n" +
" \"Statement\": [\n" +
@@ -206,7 +224,11 @@ public class MinioOssService extends AbstractDFsService {
" }\n" +
" ]\n" +
"}";
- minioClient.setBucketPolicy(SetBucketPolicyArgs.builder().bucket(bucketName).config(policy).build());
+ try {
+ amazonS3.setBucketPolicy(bucketName, policy);
+ } catch (Exception e) {
+ log.warn("[Minio] setBucketPolicy failed, maybe you need to setBucketPolicy by yourself!", e);
+ }
}
/**
@@ -217,7 +239,8 @@ public class MinioOssService extends AbstractDFsService {
*/
@SneakyThrows(Exception.class)
public boolean bucketExists(String bucketName) {
- return minioClient.bucketExists(BucketExistsArgs.builder().bucket(bucketName).build());
+
+ return amazonS3.doesBucketExistV2(bucketName);
}
public static class MinioOssCondition extends PropertyAndOneBeanCondition {
diff --git a/powerjob-server/powerjob-server-persistence/src/test/java/tech/powerjob/server/persistence/storage/impl/AliOssServiceTest.java b/powerjob-server/powerjob-server-persistence/src/test/java/tech/powerjob/server/persistence/storage/impl/AliOssServiceTest.java
index 94167731..86ae4cd5 100644
--- a/powerjob-server/powerjob-server-persistence/src/test/java/tech/powerjob/server/persistence/storage/impl/AliOssServiceTest.java
+++ b/powerjob-server/powerjob-server-persistence/src/test/java/tech/powerjob/server/persistence/storage/impl/AliOssServiceTest.java
@@ -1,8 +1,8 @@
package tech.powerjob.server.persistence.storage.impl;
import com.aliyun.oss.common.utils.AuthUtils;
-import com.aliyun.oss.common.utils.StringUtils;
import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import tech.powerjob.server.extension.dfs.DFsService;
@@ -33,7 +33,7 @@ class AliOssServiceTest extends AbstractDfsServiceTest {
log.info("[AliOssServiceTest] ak: {}, sk: {}", accessKeyId, secretAccessKey);
- if (org.apache.commons.lang3.StringUtils.isAnyEmpty(accessKeyId, secretAccessKey)) {
+ if (StringUtils.isAnyEmpty(accessKeyId, secretAccessKey)) {
return Optional.empty();
}
diff --git a/powerjob-server/powerjob-server-persistence/src/test/java/tech/powerjob/server/persistence/storage/impl/MinioOssServiceTest.java b/powerjob-server/powerjob-server-persistence/src/test/java/tech/powerjob/server/persistence/storage/impl/MinioOssServiceTest.java
new file mode 100644
index 00000000..0ed5bf4b
--- /dev/null
+++ b/powerjob-server/powerjob-server-persistence/src/test/java/tech/powerjob/server/persistence/storage/impl/MinioOssServiceTest.java
@@ -0,0 +1,30 @@
+package tech.powerjob.server.persistence.storage.impl;
+
+import lombok.extern.slf4j.Slf4j;
+import tech.powerjob.server.extension.dfs.DFsService;
+
+import java.util.Optional;
+
+/**
+ * MinioOssServiceTest
+ * 测试需要先本地部署 minio,因此捕获异常,失败也不阻断测试
+ *
+ * @author tjq
+ * @since 2024/2/26
+ */
+@Slf4j
+class MinioOssServiceTest extends AbstractDfsServiceTest {
+
+ @Override
+ protected Optional fetchService() {
+ try {
+ MinioOssService aliOssService = new MinioOssService();
+ aliOssService.initOssClient("http://192.168.124.23:9000", "pj2","testAk", "testSktestSktestSk");
+ return Optional.of(aliOssService);
+ } catch (Exception e) {
+ // 仅异常提醒
+ log.error("[MinioOssServiceTest] test exception!", e);
+ }
+ return Optional.empty();
+ }
+}
\ No newline at end of file
diff --git a/powerjob-server/powerjob-server-remote/pom.xml b/powerjob-server/powerjob-server-remote/pom.xml
index 1385cdd1..5254a96e 100644
--- a/powerjob-server/powerjob-server-remote/pom.xml
+++ b/powerjob-server/powerjob-server-remote/pom.xml
@@ -5,7 +5,7 @@
powerjob-server
tech.powerjob
- 4.3.8
+ 4.3.9
../pom.xml
4.0.0
diff --git a/powerjob-server/powerjob-server-remote/src/main/java/tech/powerjob/server/remote/worker/selector/impl/SpecifyTaskTrackerSelector.java b/powerjob-server/powerjob-server-remote/src/main/java/tech/powerjob/server/remote/worker/selector/impl/SpecifyTaskTrackerSelector.java
index de2b738b..cbad5331 100644
--- a/powerjob-server/powerjob-server-remote/src/main/java/tech/powerjob/server/remote/worker/selector/impl/SpecifyTaskTrackerSelector.java
+++ b/powerjob-server/powerjob-server-remote/src/main/java/tech/powerjob/server/remote/worker/selector/impl/SpecifyTaskTrackerSelector.java
@@ -1,7 +1,7 @@
package tech.powerjob.server.remote.worker.selector.impl;
+import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.compress.utils.Lists;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import tech.powerjob.common.enums.DispatchStrategy;
diff --git a/powerjob-server/powerjob-server-starter/pom.xml b/powerjob-server/powerjob-server-starter/pom.xml
index 9564396f..b2d32bd1 100644
--- a/powerjob-server/powerjob-server-starter/pom.xml
+++ b/powerjob-server/powerjob-server-starter/pom.xml
@@ -5,7 +5,7 @@
powerjob-server
tech.powerjob
- 4.3.8
+ 4.3.9
../pom.xml
4.0.0
diff --git a/powerjob-server/powerjob-server-starter/src/main/resources/application-daily.properties b/powerjob-server/powerjob-server-starter/src/main/resources/application-daily.properties
index a2285ddd..6722e8b7 100644
--- a/powerjob-server/powerjob-server-starter/src/main/resources/application-daily.properties
+++ b/powerjob-server/powerjob-server-starter/src/main/resources/application-daily.properties
@@ -12,7 +12,7 @@ spring.datasource.core.minimum-idle=5
####### Storage properties(Delete if not needed) #######
#oms.storage.dfs.mongodb.uri=mongodb+srv://zqq:No1Bug2Please3!@cluster0.wie54.gcp.mongodb.net/powerjob_daily?retryWrites=true&w=majority
oms.storage.dfs.mysql_series.driver=com.mysql.cj.jdbc.Driver
-oms.storage.dfs.mysql_series.url=jdbc:mysql://localhost:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai
+oms.storage.dfs.mysql_series.url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai
oms.storage.dfs.mysql_series.username=root
oms.storage.dfs.mysql_series.password=No1Bug2Please3!
oms.storage.dfs.mysql_series.auto_create_table=true
diff --git a/powerjob-server/powerjob-server-starter/src/main/resources/application-product.properties b/powerjob-server/powerjob-server-starter/src/main/resources/application-product.properties
index efd5a637..9557f5d8 100644
--- a/powerjob-server/powerjob-server-starter/src/main/resources/application-product.properties
+++ b/powerjob-server/powerjob-server-starter/src/main/resources/application-product.properties
@@ -10,7 +10,7 @@ spring.datasource.core.maximum-pool-size=20
spring.datasource.core.minimum-idle=5
####### Storage properties(Delete if not needed) #######
-oms.storage.dfs.mongodb.uri=mongodb://localhost:27017/powerjob-product
+#oms.storage.dfs.mongodb.uri=mongodb://localhost:27017/powerjob-product
####### Email properties(Non-core configuration properties) #######
####### Delete the following code to disable the mail #######
diff --git a/powerjob-worker-agent/pom.xml b/powerjob-worker-agent/pom.xml
index d8d83579..ffd99af1 100644
--- a/powerjob-worker-agent/pom.xml
+++ b/powerjob-worker-agent/pom.xml
@@ -5,24 +5,24 @@
powerjob
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
powerjob-worker-agent
- 4.3.8
+ 4.3.9
jar
- 4.3.8
+ 4.3.9
1.2.13
4.3.2
5.3.31
2.3.4.RELEASE
- 4.3.8
+ 4.3.9
8.0.28
diff --git a/powerjob-worker-samples/pom.xml b/powerjob-worker-samples/pom.xml
index 710eb00a..88082e93 100644
--- a/powerjob-worker-samples/pom.xml
+++ b/powerjob-worker-samples/pom.xml
@@ -5,18 +5,18 @@
powerjob
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
powerjob-worker-samples
- 4.3.8
+ 4.3.9
2.7.18
- 4.3.8
+ 4.3.9
1.2.83
- 4.3.8
+ 4.3.9
true
diff --git a/powerjob-worker-spring-boot-starter/pom.xml b/powerjob-worker-spring-boot-starter/pom.xml
index 4cd05808..66419bb3 100644
--- a/powerjob-worker-spring-boot-starter/pom.xml
+++ b/powerjob-worker-spring-boot-starter/pom.xml
@@ -5,16 +5,16 @@
powerjob
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
powerjob-worker-spring-boot-starter
- 4.3.8
+ 4.3.9
jar
- 4.3.8
+ 4.3.9
2.7.18
diff --git a/powerjob-worker/pom.xml b/powerjob-worker/pom.xml
index 828019a3..ddb85026 100644
--- a/powerjob-worker/pom.xml
+++ b/powerjob-worker/pom.xml
@@ -5,12 +5,12 @@
powerjob
tech.powerjob
- 4.3.8
+ 4.3.9
4.0.0
powerjob-worker
- 4.3.8
+ 4.3.9
jar
@@ -21,10 +21,10 @@
1.2.13
- 4.3.8
- 4.3.8
- 4.3.8
- 4.3.8
+ 4.3.9
+ 4.3.9
+ 4.3.9
+ 4.3.9
diff --git a/powerjob-worker/src/main/java/tech/powerjob/worker/container/OmsContainerFactory.java b/powerjob-worker/src/main/java/tech/powerjob/worker/container/OmsContainerFactory.java
index f857e43c..0f62f0d9 100644
--- a/powerjob-worker/src/main/java/tech/powerjob/worker/container/OmsContainerFactory.java
+++ b/powerjob-worker/src/main/java/tech/powerjob/worker/container/OmsContainerFactory.java
@@ -92,6 +92,7 @@ public class OmsContainerFactory {
try {
if (!jarFile.exists()) {
+ log.info("[OmsContainer-{}] container not exist(path={}), try to download from server!", containerId, jarFile.getPath());
FileUtils.forceMkdirParent(jarFile);
FileUtils.copyURLToFile(new URL(request.getDownloadURL()), jarFile, 5000, 300000);
log.info("[OmsContainer-{}] download jar successfully, path={}", containerId, jarFile.getPath());
@@ -107,6 +108,7 @@ public class OmsContainerFactory {
if (oldContainer != null) {
// 销毁旧容器
+ log.info("[OmsContainer-{}] start to destroy old container(version={})", containerId, oldContainer.getVersion());
oldContainer.destroy();
}
diff --git a/powerjob-worker/src/main/java/tech/powerjob/worker/extension/processor/ProcessorBean.java b/powerjob-worker/src/main/java/tech/powerjob/worker/extension/processor/ProcessorBean.java
index c7238f33..28c31127 100644
--- a/powerjob-worker/src/main/java/tech/powerjob/worker/extension/processor/ProcessorBean.java
+++ b/powerjob-worker/src/main/java/tech/powerjob/worker/extension/processor/ProcessorBean.java
@@ -25,4 +25,10 @@ public class ProcessorBean {
*/
private transient ClassLoader classLoader;
+ /**
+ * Bean 是否稳定
+ * SpringBean / 普通Java 对象,在整个 JVM 生命周期内都不会变,可声明为稳定,在上层缓存,避免每次都要重现 build processor
+ * 对于动态容器,可能在部署后改变,则需要声明为不稳定
+ */
+ private boolean stable = true;
}
diff --git a/powerjob-worker/src/main/java/tech/powerjob/worker/processor/PowerJobProcessorLoader.java b/powerjob-worker/src/main/java/tech/powerjob/worker/processor/PowerJobProcessorLoader.java
index 00be4fa5..f7e3af68 100644
--- a/powerjob-worker/src/main/java/tech/powerjob/worker/processor/PowerJobProcessorLoader.java
+++ b/powerjob-worker/src/main/java/tech/powerjob/worker/processor/PowerJobProcessorLoader.java
@@ -30,27 +30,36 @@ public class PowerJobProcessorLoader implements ProcessorLoader {
@Override
public ProcessorBean load(ProcessorDefinition definition) {
- return def2Bean.computeIfAbsent(definition, ignore -> {
- final String processorType = definition.getProcessorType();
- log.info("[ProcessorFactory] start to load Processor: {}", definition);
- for (ProcessorFactory pf : processorFactoryList) {
- final String pfName = pf.getClass().getSimpleName();
- if (!Optional.ofNullable(pf.supportTypes()).orElse(Collections.emptySet()).contains(processorType)) {
- log.info("[ProcessorFactory] [{}] can't load type={}, skip!", pfName, processorType);
- continue;
- }
- log.info("[ProcessorFactory] [{}] try to load processor: {}", pfName, definition);
- try {
- ProcessorBean processorBean = pf.build(definition);
- if (processorBean != null) {
- log.info("[ProcessorFactory] [{}] load processor successfully: {}", pfName, definition);
- return processorBean;
- }
- } catch (Throwable t) {
- log.error("[ProcessorFactory] [{}] load processor failed: {}", pfName, definition, t);
- }
+
+ ProcessorBean pBean = def2Bean.computeIfAbsent(definition, ignore -> buildProcessorBean(definition));
+
+ if (pBean.isStable()) {
+ return pBean;
+ }
+
+ return buildProcessorBean(definition);
+ }
+
+ private ProcessorBean buildProcessorBean(ProcessorDefinition definition) {
+ final String processorType = definition.getProcessorType();
+ log.info("[ProcessorFactory] start to load Processor: {}", definition);
+ for (ProcessorFactory pf : processorFactoryList) {
+ final String pfName = pf.getClass().getSimpleName();
+ if (!Optional.ofNullable(pf.supportTypes()).orElse(Collections.emptySet()).contains(processorType)) {
+ log.info("[ProcessorFactory] [{}] can't load type={}, skip!", pfName, processorType);
+ continue;
}
- throw new PowerJobException("fetch Processor failed, please check your processorType and processorInfo config");
- });
+ log.info("[ProcessorFactory] [{}] try to load processor: {}", pfName, definition);
+ try {
+ ProcessorBean processorBean = pf.build(definition);
+ if (processorBean != null) {
+ log.info("[ProcessorFactory] [{}] load processor successfully: {}", pfName, definition);
+ return processorBean;
+ }
+ } catch (Throwable t) {
+ log.error("[ProcessorFactory] [{}] load processor failed: {}", pfName, definition, t);
+ }
+ }
+ throw new PowerJobException("fetch Processor failed, please check your processorType and processorInfo config");
}
}
diff --git a/powerjob-worker/src/main/java/tech/powerjob/worker/processor/impl/JarContainerProcessorFactory.java b/powerjob-worker/src/main/java/tech/powerjob/worker/processor/impl/JarContainerProcessorFactory.java
index 26762be5..59c4e002 100644
--- a/powerjob-worker/src/main/java/tech/powerjob/worker/processor/impl/JarContainerProcessorFactory.java
+++ b/powerjob-worker/src/main/java/tech/powerjob/worker/processor/impl/JarContainerProcessorFactory.java
@@ -46,7 +46,9 @@ public class JarContainerProcessorFactory implements ProcessorFactory {
if (omsContainer != null) {
return new ProcessorBean()
.setProcessor(omsContainer.getProcessor(className))
- .setClassLoader(omsContainer.getContainerClassLoader());
+ .setClassLoader(omsContainer.getContainerClassLoader())
+ .setStable(false)
+ ;
} else {
log.warn("[ProcessorFactory] load container failed. processor info : {}", processorInfo);
}