Merge branch '4.3.9'

This commit is contained in:
tjq 2024-03-02 20:11:56 +08:00
commit d44131128e
36 changed files with 417 additions and 352 deletions

View File

@ -1,3 +1,22 @@
/*
This SQL script targets MySQL 8 only. For other databases, use it as a reference, or let SpringDataJPA create the tables automatically.
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob4
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 02/03/2024 18:51:36
*/
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
@ -5,260 +24,224 @@ SET FOREIGN_KEY_CHECKS = 0;
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info`
(
`id` bigint NOT NULL AUTO_INCREMENT COMMENT 'application ID',
`app_name` varchar(128) NOT NULL COMMENT 'application name',
`current_server` varchar(255) DEFAULT NULL COMMENT 'server address: ActorSystem address of the server that schedules this application',
`gmt_create` datetime NOT NULL COMMENT 'creation time',
`gmt_modified` datetime NOT NULL COMMENT 'last update time',
`password` varchar(255) NOT NULL COMMENT 'application password',
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE = InnoDB
AUTO_INCREMENT = 1
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='application table';
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info`
(
`id` bigint NOT NULL AUTO_INCREMENT COMMENT 'container ID',
`app_id` bigint NOT NULL COMMENT 'application ID',
`container_name` varchar(128) NOT NULL COMMENT 'container name',
`gmt_create` datetime NOT NULL COMMENT 'creation time',
`gmt_modified` datetime NOT NULL COMMENT 'last update time',
`last_deploy_time` datetime DEFAULT NULL COMMENT 'last deploy time',
`source_info` varchar(255) DEFAULT NULL COMMENT 'source info; content depends on source_type: 1. FatJar -> String, 2. Git -> JSON {"repo":"repository","branch":"branch","username":"username","password":"password"}',
`source_type` int NOT NULL COMMENT 'source type, 1: FatJar / 2: Git',
`status` int NOT NULL COMMENT 'status, 1: ENABLE / 2: DISABLE / 99: DELETED',
`version` varchar(255) DEFAULT NULL COMMENT 'version',
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE = InnoDB
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='container table';
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info`
(
`id` bigint NOT NULL AUTO_INCREMENT COMMENT 'task instance ID',
`app_id` bigint NOT NULL COMMENT 'application ID',
`instance_id` bigint NOT NULL COMMENT 'task instance ID',
`type` int NOT NULL COMMENT 'instance type, 1: NORMAL / 2: WORKFLOW',
`job_id` bigint NOT NULL COMMENT 'job ID',
`instance_params` longtext COMMENT 'dynamic job params',
`job_params` longtext COMMENT 'static job params',
`actual_trigger_time` bigint DEFAULT NULL COMMENT 'actual trigger time',
`expected_trigger_time` bigint DEFAULT NULL COMMENT 'expected trigger time',
`finished_time` bigint DEFAULT NULL COMMENT 'finish time',
`last_report_time` bigint DEFAULT NULL COMMENT 'last report time',
`result` longtext COMMENT 'execution result',
`running_times` bigint DEFAULT NULL COMMENT 'total run count, used for retry decisions',
`status` int NOT NULL COMMENT 'instance status, 1: WAITING_DISPATCH / 2: WAITING_WORKER_RECEIVE / 3: RUNNING / 4: FAILED / 5: SUCCEED / 9: CANCELED / 10: STOPPED',
`task_tracker_address` varchar(255) DEFAULT NULL COMMENT 'TaskTracker address',
`wf_instance_id` bigint DEFAULT NULL COMMENT 'workflow instance ID',
`additional_data` longtext COMMENT 'additional data (JSON)',
`gmt_create` datetime NOT NULL COMMENT 'creation time',
`gmt_modified` datetime NOT NULL COMMENT 'last update time',
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`, `status`),
KEY `idx02_instance_info` (`app_id`, `status`),
KEY `idx03_instance_info` (`instance_id`, `status`)
) ENGINE = InnoDB
AUTO_INCREMENT = 1
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='task instance table';
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info`
(
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL COMMENT 'application ID',
`job_name` varchar(128) DEFAULT NULL COMMENT 'job name',
`job_description` varchar(255) DEFAULT NULL COMMENT 'job description',
`job_params` text COMMENT 'default job params',
`concurrency` int DEFAULT NULL COMMENT 'concurrency: max number of threads running this job at the same time',
`designated_workers` varchar(255) DEFAULT NULL COMMENT 'designated workers; empty = unrestricted (comma-separated)',
`dispatch_strategy` int DEFAULT NULL COMMENT 'dispatch strategy, 1: HEALTH_FIRST / 2: RANDOM',
`execute_type` int NOT NULL COMMENT 'execute type, 1: STANDALONE / 2: BROADCAST / 3: MAP_REDUCE / 4: MAP',
`instance_retry_num` int NOT NULL DEFAULT 0 COMMENT 'instance retry count',
`instance_time_limit` bigint NOT NULL DEFAULT 0 COMMENT 'overall instance timeout',
`lifecycle` varchar(255) DEFAULT NULL COMMENT 'lifecycle',
`max_instance_num` int NOT NULL DEFAULT 1 COMMENT 'max concurrently running instances, default 1',
`max_worker_count` int NOT NULL DEFAULT 0 COMMENT 'max number of workers',
`min_cpu_cores` double NOT NULL DEFAULT 0 COMMENT 'minimum CPU cores, 0 = unrestricted',
`min_disk_space` double NOT NULL DEFAULT 0 COMMENT 'minimum disk space (GB), 0 = unrestricted',
`min_memory_space` double NOT NULL DEFAULT 0 COMMENT 'minimum memory (GB), 0 = unrestricted',
`next_trigger_time` bigint DEFAULT NULL COMMENT 'next trigger time',
`notify_user_ids` varchar(255) DEFAULT NULL COMMENT 'alarm user IDs (comma-separated)',
`processor_info` varchar(255) DEFAULT NULL COMMENT 'processor info',
`processor_type` int NOT NULL COMMENT 'processor type, 1: BUILT_IN / 2: SHELL / 3: PYTHON / 4: EXTERNAL (dynamically loaded)',
`status` int NOT NULL COMMENT 'status, 1: ENABLE / 2: DISABLE / 99: DELETED',
`task_retry_num` int NOT NULL DEFAULT 0 COMMENT 'task retry count',
`time_expression` varchar(255) DEFAULT NULL COMMENT 'time expression; content depends on time_expression_type (1: CRON / 2: NULL / 3: LONG / 4: LONG)',
`time_expression_type` int NOT NULL COMMENT 'time expression type, 1: CRON / 2: API / 3: FIX_RATE / 4: FIX_DELAY / 5: WORKFLOW',
`tag` varchar(255) DEFAULT NULL COMMENT 'TAG',
`log_config` varchar(255) DEFAULT NULL COMMENT 'log configuration',
`extra` varchar(255) DEFAULT NULL COMMENT 'extra field',
`gmt_create` datetime NOT NULL COMMENT 'creation time',
`gmt_modified` datetime NOT NULL COMMENT 'last update time',
PRIMARY KEY (`id`),
KEY `idx01_job_info` (`app_id`, `status`, `time_expression_type`, `next_trigger_time`)
) ENGINE = InnoDB
AUTO_INCREMENT = 1
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='job table';
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock`
(
`id` bigint NOT NULL AUTO_INCREMENT COMMENT 'sequence ID',
`lock_name` varchar(128) DEFAULT NULL COMMENT 'lock name',
`max_lock_time` bigint DEFAULT NULL COMMENT 'max lock-holding time',
`ownerip` varchar(255) DEFAULT NULL COMMENT 'owner IP',
`gmt_create` datetime NOT NULL COMMENT 'creation time',
`gmt_modified` datetime NOT NULL COMMENT 'last update time',
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE = InnoDB
AUTO_INCREMENT = 1
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='database lock table';
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info`
(
`id` bigint NOT NULL AUTO_INCREMENT COMMENT 'server ID',
`gmt_create` datetime DEFAULT NULL COMMENT 'creation time',
`gmt_modified` datetime DEFAULT NULL COMMENT 'last update time',
`ip` varchar(128) DEFAULT NULL COMMENT 'server IP address',
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE = InnoDB
AUTO_INCREMENT = 1
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='server table';
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info`
(
`id` bigint NOT NULL AUTO_INCREMENT COMMENT 'user ID',
`username` varchar(128) NOT NULL COMMENT 'username',
`password` varchar(255) DEFAULT NULL COMMENT 'password',
`phone` varchar(255) DEFAULT NULL COMMENT 'phone number',
`email` varchar(128) NOT NULL COMMENT 'email',
`extra` varchar(255) DEFAULT NULL COMMENT 'extra field',
`web_hook` varchar(255) DEFAULT NULL COMMENT 'webhook address',
`gmt_create` datetime NOT NULL COMMENT 'creation time',
`gmt_modified` datetime NOT NULL COMMENT 'last update time',
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_info` (`username`),
UNIQUE KEY `uidx02_user_info` (`email`)
) ENGINE = InnoDB
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='user table';
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `uidx01_user_info` (`username`),
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info`
(
`id` bigint NOT NULL AUTO_INCREMENT COMMENT 'workflow ID',
`app_id` bigint NOT NULL COMMENT 'application ID',
`wf_name` varchar(128) NOT NULL COMMENT 'workflow name',
`wf_description` varchar(255) DEFAULT NULL COMMENT 'workflow description',
`extra` varchar(255) DEFAULT NULL COMMENT 'extra field',
`lifecycle` varchar(255) DEFAULT NULL COMMENT 'lifecycle',
`max_wf_instance_num` int NOT NULL DEFAULT 1 COMMENT 'max concurrently running workflow instances, default 1',
`next_trigger_time` bigint DEFAULT NULL COMMENT 'next trigger time',
`notify_user_ids` varchar(255) DEFAULT NULL COMMENT 'alarm user IDs (comma-separated)',
`pedag` text COMMENT 'DAG info (JSON)',
`status` int NOT NULL COMMENT 'status, 1: ENABLE / 2: DISABLE / 99: DELETED',
`time_expression` varchar(255) DEFAULT NULL COMMENT 'time expression; content depends on time_expression_type (1: CRON / 2: NULL / 3: LONG / 4: LONG)',
`time_expression_type` int NOT NULL COMMENT 'time expression type, 1: CRON / 2: API / 3: FIX_RATE / 4: FIX_DELAY / 5: WORKFLOW',
`gmt_create` datetime DEFAULT NULL COMMENT 'creation time',
`gmt_modified` datetime DEFAULT NULL COMMENT 'last update time',
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`, `status`, `time_expression_type`, `next_trigger_time`)
) ENGINE = InnoDB
AUTO_INCREMENT = 1
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='workflow table';
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info`
(
`id` bigint NOT NULL AUTO_INCREMENT COMMENT 'workflow instance ID',
`wf_instance_id` bigint DEFAULT NULL COMMENT 'workflow instance ID',
`workflow_id` bigint DEFAULT NULL COMMENT 'workflow ID',
`actual_trigger_time` bigint DEFAULT NULL COMMENT 'actual trigger time',
`app_id` bigint DEFAULT NULL COMMENT 'application ID',
`dag` text COMMENT 'DAG info (JSON)',
`expected_trigger_time` bigint DEFAULT NULL COMMENT 'expected trigger time',
`finished_time` bigint DEFAULT NULL COMMENT 'finish time',
`result` text COMMENT 'execution result',
`status` int DEFAULT NULL COMMENT 'workflow instance status, 1: WAITING / 2: RUNNING / 3: FAILED / 4: SUCCEED / 10: STOPPED',
`wf_context` text COMMENT 'workflow context',
`wf_init_params` text COMMENT 'workflow init params',
`gmt_create` datetime DEFAULT NULL COMMENT 'creation time',
`gmt_modified` datetime DEFAULT NULL COMMENT 'last update time',
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`, `status`),
KEY `idx02_wf_instance` (`app_id`, `status`, `expected_trigger_time`)
) ENGINE = InnoDB
AUTO_INCREMENT = 1
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='workflow instance table';
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info`
(
`id` bigint NOT NULL AUTO_INCREMENT COMMENT 'node ID',
`app_id` bigint NOT NULL COMMENT 'application ID',
`enable` bit(1) NOT NULL COMMENT 'enabled, 0: no / 1: yes',
`extra` text COMMENT 'extra field',
`gmt_create` datetime NOT NULL COMMENT 'creation time',
`gmt_modified` datetime NOT NULL COMMENT 'last update time',
`job_id` bigint DEFAULT NULL COMMENT 'job ID',
`node_name` varchar(255) DEFAULT NULL COMMENT 'node name',
`node_params` text COMMENT 'node params',
`skip_when_failed` bit(1) NOT NULL COMMENT 'skip on failure, 0: no / 1: yes',
`type` int DEFAULT NULL COMMENT 'node type, 1: JOB',
`workflow_id` bigint DEFAULT NULL COMMENT 'workflow ID',
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE = InnoDB
AUTO_INCREMENT = 1
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='workflow node table';
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,6 @@
-- Upgrade SQL FROM 4.3.7 to 4.3.8
-- ----------------------------
-- Table change for job_info
-- ----------------------------
alter table job_info add dispatch_strategy_config varchar(255) comment 'dispatch_strategy_config' default null;
alter table job_info add advanced_runtime_config varchar(255) comment 'advanced_runtime_config' default null;

View File

@ -6,7 +6,7 @@
<groupId>tech.powerjob</groupId>
<artifactId>powerjob</artifactId>
<version>4.3.8</version>
<version>4.3.9</version>
<packaging>pom</packaging>
<name>powerjob</name>
<url>http://www.powerjob.tech</url>

View File

@ -5,18 +5,18 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-client</artifactId>
<version>4.3.8</version>
<version>4.3.9</version>
<packaging>jar</packaging>
<properties>
<junit.version>5.9.1</junit.version>
<fastjson.version>1.2.83</fastjson.version>
<powerjob.common.version>4.3.8</powerjob.common.version>
<powerjob.common.version>4.3.9</powerjob.common.version>
<mvn.shade.plugin.version>3.2.4</mvn.shade.plugin.version>
</properties>

View File

@ -5,12 +5,12 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-common</artifactId>
<version>4.3.8</version>
<version>4.3.9</version>
<packaging>jar</packaging>
<properties>

View File

@ -5,12 +5,12 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-official-processors</artifactId>
<version>4.3.8</version>
<version>4.3.9</version>
<packaging>jar</packaging>
<properties>
@ -20,7 +20,7 @@
<!-- dependencies that will not be packaged; scope must be test or provided -->
<junit.version>5.9.1</junit.version>
<logback.version>1.2.13</logback.version>
<powerjob.worker.version>4.3.8</powerjob.worker.version>
<powerjob.worker.version>4.3.9</powerjob.worker.version>
<h2.db.version>2.2.224</h2.db.version>
<mysql.version>8.0.28</mysql.version>
<spring.version>5.3.31</spring.version>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<packaging>pom</packaging>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
@ -21,8 +21,8 @@
<logback.version>1.2.13</logback.version>
<springboot.version>2.7.18</springboot.version>
<powerjob-remote-impl-http.version>4.3.8</powerjob-remote-impl-http.version>
<powerjob-remote-impl-akka.version>4.3.8</powerjob-remote-impl-akka.version>
<powerjob-remote-impl-http.version>4.3.9</powerjob-remote-impl-http.version>
<powerjob-remote-impl-akka.version>4.3.9</powerjob-remote-impl-akka.version>
<gatling.version>3.9.0</gatling.version>
<gatling-maven-plugin.version>4.2.9</gatling-maven-plugin.version>

View File

@ -5,11 +5,11 @@
<parent>
<artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<version>4.3.8</version>
<version>4.3.9</version>
<artifactId>powerjob-remote-framework</artifactId>
<properties>
@ -17,7 +17,7 @@
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<powerjob-common.version>4.3.8</powerjob-common.version>
<powerjob-common.version>4.3.9</powerjob-common.version>
<reflections.version>0.10.2</reflections.version>

View File

@ -5,19 +5,19 @@
<parent>
<artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-remote-impl-akka</artifactId>
<version>4.3.8</version>
<version>4.3.9</version>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<powerjob-remote-framework.version>4.3.8</powerjob-remote-framework.version>
<powerjob-remote-framework.version>4.3.9</powerjob-remote-framework.version>
<akka.version>2.6.13</akka.version>
</properties>

View File

@ -5,12 +5,12 @@
<parent>
<artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-remote-impl-http</artifactId>
<version>4.3.8</version>
<version>4.3.9</version>
<properties>
<maven.compiler.source>8</maven.compiler.source>
@ -18,7 +18,7 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<vertx.version>4.3.7</vertx.version>
<powerjob-remote-framework.version>4.3.8</powerjob-remote-framework.version>
<powerjob-remote-framework.version>4.3.9</powerjob-remote-framework.version>
</properties>
<dependencies>

View File

@ -5,12 +5,12 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-server</artifactId>
<version>4.3.8</version>
<version>4.3.9</version>
<packaging>pom</packaging>
<modules>
@ -50,12 +50,12 @@
<groovy.version>3.0.10</groovy.version>
<cron-utils.version>9.2.1</cron-utils.version>
<powerjob-common.version>4.3.8</powerjob-common.version>
<powerjob-remote-impl-http.version>4.3.8</powerjob-remote-impl-http.version>
<powerjob-remote-impl-akka.version>4.3.8</powerjob-remote-impl-akka.version>
<powerjob-common.version>4.3.9</powerjob-common.version>
<powerjob-remote-impl-http.version>4.3.9</powerjob-remote-impl-http.version>
<powerjob-remote-impl-akka.version>4.3.9</powerjob-remote-impl-akka.version>
<springdoc-openapi-ui.version>1.6.14</springdoc-openapi-ui.version>
<aliyun-sdk-oss.version>3.17.1</aliyun-sdk-oss.version>
<minio.version>8.5.2</minio.version>
<aws-java-sdk-s3.version>1.12.665</aws-java-sdk-s3.version>
<commons-collections4.version>4.4</commons-collections4.version>
</properties>
@ -114,12 +114,13 @@
<artifactId>aliyun-sdk-oss</artifactId>
<version>${aliyun-sdk-oss.version}</version>
</dependency>
<!-- storage extension - MinIO; remove if unused -->
<!-- storage extension - MinIO/S3; remove if unused (minio-client depends on OkHttp 4.x, which forcibly pulls in the Kotlin standard library; dropped to avoid introducing more problems) -->
<dependency>
<groupId>io.minio</groupId>
<artifactId>minio</artifactId>
<version>${minio.version}</version>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>${aws-java-sdk-s3.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-collections4 -->
<dependency>
<groupId>org.apache.commons</groupId>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -317,40 +317,39 @@ public class ContainerService {
.collect(Collectors.toSet());
Set<String> deployedList = Sets.newLinkedHashSet();
List<String> unDeployedList = Lists.newLinkedList();
Multimap<String, String> version2Address = ArrayListMultimap.create();
Multimap<String, DeployedContainerInfo> version2DeployedContainerInfoList = ArrayListMultimap.create();
infoList.forEach(info -> {
String targetWorkerAddress = info.getWorkerAddress();
if (aliveWorkers.contains(targetWorkerAddress)) {
deployedList.add(targetWorkerAddress);
version2Address.put(info.getVersion(), targetWorkerAddress);
}else {
unDeployedList.add(targetWorkerAddress);
version2DeployedContainerInfoList.put(info.getVersion(), info);
}
});
Set<String> unDeployedList = Sets.newHashSet(aliveWorkers);
unDeployedList.removeAll(deployedList);
StringBuilder sb = new StringBuilder("========== DeployedInfo ==========").append(System.lineSeparator());
// split brain: workers running different container versions is a serious problem
if (version2Address.keySet().size() > 1) {
if (version2DeployedContainerInfoList.keySet().size() > 1) {
sb.append("WARN: there exists multi version container now, please redeploy to fix this problem").append(System.lineSeparator());
sb.append("divisive version ==> ").append(System.lineSeparator());
version2Address.forEach((v, addressList) -> {
sb.append("version: ").append(v).append(System.lineSeparator());
sb.append(addressList);
});
sb.append(System.lineSeparator());
}
sb.append("divisive version ==> ").append(System.lineSeparator());
version2DeployedContainerInfoList.asMap().forEach((version, deployedContainerInfos) -> {
sb.append("[version] ").append(version).append(System.lineSeparator());
deployedContainerInfos.forEach(deployedContainerInfo -> sb.append(String.format("Address: %s, DeployedTime: %s", deployedContainerInfo.getWorkerAddress(), CommonUtils.formatTime(deployedContainerInfo.getDeployedTime()))).append(System.lineSeparator()));
});
// workers currently online but without the container deployed
if (!CollectionUtils.isEmpty(unDeployedList)) {
sb.append("WARN: there exists unDeployed worker(OhMyScheduler will auto fix when some job need to process)").append(System.lineSeparator());
sb.append("unDeployed worker list ==> ").append(System.lineSeparator());
sb.append("WARN: there exists unDeployed worker(PowerJob will auto fix when some job need to process)").append(System.lineSeparator());
sb.append("unDeployed worker list ==> ").append(unDeployedList).append(System.lineSeparator());
}
// workers with the container currently deployed
sb.append("deployed worker list ==> ").append(System.lineSeparator());
if (CollectionUtils.isEmpty(deployedList)) {
sb.append("no worker deployed now~");
}else {
sb.append(deployedList);
sb.append("no worker deployed this container now~");
}
return sb.toString();
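
For readers unfamiliar with the Guava type used above, here is a minimal, self-contained sketch of the grouping pattern (hypothetical data, not part of this commit):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

public class MultimapGroupingDemo {
    public static void main(String[] args) {
        // A Multimap groups several values under one key without hand-managed lists.
        Multimap<String, String> versionToAddress = ArrayListMultimap.create();
        versionToAddress.put("v1", "192.168.1.10:27777");
        versionToAddress.put("v1", "192.168.1.11:27777");
        versionToAddress.put("v2", "192.168.1.12:27777");
        // asMap() exposes the grouped view as Map<String, Collection<String>>.
        versionToAddress.asMap().forEach((version, addresses) ->
                System.out.println("[version] " + version + " -> " + addresses));
    }
}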

View File

@ -145,7 +145,11 @@ public class InstanceStatusCheckService {
log.info("[InstanceStatusChecker] RunningInstance status check using {}.", stopwatch.stop());
}
private void handleWaitingDispatchInstance(List<Long> partAppIds) {
private void handleWaitingDispatchInstance(List<Long> appIds) {
// removal operations happen below, so the collection must be re-created; otherwise the outer loop throws NoSuchElementException: null
List<Long> partAppIds = Lists.newArrayList(appIds);
// 1. check instances stuck in WAITING_DISPATCH status
long threshold = System.currentTimeMillis() - DISPATCH_TIMEOUT_MS;
List<InstanceInfoDO> waitingDispatchInstances = instanceInfoRepository.findAllByAppIdInAndStatusAndExpectedTriggerTimeLessThan(partAppIds, InstanceStatus.WAITING_DISPATCH.getV(), threshold, PageRequest.of(0, MAX_BATCH_NUM_INSTANCE));
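
The defensive copy above is worth illustrating. Assuming the caller hands in a partition view of a shared list (hypothetical names, not this commit's actual caller), mutating the view mid-iteration breaks the outer loop, while mutating a copy is safe:

import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class DefensiveCopyDemo {
    public static void main(String[] args) {
        List<Long> appIds = new ArrayList<>(Arrays.asList(1L, 2L, 3L, 4L));
        // Lists.partition returns views backed by 'appIds'; handing them out
        // directly lets downstream removal corrupt this iteration.
        for (List<Long> part : Lists.partition(appIds, 2)) {
            handle(new ArrayList<>(part)); // copy first, as the fix above does
        }
        System.out.println(appIds); // source list stays intact: [1, 2, 3, 4]
    }

    static void handle(List<Long> partAppIds) {
        partAppIds.removeIf(id -> id % 2 == 0); // mutation stays local to the copy
    }
}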

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
@ -41,8 +41,8 @@
<artifactId>aliyun-sdk-oss</artifactId>
</dependency>
<dependency>
<groupId>io.minio</groupId>
<artifactId>minio</artifactId>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
</dependency>
</dependencies>

View File

@ -1,8 +1,13 @@
package tech.powerjob.server.persistence.storage.impl;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.*;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import io.minio.*;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
@ -16,8 +21,6 @@ import tech.powerjob.server.extension.dfs.*;
import tech.powerjob.server.persistence.storage.AbstractDFsService;
import javax.annotation.Priority;
import java.nio.file.Files;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@ -44,19 +47,19 @@ public class MinioOssService extends AbstractDFsService {
private static final String KEY_BUCKET_NAME = "bucketName";
private static final String ACCESS_KEY = "accessKey";
private static final String SECRET_KEY = "secretKey";
private MinioClient minioClient;
private AmazonS3 amazonS3;
private String bucket;
private static final String NO_SUCH_KEY = "NoSuchKey";
private static final String NOT_FOUND = "404 Not Found";
@Override
public void store(StoreRequest storeRequest) {
try {
minioClient.uploadObject(UploadObjectArgs.builder()
.bucket(this.bucket)
.object(parseFileName(storeRequest.getFileLocation()))
.filename(storeRequest.getLocalFile().getPath())
.contentType(Files.probeContentType(storeRequest.getLocalFile().toPath()))
.build());
String fileName = parseFileName(storeRequest.getFileLocation());
// build the PutObjectRequest
PutObjectRequest request = new PutObjectRequest(this.bucket, fileName, storeRequest.getLocalFile());
amazonS3.putObject(request);
} catch (Throwable t) {
ExceptionUtils.rethrow(t);
}
@ -66,13 +69,11 @@ public class MinioOssService extends AbstractDFsService {
public void download(DownloadRequest downloadRequest) {
try {
FileUtils.forceMkdirParent(downloadRequest.getTarget());
// download the file
minioClient.downloadObject(
DownloadObjectArgs.builder()
.bucket(this.bucket)
.object(parseFileName(downloadRequest.getFileLocation()))
.filename(downloadRequest.getTarget().getAbsolutePath())
.build());
String fileName = parseFileName(downloadRequest.getFileLocation());
GetObjectRequest getObjectRequest = new GetObjectRequest(this.bucket, fileName);
amazonS3.getObject(getObjectRequest, downloadRequest.getTarget());
} catch (Throwable t) {
ExceptionUtils.rethrow(t);
}
@ -86,26 +87,28 @@ public class MinioOssService extends AbstractDFsService {
@Override
public Optional<FileMeta> fetchFileMeta(FileLocation fileLocation) {
try {
StatObjectResponse stat = minioClient.statObject(StatObjectArgs.builder()
.bucket(this.bucket)
.object(parseFileName(fileLocation))
.build());
return Optional.ofNullable(stat).map(minioStat -> {
String fileName = parseFileName(fileLocation);
ObjectMetadata objectMetadata = amazonS3.getObjectMetadata(this.bucket, fileName);
return Optional.ofNullable(objectMetadata).map(minioStat -> {
Map<String, Object> metaInfo = Maps.newHashMap();
if (stat.userMetadata() != null) {
metaInfo.putAll(stat.userMetadata());
if (objectMetadata.getRawMetadata() != null) {
metaInfo.putAll(objectMetadata.getRawMetadata());
}
return new FileMeta()
.setLastModifiedTime(Date.from(stat.lastModified().toInstant()))
.setLength(stat.size())
.setLastModifiedTime(objectMetadata.getLastModified())
.setLength(objectMetadata.getContentLength())
.setMetaInfo(metaInfo);
});
} catch (Exception oe) {
String errorCode = oe.getMessage();
if (NO_SUCH_KEY.equalsIgnoreCase(errorCode)) {
} catch (AmazonS3Exception s3Exception) {
String errorCode = s3Exception.getErrorCode();
if (NOT_FOUND.equalsIgnoreCase(errorCode)) {
return Optional.empty();
}
} catch (Exception oe) {
ExceptionUtils.rethrow(oe);
}
return Optional.empty();
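
As an aside, a hypothetical, more backend-agnostic variant (not part of this commit) could key off the HTTP status code rather than the error-code string, since S3-compatible services word a missing key differently:

// Sketch only; assumes the same AWS SDK v1 types imported above.
private boolean objectExists(AmazonS3 s3, String bucket, String key) {
    try {
        s3.getObjectMetadata(bucket, key); // issues a HEAD request
        return true;
    } catch (AmazonS3Exception e) {
        if (e.getStatusCode() == 404) {
            return false; // object absent, whatever the error-code wording
        }
        throw e;
    }
}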
@ -170,8 +173,18 @@ public class MinioOssService extends AbstractDFsService {
if (StringUtils.isEmpty(bucketName)) {
throw new IllegalArgumentException("'oms.storage.dfs.minio.bucketName' can't be empty, please create a bucket in the minio console first, then configure it for powerjob");
}
// build the credentials object
BasicAWSCredentials awsCreds = new BasicAWSCredentials(accessKey, secretKey);
// build the AmazonS3 client with an explicit endpoint and the credentials
this.amazonS3 = AmazonS3ClientBuilder.standard()
// When using the AWS Java SDK against a non-AWS service such as MinIO, a region must still be specified: the client builder needs one to configure its service endpoint, even when connecting to a local or third-party S3-compatible service. "us-east-1" is a common placeholder because it is one of the most widely used AWS regions; it does not affect the actual connection or data transfer, since the real service address comes from the endpoint URL supplied below. If the code only talks to MinIO and never to AWS itself, the region is a purely formal requirement.
.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, "us-east-1"))
.withCredentials(new AWSStaticCredentialsProvider(awsCreds))
.withPathStyleAccessEnabled(true) // important: enable path-style access
.build();
this.bucket = bucketName;
minioClient = MinioClient.builder().endpoint(endpoint).credentials(accessKey, secretKey).build();
createBucket(bucketName);
log.info("[Minio] initialize OSS successfully!");
}
@ -183,10 +196,15 @@ public class MinioOssService extends AbstractDFsService {
*/
@SneakyThrows(Exception.class)
public void createBucket(String bucketName) {
if (!bucketExists(bucketName)) {
minioClient.makeBucket(MakeBucketArgs.builder()
.bucket(bucketName).build());
// recommended: create the bucket yourself in advance and set up the relevant policies
if (bucketExists(bucketName)) {
return;
}
Bucket createBucketResult = amazonS3.createBucket(bucketName);
log.info("[Minio] createBucket successfully, bucketName: {}, createResult: {}", bucketName, createBucketResult);
String policy = "{\n" +
" \"Version\": \"2012-10-17\",\n" +
" \"Statement\": [\n" +
@ -206,7 +224,11 @@ public class MinioOssService extends AbstractDFsService {
" }\n" +
" ]\n" +
"}";
minioClient.setBucketPolicy(SetBucketPolicyArgs.builder().bucket(bucketName).config(policy).build());
try {
amazonS3.setBucketPolicy(bucketName, policy);
} catch (Exception e) {
log.warn("[Minio] setBucketPolicy failed, maybe you need to setBucketPolicy by yourself!", e);
}
}
/**
@ -217,7 +239,8 @@ public class MinioOssService extends AbstractDFsService {
*/
@SneakyThrows(Exception.class)
public boolean bucketExists(String bucketName) {
return minioClient.bucketExists(BucketExistsArgs.builder().bucket(bucketName).build());
return amazonS3.doesBucketExistV2(bucketName);
}
public static class MinioOssCondition extends PropertyAndOneBeanCondition {

View File

@ -1,8 +1,8 @@
package tech.powerjob.server.persistence.storage.impl;
import com.aliyun.oss.common.utils.AuthUtils;
import com.aliyun.oss.common.utils.StringUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import tech.powerjob.server.extension.dfs.DFsService;
@ -33,7 +33,7 @@ class AliOssServiceTest extends AbstractDfsServiceTest {
log.info("[AliOssServiceTest] ak: {}, sk: {}", accessKeyId, secretAccessKey);
if (org.apache.commons.lang3.StringUtils.isAnyEmpty(accessKeyId, secretAccessKey)) {
if (StringUtils.isAnyEmpty(accessKeyId, secretAccessKey)) {
return Optional.empty();
}

View File

@ -0,0 +1,30 @@
package tech.powerjob.server.persistence.storage.impl;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.server.extension.dfs.DFsService;
import java.util.Optional;
/**
* MinioOssServiceTest
* The test requires a locally deployed MinIO, so exceptions are caught and a failure does not block the overall test run.
*
* @author tjq
* @since 2024/2/26
*/
@Slf4j
class MinioOssServiceTest extends AbstractDfsServiceTest {
@Override
protected Optional<DFsService> fetchService() {
try {
MinioOssService minioOssService = new MinioOssService();
minioOssService.initOssClient("http://192.168.124.23:9000", "pj2","testAk", "testSktestSktestSk");
return Optional.of(minioOssService);
} catch (Exception e) {
// log the exception only
log.error("[MinioOssServiceTest] test exception!", e);
}
return Optional.empty();
}
}

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -1,7 +1,7 @@
package tech.powerjob.server.remote.worker.selector.impl;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.compress.utils.Lists;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import tech.powerjob.common.enums.DispatchStrategy;

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -12,7 +12,7 @@ spring.datasource.core.minimum-idle=5
####### Storage properties(Delete if not needed) #######
#oms.storage.dfs.mongodb.uri=mongodb+srv://zqq:No1Bug2Please3!@cluster0.wie54.gcp.mongodb.net/powerjob_daily?retryWrites=true&w=majority
oms.storage.dfs.mysql_series.driver=com.mysql.cj.jdbc.Driver
oms.storage.dfs.mysql_series.url=jdbc:mysql://localhost:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai
oms.storage.dfs.mysql_series.url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai
oms.storage.dfs.mysql_series.username=root
oms.storage.dfs.mysql_series.password=No1Bug2Please3!
oms.storage.dfs.mysql_series.auto_create_table=true

View File

@ -10,7 +10,7 @@ spring.datasource.core.maximum-pool-size=20
spring.datasource.core.minimum-idle=5
####### Storage properties(Delete if not needed) #######
oms.storage.dfs.mongodb.uri=mongodb://localhost:27017/powerjob-product
#oms.storage.dfs.mongodb.uri=mongodb://localhost:27017/powerjob-product
####### Email properties(Non-core configuration properties) #######
####### Delete the following code to disable the mail #######

View File

@ -5,24 +5,24 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-worker-agent</artifactId>
<version>4.3.8</version>
<version>4.3.9</version>
<packaging>jar</packaging>
<properties>
<powerjob.worker.version>4.3.8</powerjob.worker.version>
<powerjob.worker.version>4.3.9</powerjob.worker.version>
<logback.version>1.2.13</logback.version>
<picocli.version>4.3.2</picocli.version>
<spring.version>5.3.31</spring.version>
<spring.boot.version>2.3.4.RELEASE</spring.boot.version>
<powerjob.official.processors.version>4.3.8</powerjob.official.processors.version>
<powerjob.official.processors.version>4.3.9</powerjob.official.processors.version>
<!-- dependency for dynamic sql processor -->
<mysql.version>8.0.28</mysql.version>

View File

@ -5,18 +5,18 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-worker-samples</artifactId>
<version>4.3.8</version>
<version>4.3.9</version>
<properties>
<springboot.version>2.7.18</springboot.version>
<powerjob.worker.starter.version>4.3.8</powerjob.worker.starter.version>
<powerjob.worker.starter.version>4.3.9</powerjob.worker.starter.version>
<fastjson.version>1.2.83</fastjson.version>
<powerjob.official.processors.version>4.3.8</powerjob.official.processors.version>
<powerjob.official.processors.version>4.3.9</powerjob.official.processors.version>
<!-- skip this module during deployment -->
<maven.deploy.skip>true</maven.deploy.skip>

View File

@ -5,16 +5,16 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-worker-spring-boot-starter</artifactId>
<version>4.3.8</version>
<version>4.3.9</version>
<packaging>jar</packaging>
<properties>
<powerjob.worker.version>4.3.8</powerjob.worker.version>
<powerjob.worker.version>4.3.9</powerjob.worker.version>
<springboot.version>2.7.18</springboot.version>
</properties>

View File

@ -5,12 +5,12 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.3.8</version>
<version>4.3.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-worker</artifactId>
<version>4.3.8</version>
<version>4.3.9</version>
<packaging>jar</packaging>
<properties>
@ -21,10 +21,10 @@
<logback.version>1.2.13</logback.version>
<powerjob-common.version>4.3.8</powerjob-common.version>
<powerjob-remote-framework.version>4.3.8</powerjob-remote-framework.version>
<powerjob-remote-impl-akka.version>4.3.8</powerjob-remote-impl-akka.version>
<powerjob-remote-impl-http.version>4.3.8</powerjob-remote-impl-http.version>
<powerjob-common.version>4.3.9</powerjob-common.version>
<powerjob-remote-framework.version>4.3.9</powerjob-remote-framework.version>
<powerjob-remote-impl-akka.version>4.3.9</powerjob-remote-impl-akka.version>
<powerjob-remote-impl-http.version>4.3.9</powerjob-remote-impl-http.version>
</properties>
<dependencies>

View File

@ -92,6 +92,7 @@ public class OmsContainerFactory {
try {
if (!jarFile.exists()) {
log.info("[OmsContainer-{}] container not exist(path={}), try to download from server!", containerId, jarFile.getPath());
FileUtils.forceMkdirParent(jarFile);
FileUtils.copyURLToFile(new URL(request.getDownloadURL()), jarFile, 5000, 300000);
log.info("[OmsContainer-{}] download jar successfully, path={}", containerId, jarFile.getPath());
@ -107,6 +108,7 @@ public class OmsContainerFactory {
if (oldContainer != null) {
// destroy the old container
log.info("[OmsContainer-{}] start to destroy old container(version={})", containerId, oldContainer.getVersion());
oldContainer.destroy();
}

View File

@ -25,4 +25,10 @@ public class ProcessorBean {
*/
private transient ClassLoader classLoader;
/**
* Whether this bean is stable.
* A Spring bean or a plain Java object never changes for the lifetime of the JVM, so it can be declared stable and cached upstream, avoiding a rebuild of the processor on every load.
* Beans from dynamic containers may change after a redeploy and must be declared unstable.
*/
private boolean stable = true;
}
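
The flag drives a cache-or-rebuild decision in the loader (see the PowerJobProcessorLoader change later in this diff); in outline, with hypothetical names:

// Stable beans are cached for the JVM lifetime; unstable ones are rebuilt per load.
ProcessorBean bean = cache.computeIfAbsent(definition, ignore -> build(definition));
return bean.isStable() ? bean : build(definition);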

View File

@ -30,27 +30,36 @@ public class PowerJobProcessorLoader implements ProcessorLoader {
@Override
public ProcessorBean load(ProcessorDefinition definition) {
return def2Bean.computeIfAbsent(definition, ignore -> {
final String processorType = definition.getProcessorType();
log.info("[ProcessorFactory] start to load Processor: {}", definition);
for (ProcessorFactory pf : processorFactoryList) {
final String pfName = pf.getClass().getSimpleName();
if (!Optional.ofNullable(pf.supportTypes()).orElse(Collections.emptySet()).contains(processorType)) {
log.info("[ProcessorFactory] [{}] can't load type={}, skip!", pfName, processorType);
continue;
}
log.info("[ProcessorFactory] [{}] try to load processor: {}", pfName, definition);
try {
ProcessorBean processorBean = pf.build(definition);
if (processorBean != null) {
log.info("[ProcessorFactory] [{}] load processor successfully: {}", pfName, definition);
return processorBean;
}
} catch (Throwable t) {
log.error("[ProcessorFactory] [{}] load processor failed: {}", pfName, definition, t);
}
ProcessorBean pBean = def2Bean.computeIfAbsent(definition, ignore -> buildProcessorBean(definition));
if (pBean.isStable()) {
return pBean;
}
return buildProcessorBean(definition);
}
private ProcessorBean buildProcessorBean(ProcessorDefinition definition) {
final String processorType = definition.getProcessorType();
log.info("[ProcessorFactory] start to load Processor: {}", definition);
for (ProcessorFactory pf : processorFactoryList) {
final String pfName = pf.getClass().getSimpleName();
if (!Optional.ofNullable(pf.supportTypes()).orElse(Collections.emptySet()).contains(processorType)) {
log.info("[ProcessorFactory] [{}] can't load type={}, skip!", pfName, processorType);
continue;
}
throw new PowerJobException("fetch Processor failed, please check your processorType and processorInfo config");
});
log.info("[ProcessorFactory] [{}] try to load processor: {}", pfName, definition);
try {
ProcessorBean processorBean = pf.build(definition);
if (processorBean != null) {
log.info("[ProcessorFactory] [{}] load processor successfully: {}", pfName, definition);
return processorBean;
}
} catch (Throwable t) {
log.error("[ProcessorFactory] [{}] load processor failed: {}", pfName, definition, t);
}
}
throw new PowerJobException("fetch Processor failed, please check your processorType and processorInfo config");
}
}

View File

@ -46,7 +46,9 @@ public class JarContainerProcessorFactory implements ProcessorFactory {
if (omsContainer != null) {
return new ProcessorBean()
.setProcessor(omsContainer.getProcessor(className))
.setClassLoader(omsContainer.getContainerClassLoader());
.setClassLoader(omsContainer.getContainerClassLoader())
.setStable(false)
;
} else {
log.warn("[ProcessorFactory] load container failed. processor info : {}", processorInfo);
}