Merge branch '5.1.0'

This commit is contained in:
tjq 2024-08-12 00:39:33 +08:00
commit 0bf95cf419
231 changed files with 10324 additions and 1293 deletions

View File

@ -1,20 +1,22 @@
/*
 Official schema file, exported from MySQL8 only; it may need adjustment for other databases or versions.
 Prefer letting SpringDataJPA auto-generate the tables instead of executing this SQL by hand.
*/
/*
Navicat Premium Data Transfer Navicat Premium Data Transfer
Source Server : Local@3306 Source Server : Local@3306
Source Server Type : MySQL Source Server Type : MySQL
Source Server Version : 80300 (8.3.0) Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306 Source Host : localhost:3306
Source Schema : powerjob4 Source Schema : powerjob5
Target Server Type : MySQL Target Server Type : MySQL
Target Server Version : 80300 (8.3.0) Target Server Version : 80300 (8.3.0)
File Encoding : 65001 File Encoding : 65001
Date: 02/03/2024 18:51:36 Date: 11/08/2024 23:23:30
*/ */
SET NAMES utf8mb4; SET NAMES utf8mb4;
@ -27,10 +29,16 @@ DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` ( CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT, `id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL, `app_name` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL, `current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL, `gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL, `gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL, `password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`), PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`) UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
@ -125,6 +133,27 @@ CREATE TABLE `job_info` (
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`) KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
-- Table structure for oms_lock -- Table structure for oms_lock
-- ---------------------------- -- ----------------------------
@ -138,6 +167,21 @@ CREATE TABLE `oms_lock` (
`ownerip` varchar(255) DEFAULT NULL, `ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`), PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`) UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
@ -154,24 +198,62 @@ CREATE TABLE `server_info` (
KEY `idx01_server_info` (`gmt_modified`) KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
-- Table structure for user_info -- Table structure for user_info
-- ---------------------------- -- ----------------------------
DROP TABLE IF EXISTS `user_info`; DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` ( CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT, `id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL, `email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL, `extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL, `gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL, `gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL, `password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL, `phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL, `username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL, `web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`), PRIMARY KEY (`id`),
KEY `uidx01_user_info` (`username`), UNIQUE KEY `uidx01_user_name` (`username`),
KEY `uidx02_user_info` (`email`) KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
-- Table structure for workflow_info -- Table structure for workflow_info
@ -244,4 +326,3 @@ CREATE TABLE `workflow_node_info` (
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1; SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,243 @@
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob4
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 02/03/2024 18:51:36
*/
SET NAMES utf8mb4;
-- Disabled during restore so tables can be dropped/created in any order; re-enabled at end of file.
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
-- Registry of applications (worker groups); presumably one row per connected app -- TODO confirm.
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
-- address of the server currently owning this app (presumably host:port -- verify against server code)
`current_server` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
-- NOTE(review): plain varchar(255); confirm the application hashes this before insert
`password` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
-- Deployable container packages attached to an app.
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
-- source_type/status are int enum codes; presumably mapped in application code -- TODO confirm
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- non-unique lookup index on the owning app
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
-- One row per job execution instance; presumably high-churn -- the three composite indexes
-- below cover the (job|app|instance) + status lookups.
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
-- trigger/finish/report times are stored as bigint (presumably epoch millis -- TODO confirm)
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
-- NOTE(review): instance_id looks like a business key but is indexed non-unique -- confirm intent
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
-- Job definitions (schedule, dispatch and resource constraints).
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
-- minimum worker resource requirements; the only NOT NULL doubles in this schema
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
-- covers the scheduler scan: due jobs per app/status/expression type
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
-- Database-backed distributed lock; the unique lock_name is the lock key.
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
-- NOTE(review): `ownerip` breaks snake_case (`owner_ip`); left as-is -- renaming would break the ORM column mapping
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
-- AUTO_INCREMENT=2 is dump residue from the source DB; harmless on a fresh install
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
-- Known server nodes, unique by ip; gmt_modified indexed (presumably used as a liveness heartbeat scan -- TODO confirm).
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
-- Console user accounts.
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
-- NOTE(review): confirm the application stores a hash here, not the raw password
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- fix: both indexes are plain (non-unique) KEYs, so the `uidx` prefix was misleading;
-- renamed to the `idx` prefix used for non-unique indexes everywhere else in this file
KEY `idx01_user_info` (`username`),
KEY `idx02_user_info` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
-- Workflow definitions; schedule columns mirror job_info.
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
-- `pedag`: serialized workflow DAG, presumably ("PEDag" upstream naming) -- do not rename; ORM-mapped
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
-- One row per workflow execution; wf_instance_id is the unique business key.
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
-- set when this instance was spawned by a parent workflow (nested workflows)
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
-- Individual nodes within a workflow DAG; note app_id/enable/skip_when_failed and the
-- audit timestamps are NOT NULL here, unlike most other tables in this dump.
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- restore normal FK enforcement (paired with the SET ... = 0 at the top of this file)
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,323 @@
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob5
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 16/03/2024 22:07:31
*/
SET NAMES utf8mb4;
-- Disabled during restore so tables can be dropped/created in any order; re-enabled at end of file.
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
-- Registry of applications; v5 adds creator/modifier audit columns, namespace scoping, tags and title.
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
-- creator/modifier hold user ids (presumably FKs to user_info.id, not enforced -- TODO confirm)
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
-- logical FK to namespace.id; no database-level constraint declared
`namespace_id` bigint DEFAULT NULL,
-- NOTE(review): plain varchar(255); confirm the application hashes this before insert
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
-- Deployable container packages attached to an app.
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
-- source_type/status are int enum codes; presumably mapped in application code -- TODO confirm
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- non-unique lookup index on the owning app
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
-- One row per job execution instance; unchanged from the v4 schema.
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
-- trigger/finish/report times are stored as bigint (presumably epoch millis -- TODO confirm)
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
-- NOTE(review): instance_id looks like a business key but is indexed non-unique -- confirm intent
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
-- Job definitions (schedule, dispatch and resource constraints); unchanged from the v4 schema.
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
-- minimum worker resource requirements; the only NOT NULL doubles in this schema
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
-- covers the scheduler scan: due jobs per app/status/expression type
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
-- New in v5: namespaces grouping apps (see app_info.namespace_id); unique by code.
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
-- creator/modifier hold user ids (presumably FKs to user_info.id, not enforced -- TODO confirm)
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
-- access token for the namespace, presumably -- TODO confirm semantics
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
-- AUTO_INCREMENT=2 is dump residue (a seed row existed in the source DB); harmless on a fresh install
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
-- Database-backed distributed lock; the unique lock_name is the lock key.
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
-- NOTE(review): `ownerip` breaks snake_case (`owner_ip`); left as-is -- renaming would break the ORM column mapping
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
-- AUTO_INCREMENT=4 is dump residue from the source DB; harmless on a fresh install
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
-- New in v5: credential store with a unique username ("pwjb" = PowerJob built-in auth, presumably -- TODO confirm).
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
-- NOTE(review): confirm the application stores a hash here, not the raw password
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- NOTE(review): index named after the column, not the `uidx01_<table>` convention used elsewhere
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
-- Known server nodes, unique by ip; gmt_modified indexed (presumably used as a liveness heartbeat scan -- TODO confirm).
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
-- New in v5: generic key/value store keyed by the composite (pkey, skey) -- primary key + sub key, presumably.
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
-- AUTO_INCREMENT=3 is dump residue from the source DB; harmless on a fresh install
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
-- Console user accounts; v5 adds account_type, nick, origin_username, status and
-- token_login_verify_info, and makes username UNIQUE.
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
-- NOTE(review): confirm the application stores a hash here, not the raw password
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- fix: renamed from `uidx01_user_name` to follow the file-wide `uidx01_<table>` convention
UNIQUE KEY `uidx01_user_info` (`username`),
-- fix: renamed from `uidx02_user_info` -- the `u` prefix wrongly implied uniqueness on a plain KEY
KEY `idx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
-- New in v5: role grants; role/scope are int enum codes and target is the granted
-- object id within that scope (semantics defined in application code -- TODO confirm).
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
-- fix: renamed from `uidx01_user_id` -- the `u` prefix wrongly implied uniqueness on a plain KEY,
-- and the name now follows the file-wide `idx01_<table>` convention
KEY `idx01_user_role` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
-- Workflow definitions; unchanged from the v4 schema.
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
-- `pedag`: serialized workflow DAG, presumably ("PEDag" upstream naming) -- do not rename; ORM-mapped
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
-- One row per workflow execution; wf_instance_id is the unique business key.
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
-- set when this instance was spawned by a parent workflow (nested workflows)
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
-- Individual nodes within a workflow DAG; note app_id/enable/skip_when_failed and the
-- audit timestamps are NOT NULL here, unlike most other tables in this dump.
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- restore normal FK enforcement (paired with the SET ... = 0 at the top of this file)
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,323 @@
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob5
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 11/08/2024 23:23:30
*/
-- Use the utf8mb4 connection charset so data round-trips correctly.
SET NAMES utf8mb4;
-- Disable FK checks while (re)creating tables so statement order does not
-- matter; re-enabled at the end of the script.
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
-- Registry of worker applications; `current_server` records the server node
-- currently owning the app.
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
-- creator / modifier are presumably user_info ids -- confirm against server code
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
-- namespace the app belongs to (see `namespace` table)
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- app names are globally unique
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
-- Deployable container metadata per app; `source_info`/`source_type` describe
-- where the artifact comes from (semantics defined in server code -- confirm).
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- list all containers owned by an app
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
-- One row per job run. NOTE(review): `instance_id` is only covered by the
-- non-unique idx03 below, not a UNIQUE key -- uniqueness is presumably
-- guaranteed by the id generator; confirm.
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
-- *_trigger_time / finished_time / last_report_time are bigint -- presumably
-- epoch milliseconds; confirm against the server code that writes them.
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
-- set when this run belongs to a workflow instance
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
-- Job definitions. Most *_config columns hold serialized configuration
-- strings whose schema lives in the server code.
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
-- minimum worker resource requirements; NOT NULL doubles (0 presumably means
-- "no requirement" -- confirm)
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
-- scheduler scan: due jobs per app/status/expression type
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
-- Logical grouping of apps (multi-tenancy unit introduced with the auth
-- system). AUTO_INCREMENT=2 indicates the dump source DB already held one
-- seeded row (row data is not part of this schema file).
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- namespace codes are unique identifiers
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
-- Database-backed distributed lock: acquiring a lock = inserting a row whose
-- `lock_name` is unique; the unique key arbitrates between servers.
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
-- NOTE(review): `ownerip` breaks the file's snake_case convention
-- (`owner_ip`); kept as-is because it must match the JPA entity column name.
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
-- Credential store for the built-in (PowerJob) login type; `password` is
-- presumably stored hashed -- confirm against the auth service code.
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- one credential row per username
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
-- Registry of server nodes, one row per unique IP. The index on
-- `gmt_modified` presumably supports staleness/heartbeat queries -- confirm.
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
-- Generic key/value storage: rows are addressed by the (pkey, skey) pair,
-- with the payload in `content`.
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- the primary/secondary key pair addresses exactly one row
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
-- Console user accounts; `account_type`/`origin_username` tie the row back to
-- its login provider (e.g. the pwjb_user_info table -- confirm).
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_name` (`username`),
-- NOTE(review): `uidx02_user_info` is a plain (non-unique) KEY despite the
-- `uidx` prefix -- duplicate emails are allowed; the name is misleading but
-- kept to match the deployed/JPA-generated schema.
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
-- Role grants: user `user_id` holds `role` over the object identified by
-- (`scope`, `target`) -- enum semantics live in the server code.
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
-- NOTE(review): non-unique KEY despite the `uidx` prefix (a user may hold
-- many grants); misleading name kept to match the deployed schema.
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
-- Workflow definitions; `pedag` holds the serialized DAG description
-- (presumably "PEWorkflowDAG" JSON -- confirm against the server code).
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- scheduler scan: due workflows per app/status/expression type
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
-- One row per workflow execution. `id` is a surrogate primary key; the
-- externally used run identifier is `wf_instance_id` (unique, see below).
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
-- *_trigger_time / finished_time are bigint -- presumably epoch milliseconds;
-- confirm against the server code that writes them.
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
-- serialized DAG snapshot used by this run
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
-- presumably links a sub-workflow run back to its parent run -- confirm
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
-- each externally visible run id maps to exactly one row
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
-- supports per-workflow status queries and trigger-time range scans
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
-- One row per node of a workflow DAG; `job_id` points at the job the node
-- executes (nullable -- presumably non-job node types exist; confirm).
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
-- bit(1) flags: whether the node is active / whether a failure is tolerated
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
-- lists a workflow's nodes in creation order
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- Re-enable FK enforcement disabled at the top of the script.
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,7 @@
由于存在不同数据库、不同版本的升级,官方能给出的 upgrade SQL 相对有限,大家可参考以下方式自行生成升级 SQL
- 【官方脚本】参考官方每个版本的数据库全库建表文件(项目 others - sql - schema自行进行字段 DIFF
- 【自己动手版】导出当前您的 powerjob 数据库表结构,同时创建一个测试库,让 5.x 版本的 server 直连该测试库,自动建表。分别拿到两个版本的表结构 SQL 后,借用工具生成 update SQL 即可Navicat 等数据库管理软件均支持结构对比)
参考文档https://www.yuque.com/powerjob/guidence/upgrade

View File

@ -0,0 +1,88 @@
-- Upgrade SQL FROM 4.1.x to 4.2.x
-- NOTE(review): the accompanying upgrade docs describe moving to 5.x -- confirm
-- this version label matches the release these changes ship in.
-- ----------------------------
-- Table change for app_info
-- ----------------------------
SET FOREIGN_KEY_CHECKS=0;
-- NOTE: MySQL has no `ADD COLUMN IF NOT EXISTS`; re-running this script on an
-- already-upgraded schema will fail on these ALTER statements.
-- New app_info columns: ownership/audit fields and namespace linkage.
ALTER TABLE `app_info` ADD COLUMN `creator` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `extra` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `modifier` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `namespace_id` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `tags` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `title` varchar(255) NULL DEFAULT NULL;
-- ----------------------------
-- Table change for user_info
-- ----------------------------
-- New user_info columns for the pluggable-auth login flow.
ALTER TABLE `user_info` ADD COLUMN `account_type` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `nick` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `origin_username` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `token_login_verify_info` varchar(255) NULL DEFAULT NULL;
-- Usernames become unique; fails if the existing table holds duplicates --
-- deduplicate before upgrading.
ALTER TABLE `user_info` ADD UNIQUE INDEX `uidx01_user_name`(`username` ASC) USING BTREE;
-- ----------------------------
-- new table 'namespace'
-- ----------------------------
-- Logical grouping of apps (multi-tenancy unit). IF NOT EXISTS makes a re-run
-- of this migration a no-op for this statement instead of a hard error.
CREATE TABLE IF NOT EXISTS `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) NULL DEFAULT NULL,
`creator` bigint NULL DEFAULT NULL,
`dept` varchar(255) NULL DEFAULT NULL,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`modifier` bigint NULL DEFAULT NULL,
`name` varchar(255) NULL DEFAULT NULL,
`status` int NULL DEFAULT NULL,
`tags` varchar(255) NULL DEFAULT NULL,
`token` varchar(255) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
-- namespace codes are unique identifiers
UNIQUE INDEX `uidx01_namespace`(`code` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'pwjb_user_info'
-- ----------------------------
-- Credential store for the built-in login type. IF NOT EXISTS makes a re-run
-- of this migration a no-op for this statement instead of a hard error.
CREATE TABLE IF NOT EXISTS `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`password` varchar(255) NULL DEFAULT NULL,
`username` varchar(255) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
-- one credential row per username
UNIQUE INDEX `uidx01_username`(`username` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'sundry'
-- ----------------------------
-- Generic key/value storage addressed by (pkey, skey). IF NOT EXISTS makes a
-- re-run of this migration a no-op for this statement instead of a hard error.
CREATE TABLE IF NOT EXISTS `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) NULL DEFAULT NULL,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`pkey` varchar(255) NULL DEFAULT NULL,
`skey` varchar(255) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
-- the primary/secondary key pair addresses exactly one row
UNIQUE INDEX `uidx01_sundry`(`pkey` ASC, `skey` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 3 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'user_role'
-- ----------------------------
-- Role grants: user `user_id` holds `role` over (`scope`, `target`).
-- IF NOT EXISTS makes a re-run of this migration a no-op for this statement
-- instead of a hard error.
CREATE TABLE IF NOT EXISTS `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`role` int NULL DEFAULT NULL,
`scope` int NULL DEFAULT NULL,
`target` bigint NULL DEFAULT NULL,
`user_id` bigint NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
-- NOTE(review): non-unique index despite the `uidx` prefix; the misleading
-- name is kept to stay consistent with the full schema file.
INDEX `uidx01_user_id`(`user_id` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;

View File

@ -6,7 +6,7 @@
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<version>4.3.9</version> <version>5.1.0</version>
<packaging>pom</packaging> <packaging>pom</packaging>
<name>powerjob</name> <name>powerjob</name>
<url>http://www.powerjob.tech</url> <url>http://www.powerjob.tech</url>

View File

@ -5,18 +5,19 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.3.9</version> <version>5.1.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-client</artifactId> <artifactId>powerjob-client</artifactId>
<version>4.3.9</version> <version>5.1.0</version>
<packaging>jar</packaging> <packaging>jar</packaging>
<properties> <properties>
<junit.version>5.9.1</junit.version> <junit.version>5.9.1</junit.version>
<logback.version>1.2.13</logback.version>
<fastjson.version>1.2.83</fastjson.version> <fastjson.version>1.2.83</fastjson.version>
<powerjob.common.version>4.3.9</powerjob.common.version> <powerjob.common.version>5.1.0</powerjob.common.version>
<mvn.shade.plugin.version>3.2.4</mvn.shade.plugin.version> <mvn.shade.plugin.version>3.2.4</mvn.shade.plugin.version>
</properties> </properties>
@ -44,6 +45,13 @@
<version>${junit.version}</version> <version>${junit.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- log for test stage -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
<scope>test</scope>
</dependency>
</dependencies> </dependencies>
<build> <build>

View File

@ -0,0 +1,71 @@
package tech.powerjob.client;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
import tech.powerjob.client.common.Protocol;
import tech.powerjob.client.extension.ClientExtension;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
/**
* 客户端配置
*
* @author 程序帕鲁
* @since 2024/2/20
*/
@Getter
@Setter
@ToString
@Accessors(chain = true)
public class ClientConfig implements Serializable {
/**
* 执行器 AppName
*/
private String appName;
/**
* 执行器密码
*/
private String password;
/**
* 地址列表支持格式
* - IP:Port, eg: 192.168.1.1:7700
* - 域名, eg: powerjob.apple-inc.com
*/
private List<String> addressList;
/**
* 客户端通讯协议
*/
private Protocol protocol = Protocol.HTTP;
/**
* 连接超时时间
*/
private Integer connectionTimeout;
/**
* 指定了等待服务器响应数据的最长时间更具体地说这是从服务器开始返回响应数据包括HTTP头和数据客户端读取数据的超时时间
*/
private Integer readTimeout;
/**
* 指定了向服务器发送数据的最长时间这是从客户端开始发送数据如POST请求的正文到数据完全发送出去的时间
*/
private Integer writeTimeout;
/**
* 默认携带的请求头
* 用于流量被基础设施识别
*/
private Map<String, String> defaultHeaders;
/**
* 客户端行为扩展
*/
private ClientExtension clientExtension;
}

View File

@ -1,28 +1,32 @@
package tech.powerjob.client; package tech.powerjob.client;
import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSON;
import tech.powerjob.common.enums.InstanceStatus; import com.google.common.collect.Lists;
import tech.powerjob.common.OmsConstant; import com.google.common.collect.Maps;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.client.module.AppAuthRequest;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.client.service.RequestService;
import tech.powerjob.client.service.impl.ClusterRequestServiceOkHttp3Impl;
import tech.powerjob.common.OpenAPIConstant; import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.enums.EncryptType;
import tech.powerjob.common.enums.InstanceStatus;
import tech.powerjob.common.exception.PowerJobException; import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.request.http.SaveJobInfoRequest; import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest; import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest; import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.request.query.JobInfoQuery; import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.response.*; import tech.powerjob.common.response.*;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.common.utils.HttpUtils;
import tech.powerjob.common.serialize.JsonUtils; import tech.powerjob.common.serialize.JsonUtils;
import com.google.common.collect.Lists; import tech.powerjob.common.utils.CommonUtils;
import lombok.extern.slf4j.Slf4j; import tech.powerjob.common.utils.DigestUtils;
import okhttp3.FormBody;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import org.apache.commons.lang3.StringUtils;
import java.io.Closeable;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Map;
import static tech.powerjob.client.TypeStore.*; import static tech.powerjob.client.TypeStore.*;
@ -33,14 +37,44 @@ import static tech.powerjob.client.TypeStore.*;
* @since 2020/4/15 * @since 2020/4/15
*/ */
@Slf4j @Slf4j
public class PowerJobClient implements IPowerJobClient { public class PowerJobClient implements IPowerJobClient, Closeable {
private Long appId; private Long appId;
private String currentAddress;
private final List<String> allAddress; private final RequestService requestService;
private static final String URL_PATTERN = "http://%s%s%s"; public PowerJobClient(ClientConfig config) {
List<String> addressList = config.getAddressList();
String appName = config.getAppName();
CommonUtils.requireNonNull(addressList, "addressList can't be null!");
CommonUtils.requireNonNull(appName, "appName can't be null");
this.requestService = new ClusterRequestServiceOkHttp3Impl(config);
AppAuthRequest appAuthRequest = new AppAuthRequest();
appAuthRequest.setAppName(appName);
appAuthRequest.setEncryptedPassword(DigestUtils.md5(config.getPassword()));
appAuthRequest.setEncryptType(EncryptType.MD5.getCode());
String assertResponse = requestService.request(OpenAPIConstant.AUTH_APP, PowerRequestBody.newJsonRequestBody(appAuthRequest));
if (StringUtils.isNotEmpty(assertResponse)) {
ResultDTO<AppAuthResult> resultDTO = JSON.parseObject(assertResponse, APP_AUTH_RESULT_TYPE);
if (resultDTO.isSuccess()) {
appId = resultDTO.getData().getAppId();
} else {
throw new PowerJobException(resultDTO.getMessage());
}
}
if (appId == null) {
throw new PowerJobException("appId is null, please check your config");
}
log.info("[PowerJobClient] [INIT] {}'s PowerJobClient bootstrap successfully", appName);
}
/** /**
* Init PowerJobClient with domain, appName and password. * Init PowerJobClient with domain, appName and password.
* *
@ -49,7 +83,7 @@ public class PowerJobClient implements IPowerJobClient {
* @param password password of the application * @param password password of the application
*/ */
public PowerJobClient(String domain, String appName, String password) { public PowerJobClient(String domain, String appName, String password) {
this(Lists.newArrayList(domain), appName, password); this(new ClientConfig().setAppName(appName).setPassword(password).setAddressList(Lists.newArrayList(domain)));
} }
@ -61,48 +95,7 @@ public class PowerJobClient implements IPowerJobClient {
* @param password password of the application * @param password password of the application
*/ */
public PowerJobClient(List<String> addressList, String appName, String password) { public PowerJobClient(List<String> addressList, String appName, String password) {
this(new ClientConfig().setAppName(appName).setPassword(password).setAddressList(addressList));
CommonUtils.requireNonNull(addressList, "addressList can't be null!");
CommonUtils.requireNonNull(appName, "appName can't be null");
allAddress = addressList;
for (String addr : addressList) {
String url = getUrl(OpenAPIConstant.ASSERT, addr);
try {
String result = assertApp(appName, password, url);
if (StringUtils.isNotEmpty(result)) {
ResultDTO<Long> resultDTO = JSON.parseObject(result, LONG_RESULT_TYPE);
if (resultDTO.isSuccess()) {
appId = resultDTO.getData();
currentAddress = addr;
break;
} else {
throw new PowerJobException(resultDTO.getMessage());
}
}
} catch (IOException ignore) {
//
}
}
if (StringUtils.isEmpty(currentAddress)) {
throw new PowerJobException("no server available for PowerJobClient");
}
log.info("[PowerJobClient] {}'s PowerJobClient bootstrap successfully, using server: {}", appName, currentAddress);
}
private static String assertApp(String appName, String password, String url) throws IOException {
FormBody.Builder builder = new FormBody.Builder()
.add("appName", appName);
if (password != null) {
builder.add("password", password);
}
return HttpUtils.post(url, builder.build());
}
private static String getUrl(String path, String address) {
return String.format(URL_PATTERN, address, OpenAPIConstant.WEB_PATH, path);
} }
/* ************* Job 区 ************* */ /* ************* Job 区 ************* */
@ -118,9 +111,7 @@ public class PowerJobClient implements IPowerJobClient {
public ResultDTO<Long> saveJob(SaveJobInfoRequest request) { public ResultDTO<Long> saveJob(SaveJobInfoRequest request) {
request.setAppId(appId); request.setAppId(appId);
MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE); String post = requestService.request(OpenAPIConstant.SAVE_JOB, PowerRequestBody.newJsonRequestBody(request));
String json = JSON.toJSONString(request);
String post = postHA(OpenAPIConstant.SAVE_JOB, RequestBody.create(jsonType, json));
return JSON.parseObject(post, LONG_RESULT_TYPE); return JSON.parseObject(post, LONG_RESULT_TYPE);
} }
@ -133,21 +124,20 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Long> copyJob(Long jobId) { public ResultDTO<Long> copyJob(Long jobId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("jobId", jobId.toString()) param.put("jobId", jobId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build();
String post = postHA(OpenAPIConstant.COPY_JOB, body); String post = requestService.request(OpenAPIConstant.COPY_JOB, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, LONG_RESULT_TYPE); return JSON.parseObject(post, LONG_RESULT_TYPE);
} }
@Override @Override
public ResultDTO<SaveJobInfoRequest> exportJob(Long jobId) { public ResultDTO<SaveJobInfoRequest> exportJob(Long jobId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("jobId", jobId.toString()) param.put("jobId", jobId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.EXPORT_JOB, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.EXPORT_JOB, body);
return JSON.parseObject(post, SAVE_JOB_INFO_REQUEST_RESULT_TYPE); return JSON.parseObject(post, SAVE_JOB_INFO_REQUEST_RESULT_TYPE);
} }
@ -159,11 +149,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<JobInfoDTO> fetchJob(Long jobId) { public ResultDTO<JobInfoDTO> fetchJob(Long jobId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("jobId", jobId.toString()) param.put("jobId", jobId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.FETCH_JOB, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.FETCH_JOB, body);
return JSON.parseObject(post, JOB_RESULT_TYPE); return JSON.parseObject(post, JOB_RESULT_TYPE);
} }
@ -174,10 +163,9 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<List<JobInfoDTO>> fetchAllJob() { public ResultDTO<List<JobInfoDTO>> fetchAllJob() {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.FETCH_ALL_JOB, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.FETCH_ALL_JOB, body);
return JSON.parseObject(post, LIST_JOB_RESULT_TYPE); return JSON.parseObject(post, LIST_JOB_RESULT_TYPE);
} }
@ -190,9 +178,7 @@ public class PowerJobClient implements IPowerJobClient {
@Override @Override
public ResultDTO<List<JobInfoDTO>> queryJob(JobInfoQuery powerQuery) { public ResultDTO<List<JobInfoDTO>> queryJob(JobInfoQuery powerQuery) {
powerQuery.setAppIdEq(appId); powerQuery.setAppIdEq(appId);
MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE); String post = requestService.request(OpenAPIConstant.QUERY_JOB, PowerRequestBody.newJsonRequestBody(powerQuery));
String json = JsonUtils.toJSONStringUnsafe(powerQuery);
String post = postHA(OpenAPIConstant.QUERY_JOB, RequestBody.create(jsonType, json));
return JSON.parseObject(post, LIST_JOB_RESULT_TYPE); return JSON.parseObject(post, LIST_JOB_RESULT_TYPE);
} }
@ -204,11 +190,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> disableJob(Long jobId) { public ResultDTO<Void> disableJob(Long jobId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("jobId", jobId.toString()) param.put("jobId", jobId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.DISABLE_JOB, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.DISABLE_JOB, body);
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -220,11 +205,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> enableJob(Long jobId) { public ResultDTO<Void> enableJob(Long jobId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("jobId", jobId.toString()) param.put("jobId", jobId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.ENABLE_JOB, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.ENABLE_JOB, body);
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -236,11 +220,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> deleteJob(Long jobId) { public ResultDTO<Void> deleteJob(Long jobId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("jobId", jobId.toString()) param.put("jobId", jobId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.DELETE_JOB, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.DELETE_JOB, body);
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -254,15 +237,16 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Long> runJob(Long jobId, String instanceParams, long delayMS) { public ResultDTO<Long> runJob(Long jobId, String instanceParams, long delayMS) {
FormBody.Builder builder = new FormBody.Builder()
.add("jobId", jobId.toString()) Map<String, String> param = Maps.newHashMap();
.add("appId", appId.toString()) param.put("jobId", jobId.toString());
.add("delay", String.valueOf(delayMS)); param.put("appId", appId.toString());
param.put("delay", String.valueOf(delayMS));
if (StringUtils.isNotEmpty(instanceParams)) { if (StringUtils.isNotEmpty(instanceParams)) {
builder.add("instanceParams", instanceParams); param.put("instanceParams", instanceParams);
} }
String post = postHA(OpenAPIConstant.RUN_JOB, builder.build()); String post = requestService.request(OpenAPIConstant.RUN_JOB, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, LONG_RESULT_TYPE); return JSON.parseObject(post, LONG_RESULT_TYPE);
} }
@ -280,11 +264,12 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> stopInstance(Long instanceId) { public ResultDTO<Void> stopInstance(Long instanceId) {
RequestBody body = new FormBody.Builder()
.add("instanceId", instanceId.toString()) Map<String, String> param = Maps.newHashMap();
.add("appId", appId.toString()) param.put("instanceId", instanceId.toString());
.build(); param.put("appId", appId.toString());
String post = postHA(OpenAPIConstant.STOP_INSTANCE, body);
String post = requestService.request(OpenAPIConstant.STOP_INSTANCE, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -297,11 +282,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> cancelInstance(Long instanceId) { public ResultDTO<Void> cancelInstance(Long instanceId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("instanceId", instanceId.toString()) param.put("instanceId", instanceId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.CANCEL_INSTANCE, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.CANCEL_INSTANCE, body);
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -314,11 +298,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> retryInstance(Long instanceId) { public ResultDTO<Void> retryInstance(Long instanceId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("instanceId", instanceId.toString()) param.put("instanceId", instanceId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.RETRY_INSTANCE, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.RETRY_INSTANCE, body);
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -330,10 +313,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Integer> fetchInstanceStatus(Long instanceId) { public ResultDTO<Integer> fetchInstanceStatus(Long instanceId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("instanceId", instanceId.toString()) param.put("instanceId", instanceId.toString());
.build(); param.put("appId", appId.toString());
String post = postHA(OpenAPIConstant.FETCH_INSTANCE_STATUS, body); String post = requestService.request(OpenAPIConstant.FETCH_INSTANCE_STATUS, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, INTEGER_RESULT_TYPE); return JSON.parseObject(post, INTEGER_RESULT_TYPE);
} }
@ -345,10 +328,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId) { public ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("instanceId", instanceId.toString()) param.put("instanceId", instanceId.toString());
.build(); param.put("appId", appId.toString());
String post = postHA(OpenAPIConstant.FETCH_INSTANCE_INFO, body); String post = requestService.request(OpenAPIConstant.FETCH_INSTANCE_INFO, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, INSTANCE_RESULT_TYPE); return JSON.parseObject(post, INSTANCE_RESULT_TYPE);
} }
@ -364,10 +347,9 @@ public class PowerJobClient implements IPowerJobClient {
@Override @Override
public ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request) { public ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request) {
request.setAppId(appId); request.setAppId(appId);
MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
// 中坑记录 FastJSON 序列化会导致 Server 接收时 pEWorkflowDAG null无语.jpg // 中坑记录 FastJSON 序列化会导致 Server 接收时 pEWorkflowDAG null无语.jpg
String json = JsonUtils.toJSONStringUnsafe(request); String json = JsonUtils.toJSONStringUnsafe(request);
String post = postHA(OpenAPIConstant.SAVE_WORKFLOW, RequestBody.create(jsonType, json)); String post = requestService.request(OpenAPIConstant.SAVE_WORKFLOW, PowerRequestBody.newJsonRequestBody(json));
return JSON.parseObject(post, LONG_RESULT_TYPE); return JSON.parseObject(post, LONG_RESULT_TYPE);
} }
@ -379,11 +361,12 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Long> copyWorkflow(Long workflowId) { public ResultDTO<Long> copyWorkflow(Long workflowId) {
RequestBody body = new FormBody.Builder()
.add("workflowId", workflowId.toString()) Map<String, String> param = Maps.newHashMap();
.add("appId", appId.toString()) param.put("workflowId", workflowId.toString());
.build(); param.put("appId", appId.toString());
String post = postHA(OpenAPIConstant.COPY_WORKFLOW, body);
String post = requestService.request(OpenAPIConstant.COPY_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, LONG_RESULT_TYPE); return JSON.parseObject(post, LONG_RESULT_TYPE);
} }
@ -399,9 +382,9 @@ public class PowerJobClient implements IPowerJobClient {
for (SaveWorkflowNodeRequest saveWorkflowNodeRequest : requestList) { for (SaveWorkflowNodeRequest saveWorkflowNodeRequest : requestList) {
saveWorkflowNodeRequest.setAppId(appId); saveWorkflowNodeRequest.setAppId(appId);
} }
MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
String json = JsonUtils.toJSONStringUnsafe(requestList); String json = JsonUtils.toJSONStringUnsafe(requestList);
String post = postHA(OpenAPIConstant.SAVE_WORKFLOW_NODE, RequestBody.create(jsonType, json)); String post = requestService.request(OpenAPIConstant.SAVE_WORKFLOW_NODE, PowerRequestBody.newJsonRequestBody(json));
return JSON.parseObject(post, WF_NODE_LIST_RESULT_TYPE); return JSON.parseObject(post, WF_NODE_LIST_RESULT_TYPE);
} }
@ -415,11 +398,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId) { public ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("workflowId", workflowId.toString()) param.put("workflowId", workflowId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.FETCH_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.FETCH_WORKFLOW, body);
return JSON.parseObject(post, WF_RESULT_TYPE); return JSON.parseObject(post, WF_RESULT_TYPE);
} }
@ -431,11 +413,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> disableWorkflow(Long workflowId) { public ResultDTO<Void> disableWorkflow(Long workflowId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("workflowId", workflowId.toString()) param.put("workflowId", workflowId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.DISABLE_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.DISABLE_WORKFLOW, body);
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -447,11 +428,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> enableWorkflow(Long workflowId) { public ResultDTO<Void> enableWorkflow(Long workflowId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("workflowId", workflowId.toString()) param.put("workflowId", workflowId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.ENABLE_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.ENABLE_WORKFLOW, body);
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -463,11 +443,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> deleteWorkflow(Long workflowId) { public ResultDTO<Void> deleteWorkflow(Long workflowId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("workflowId", workflowId.toString()) param.put("workflowId", workflowId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.DELETE_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.DELETE_WORKFLOW, body);
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -481,14 +460,17 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Long> runWorkflow(Long workflowId, String initParams, long delayMS) { public ResultDTO<Long> runWorkflow(Long workflowId, String initParams, long delayMS) {
FormBody.Builder builder = new FormBody.Builder()
.add("workflowId", workflowId.toString()) Map<String, String> param = Maps.newHashMap();
.add("appId", appId.toString()) param.put("workflowId", workflowId.toString());
.add("delay", String.valueOf(delayMS)); param.put("appId", appId.toString());
param.put("delay", String.valueOf(delayMS));
if (StringUtils.isNotEmpty(initParams)) { if (StringUtils.isNotEmpty(initParams)) {
builder.add("initParams", initParams); param.put("initParams", initParams);
} }
String post = postHA(OpenAPIConstant.RUN_WORKFLOW, builder.build()); String post = requestService.request(OpenAPIConstant.RUN_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, LONG_RESULT_TYPE); return JSON.parseObject(post, LONG_RESULT_TYPE);
} }
@ -506,11 +488,12 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId) { public ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId) {
RequestBody body = new FormBody.Builder()
.add("wfInstanceId", wfInstanceId.toString()) Map<String, String> param = Maps.newHashMap();
.add("appId", appId.toString()) param.put("wfInstanceId", wfInstanceId.toString());
.build(); param.put("appId", appId.toString());
String post = postHA(OpenAPIConstant.STOP_WORKFLOW_INSTANCE, body);
String post = requestService.request(OpenAPIConstant.STOP_WORKFLOW_INSTANCE, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -522,11 +505,10 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> retryWorkflowInstance(Long wfInstanceId) { public ResultDTO<Void> retryWorkflowInstance(Long wfInstanceId) {
RequestBody body = new FormBody.Builder() Map<String, String> param = Maps.newHashMap();
.add("wfInstanceId", wfInstanceId.toString()) param.put("wfInstanceId", wfInstanceId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); String post = requestService.request(OpenAPIConstant.RETRY_WORKFLOW_INSTANCE, PowerRequestBody.newFormRequestBody(param));
String post = postHA(OpenAPIConstant.RETRY_WORKFLOW_INSTANCE, body);
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -539,12 +521,13 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<Void> markWorkflowNodeAsSuccess(Long wfInstanceId, Long nodeId) { public ResultDTO<Void> markWorkflowNodeAsSuccess(Long wfInstanceId, Long nodeId) {
RequestBody body = new FormBody.Builder()
.add("wfInstanceId", wfInstanceId.toString()) Map<String, String> param = Maps.newHashMap();
.add("nodeId", nodeId.toString()) param.put("wfInstanceId", wfInstanceId.toString());
.add("appId", appId.toString()) param.put("appId", appId.toString());
.build(); param.put("nodeId", nodeId.toString());
String post = postHA(OpenAPIConstant.MARK_WORKFLOW_NODE_AS_SUCCESS, body);
String post = requestService.request(OpenAPIConstant.MARK_WORKFLOW_NODE_AS_SUCCESS, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE); return JSON.parseObject(post, VOID_RESULT_TYPE);
} }
@ -556,47 +539,17 @@ public class PowerJobClient implements IPowerJobClient {
*/ */
@Override @Override
public ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId) { public ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId) {
RequestBody body = new FormBody.Builder()
.add("wfInstanceId", wfInstanceId.toString()) Map<String, String> param = Maps.newHashMap();
.add("appId", appId.toString()) param.put("wfInstanceId", wfInstanceId.toString());
.build(); param.put("appId", appId.toString());
String post = postHA(OpenAPIConstant.FETCH_WORKFLOW_INSTANCE_INFO, body);
String post = requestService.request(OpenAPIConstant.FETCH_WORKFLOW_INSTANCE_INFO, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, WF_INSTANCE_RESULT_TYPE); return JSON.parseObject(post, WF_INSTANCE_RESULT_TYPE);
} }
@Override
private String postHA(String path, RequestBody requestBody) { public void close() throws IOException {
requestService.close();
// 先尝试默认地址
String url = getUrl(path, currentAddress);
try {
String res = HttpUtils.post(url, requestBody);
if (StringUtils.isNotEmpty(res)) {
return res;
}
} catch (IOException e) {
log.warn("[PowerJobClient] request url:{} failed, reason is {}.", url, e.toString());
}
// 失败开始重试
for (String addr : allAddress) {
if (Objects.equals(addr, currentAddress)) {
continue;
}
url = getUrl(path, addr);
try {
String res = HttpUtils.post(url, requestBody);
if (StringUtils.isNotEmpty(res)) {
log.warn("[PowerJobClient] server change: from({}) -> to({}).", currentAddress, addr);
currentAddress = addr;
return res;
}
} catch (IOException e) {
log.warn("[PowerJobClient] request url:{} failed, reason is {}.", url, e.toString());
}
}
log.error("[PowerJobClient] do post for path: {} failed because of no server available in {}.", path, allAddress);
throw new PowerJobException("no server available when send post request");
} }
} }

View File

@ -1,6 +1,7 @@
package tech.powerjob.client; package tech.powerjob.client;
import com.alibaba.fastjson.TypeReference; import com.alibaba.fastjson.TypeReference;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.common.request.http.SaveJobInfoRequest; import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.response.*; import tech.powerjob.common.response.*;
@ -14,6 +15,7 @@ import java.util.List;
*/ */
public class TypeStore { public class TypeStore {
public static final TypeReference<ResultDTO<AppAuthResult>> APP_AUTH_RESULT_TYPE = new TypeReference<ResultDTO<AppAuthResult>>(){};
public static final TypeReference<ResultDTO<Void>> VOID_RESULT_TYPE = new TypeReference<ResultDTO<Void>>(){}; public static final TypeReference<ResultDTO<Void>> VOID_RESULT_TYPE = new TypeReference<ResultDTO<Void>>(){};
public static final TypeReference<ResultDTO<Integer>> INTEGER_RESULT_TYPE = new TypeReference<ResultDTO<Integer>>(){}; public static final TypeReference<ResultDTO<Integer>> INTEGER_RESULT_TYPE = new TypeReference<ResultDTO<Integer>>(){};

View File

@ -0,0 +1,28 @@
package tech.powerjob.client.common;
import lombok.Getter;
/**
 * Transport protocol used by the PowerJob OpenAPI client to reach the server.
 *
 * @author tjq
 * @since 2024/2/20
 */
@Getter
public enum Protocol {

    HTTP("http"),

    HTTPS("https");

    // lowercase URL scheme, used when formatting request URLs
    private final String protocol;

    Protocol(String protocol) {
        this.protocol = protocol;
    }

    @Override
    public String toString() {
        // render as the bare scheme name so the enum can be used directly in URL templates
        return protocol;
    }
}

View File

@ -0,0 +1,19 @@
package tech.powerjob.client.extension;
import java.util.List;
/**
 * Client-side extension point for customizing PowerJob client behavior.
 *
 * @author tjq
 * @since 2024/8/11
 */
public interface ClientExtension {

    /**
     * Dynamically supplies the server address list; intended for scenarios where the
     * server is deployed on an elastic/dynamic cluster whose members change over time.
     *
     * @param context extension context (currently carries no data; reserved for future inputs)
     * @return server addresses, in the same format required by ClientConfig#addressList
     */
    List<String> addressProvider(ExtensionContext context);
}

View File

@ -0,0 +1,10 @@
package tech.powerjob.client.extension;
/**
 * Context object passed to client extension hooks.
 *
 * @author tjq
 * @since 2024/8/11
 */
public class ExtensionContext {
    // Intentionally empty for now: reserved so extension methods (e.g.
    // ClientExtension#addressProvider) can receive new contextual inputs later
    // without breaking existing implementations.
}

View File

@ -0,0 +1,39 @@
package tech.powerjob.client.module;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
import java.util.Map;
/**
 * App authentication request, sent by the client to obtain an access token.
 *
 * @author tjq
 * @since 2024/2/19
 */
@Getter
@Setter
@ToString
public class AppAuthRequest implements Serializable {

    /**
     * Application name.
     */
    private String appName;

    /**
     * Encrypted password (the plaintext is never transmitted).
     */
    private String encryptedPassword;

    /**
     * Encryption algorithm used for the password, e.g. MD5.
     */
    private String encryptType;

    /**
     * Extra parameters, allowing developers to pass additional data.
     */
    private Map<String, Object> extra;
}

View File

@ -0,0 +1,30 @@
package tech.powerjob.client.module;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
import java.util.Map;
/**
 * App authentication result returned by the server after a successful auth.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Getter
@Setter
@ToString
public class AppAuthResult implements Serializable {

    // ID of the authenticated application
    private Long appId;

    // access token to attach to subsequent OpenAPI requests
    private String token;

    /**
     * Extra parameters.
     * Developers with special security requirements can extend via this map.
     */
    private Map<String, Object> extra;
}

View File

@ -0,0 +1,26 @@
package tech.powerjob.client.service;
import lombok.Data;
import lombok.experimental.Accessors;
import java.io.Serializable;
import java.util.Map;
/**
 * Minimal HTTP response holder passed between the transport layer and the
 * auth / cluster fail-over logic.
 *
 * @author tjq
 * @since 2024/8/10
 */
@Data
@Accessors(chain = true)
public class HttpResponse implements Serializable {

    // whether the request succeeded at the HTTP level (set by the transport from the status code)
    private boolean success;

    // raw HTTP status code
    private int code;

    // response body as a string; may be null when the server returned no body
    private String response;

    // response headers
    private Map<String, String> headers;
}

View File

@ -0,0 +1,47 @@
package tech.powerjob.client.service;
import com.google.common.collect.Maps;
import lombok.Getter;
import tech.powerjob.common.enums.MIME;
import java.util.Map;
/**
 * Request body abstraction: either a JSON-serializable payload or a form-field
 * map, plus any extra HTTP headers to send with it.
 *
 * @author tjq
 * @since 2024/8/10
 */
@Getter
public class PowerRequestBody {

    // content type of this request body
    private MIME mime;

    // the actual data: a JSON-serializable object or a Map<String, String> of form fields
    private Object payload;

    // extra HTTP headers to attach to the request
    private final Map<String, String> headers = Maps.newHashMap();

    private PowerRequestBody() {
    }

    // shared internal factory used by the public creators
    private static PowerRequestBody of(MIME mime, Object payload) {
        PowerRequestBody requestBody = new PowerRequestBody();
        requestBody.mime = mime;
        requestBody.payload = payload;
        return requestBody;
    }

    public static PowerRequestBody newJsonRequestBody(Object data) {
        return of(MIME.APPLICATION_JSON, data);
    }

    public static PowerRequestBody newFormRequestBody(Map<String, String> form) {
        return of(MIME.APPLICATION_FORM, form);
    }

    public void addHeaders(Map<String, String> hs) {
        // silently ignore empty input; merge everything else into the header map
        if (hs != null && !hs.isEmpty()) {
            this.headers.putAll(hs);
        }
    }
}

View File

@ -0,0 +1,15 @@
package tech.powerjob.client.service;
import java.io.Closeable;
/**
 * Request service: the transport abstraction the client uses to talk to servers.
 * Implementations own network resources, hence {@link Closeable}.
 *
 * @author tjq
 * @since 2024/2/20
 */
public interface RequestService extends Closeable {

    /**
     * Executes a POST-style request against the given OpenAPI path.
     *
     * @param path             OpenAPI sub-path (e.g. a constant from OpenAPIConstant)
     * @param powerRequestBody request payload (JSON or form) plus extra headers
     * @return raw response body as a string
     */
    String request(String path, PowerRequestBody powerRequestBody);
}

View File

@ -0,0 +1,107 @@
package tech.powerjob.client.service.impl;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Maps;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.TypeStore;
import tech.powerjob.client.module.AppAuthRequest;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.enums.EncryptType;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.utils.DigestUtils;
import tech.powerjob.common.utils.MapUtils;
import java.util.Map;
/**
 * Cluster request service with application-level authentication.
 * Wraps the raw cluster request with token acquisition and automatic refresh
 * (one refresh + retry when the server reports the auth as invalid).
 *
 * @author tjq
 * @since 2024/2/21
 */
@Slf4j
abstract class AppAuthClusterRequestService extends ClusterRequestService {

    // cached auth result (appId + token); null until the first successful auth
    protected AppAuthResult appAuthResult;

    public AppAuthClusterRequestService(ClientConfig config) {
        super(config);
    }

    @Override
    public String request(String path, PowerRequestBody powerRequestBody) {
        // No cached auth result yet -> authenticate first.
        if (appAuthResult == null) {
            refreshAppAuthResult();
        }
        HttpResponse httpResponse = doRequest(path, powerRequestBody);

        // If the server reports the auth as valid, the response is usable as-is.
        String authStatus = MapUtils.getString(httpResponse.getHeaders(), OpenAPIConstant.RESPONSE_HEADER_AUTH_STATUS);
        if (Boolean.TRUE.toString().equalsIgnoreCase(authStatus)) {
            return httpResponse.getResponse();
        }

        // Otherwise the token is stale/invalid: refresh the auth info and retry once.
        log.warn("[PowerJobClient] auth failed[authStatus: {}], try to refresh the auth info", authStatus);
        refreshAppAuthResult();
        httpResponse = doRequest(path, powerRequestBody);
        // As long as the request itself did not fail, return the body directly; if auth
        // failed again, the body carries the auth error (server guarantees a non-null response).
        return httpResponse.getResponse();
    }

    private HttpResponse doRequest(String path, PowerRequestBody powerRequestBody) {
        // Attach the auth headers (appId + token) before sending.
        Map<String, String> authHeaders = buildAuthHeader();
        powerRequestBody.addHeaders(authHeaders);
        HttpResponse httpResponse = clusterHaRequest(path, powerRequestBody);
        // Any unsuccessful HTTP response is treated as a hard error.
        if (!httpResponse.isSuccess()) {
            throw new PowerJobException("REMOTE_SERVER_INNER_EXCEPTION");
        }
        return httpResponse;
    }

    // Builds the per-request auth headers from the cached auth result.
    private Map<String, String> buildAuthHeader() {
        Map<String, String> authHeader = Maps.newHashMap();
        authHeader.put(OpenAPIConstant.REQUEST_HEADER_APP_ID, String.valueOf(appAuthResult.getAppId()));
        authHeader.put(OpenAPIConstant.REQUEST_HEADER_ACCESS_TOKEN, appAuthResult.getToken());
        return authHeader;
    }

    /**
     * Authenticates against the server and caches the result in {@link #appAuthResult}.
     * Throws PowerJobException when the HTTP call or the auth itself fails.
     */
    @SneakyThrows
    private void refreshAppAuthResult() {
        AppAuthRequest appAuthRequest = buildAppAuthRequest();
        HttpResponse httpResponse = clusterHaRequest(OpenAPIConstant.AUTH_APP, PowerRequestBody.newJsonRequestBody(appAuthRequest));
        if (!httpResponse.isSuccess()) {
            throw new PowerJobException("AUTH_APP_EXCEPTION!");
        }
        ResultDTO<AppAuthResult> authResultDTO = JSONObject.parseObject(httpResponse.getResponse(), TypeStore.APP_AUTH_RESULT_TYPE);
        if (!authResultDTO.isSuccess()) {
            throw new PowerJobException("AUTH_FAILED_" + authResultDTO.getMessage());
        }
        log.warn("[PowerJobClient] refresh auth info successfully!");
        this.appAuthResult = authResultDTO.getData();
    }

    /**
     * Builds the auth request from the client config; the password is sent MD5-hashed,
     * never in plaintext.
     */
    protected AppAuthRequest buildAppAuthRequest() {
        AppAuthRequest appAuthRequest = new AppAuthRequest();
        appAuthRequest.setAppName(config.getAppName());
        appAuthRequest.setEncryptedPassword(DigestUtils.md5(config.getPassword()));
        appAuthRequest.setEncryptType(EncryptType.MD5.getCode());
        return appAuthRequest;
    }
}

View File

@ -0,0 +1,140 @@
package tech.powerjob.client.service.impl;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.extension.ClientExtension;
import tech.powerjob.client.extension.ExtensionContext;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.client.service.RequestService;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.utils.CollectionUtils;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.security.cert.X509Certificate;
import java.util.List;
import java.util.Objects;
/**
 * Cluster request service: wraps the networking logic shared by all transports,
 * in particular HA fail-over across the configured server addresses.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Slf4j
abstract class ClusterRequestService implements RequestService {

    protected final ClientConfig config;

    /**
     * Current address (the address of the last successful request).
     */
    protected String currentAddress;

    /**
     * URL layout: protocol://host[:port]/OpenAPI-root/sub-path
     */
    protected static final String URL_PATTERN = "%s://%s%s%s";

    /**
     * Default timeout, in seconds.
     */
    protected static final Integer DEFAULT_TIMEOUT_SECONDS = 2;

    protected static final int HTTP_SUCCESS_CODE = 200;

    public ClusterRequestService(ClientConfig config) {
        this.config = config;
        // start with the first configured address; updated on successful fail-over
        this.currentAddress = config.getAddressList().get(0);
    }

    /**
     * Performs one concrete HTTP request (transport-specific).
     *
     * @param url  full request URL
     * @param body request body
     * @return response
     * @throws IOException on any I/O failure
     */
    protected abstract HttpResponse sendHttpRequest(String url, PowerRequestBody body) throws IOException;

    /**
     * Cluster-aware request: tries the last-known-good address first, then fails
     * over to every other known address until one responds.
     *
     * @param path             request PATH
     * @param powerRequestBody request body
     * @return response
     */
    protected HttpResponse clusterHaRequest(String path, PowerRequestBody powerRequestBody) {
        // Try the default (last-known-good) address first.
        String url = getUrl(path, currentAddress);
        try {
            return sendHttpRequest(url, powerRequestBody);
        } catch (IOException e) {
            log.warn("[ClusterRequestService] request url:{} failed, reason is {}.", url, e.toString());
        }
        List<String> addressList = fetchAddressList();
        // On failure, retry against the remaining addresses.
        for (String addr : addressList) {
            if (Objects.equals(addr, currentAddress)) {
                continue;
            }
            url = getUrl(path, addr);
            try {
                HttpResponse res = sendHttpRequest(url, powerRequestBody);
                log.warn("[ClusterRequestService] server change: from({}) -> to({}).", currentAddress, addr);
                currentAddress = addr;
                return res;
            } catch (IOException e) {
                log.warn("[ClusterRequestService] request url:{} failed, reason is {}.", url, e.toString());
            }
        }
        log.error("[ClusterRequestService] do post for path: {} failed because of no server available in {}.", path, addressList);
        throw new PowerJobException("no server available when send post request");
    }

    /**
     * Resolves the candidate server addresses, preferring a user-supplied
     * ClientExtension (dynamic clusters) over the static configured list.
     */
    private List<String> fetchAddressList() {
        ClientExtension clientExtension = config.getClientExtension();
        if (clientExtension != null) {
            List<String> addressList = clientExtension.addressProvider(new ExtensionContext());
            if (!CollectionUtils.isEmpty(addressList)) {
                return addressList;
            }
        }
        return config.getAddressList();
    }

    /**
     * Trust manager that performs NO certificate verification.
     * X.509 is the ITU-T standard for public-key certificates used in PKI; it
     * underpins SSL/TLS (and therefore HTTPS) identity verification.
     * NOTE(review): skipping verification exposes HTTPS connections to
     * man-in-the-middle attacks; acceptable only on trusted/internal networks.
     */
    protected static class NoVerifyX509TrustManager implements X509TrustManager {

        @Override
        public void checkClientTrusted(X509Certificate[] arg0, String arg1) {
            // no verification
        }

        @Override
        public void checkServerTrusted(X509Certificate[] arg0, String arg1) {
            // no verification
        }

        @Override
        public X509Certificate[] getAcceptedIssuers() {
            return new X509Certificate[0];
        }
    }

    // Formats the full request URL from the configured protocol, a server address and a sub-path.
    private String getUrl(String path, String address) {
        String protocol = config.getProtocol().getProtocol();
        return String.format(URL_PATTERN, protocol, address, OpenAPIConstant.WEB_PATH, path);
    }
}

View File

@ -0,0 +1,148 @@
package tech.powerjob.client.service.impl;
import com.google.common.collect.Maps;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import okhttp3.*;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.common.Protocol;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.common.OmsConstant;
import tech.powerjob.common.serialize.JsonUtils;
import javax.net.ssl.*;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
 * OkHttp3-based implementation of the cluster request service
 * (plain HTTP and verification-free HTTPS).
 *
 * @author tjq
 * @since 2024/2/20
 */
@Slf4j
public class ClusterRequestServiceOkHttp3Impl extends AppAuthClusterRequestService {

    private final OkHttpClient okHttpClient;

    public ClusterRequestServiceOkHttp3Impl(ClientConfig config) {
        super(config);
        // Build the HTTP client; for HTTPS we deliberately skip certificate verification
        // (see NoVerifyX509TrustManager) so self-signed server certs work out of the box.
        if (Protocol.HTTPS.equals(config.getProtocol())) {
            okHttpClient = initHttpsNoVerifyClient();
        } else {
            okHttpClient = initHttpClient();
        }
    }

    /**
     * Sends one POST request and converts the OkHttp response into {@link HttpResponse}.
     *
     * @param url              full request URL
     * @param powerRequestBody payload (JSON or form) plus extra headers
     * @return response with status code, body and headers populated
     * @throws IOException on any network failure
     */
    @Override
    @SuppressWarnings("unchecked")
    protected HttpResponse sendHttpRequest(String url, PowerRequestBody powerRequestBody) throws IOException {
        // Attach the client-wide default headers.
        powerRequestBody.addHeaders(config.getDefaultHeaders());
        Object obj = powerRequestBody.getPayload();
        if (powerRequestBody.getMime() == null) {
            // Fail fast with a clear message instead of NPE-ing inside Request.Builder#post.
            throw new IllegalArgumentException("mime type of request body is null, url: " + url);
        }
        final RequestBody requestBody;
        switch (powerRequestBody.getMime()) {
            case APPLICATION_JSON:
                MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
                // String payloads are sent as-is; anything else is serialized to JSON
                String jsonBody = obj instanceof String ? (String) obj : JsonUtils.toJSONStringUnsafe(obj);
                requestBody = RequestBody.create(jsonType, jsonBody);
                break;
            case APPLICATION_FORM:
                FormBody.Builder formBuilder = new FormBody.Builder();
                Map<String, String> formObj = (Map<String, String>) obj;
                formObj.forEach(formBuilder::add);
                requestBody = formBuilder.build();
                break;
            default:
                // Guard against future MIME constants being added without transport support.
                throw new IllegalArgumentException("unsupported mime type: " + powerRequestBody.getMime());
        }
        Request request = new Request.Builder()
                .post(requestBody)
                .headers(Headers.of(powerRequestBody.getHeaders()))
                .url(url)
                .build();
        // try-with-resources guarantees the response body is closed.
        try (Response response = okHttpClient.newCall(request).execute()) {
            int code = response.code();
            HttpResponse httpResponse = new HttpResponse()
                    .setCode(code)
                    .setSuccess(code == HTTP_SUCCESS_CODE);
            ResponseBody body = response.body();
            if (body != null) {
                httpResponse.setResponse(body.string());
            }
            // Copy the response headers (used upstream, e.g. for the auth-status header).
            Headers respHeaders = response.headers();
            Set<String> headerNames = respHeaders.names();
            Map<String, String> respHeaderMap = Maps.newHashMap();
            headerNames.forEach(hdKey -> respHeaderMap.put(hdKey, respHeaders.get(hdKey)));
            httpResponse.setHeaders(respHeaderMap);
            return httpResponse;
        }
    }

    @SneakyThrows
    private OkHttpClient initHttpClient() {
        OkHttpClient.Builder okHttpBuilder = commonOkHttpBuilder();
        return okHttpBuilder.build();
    }

    /**
     * Builds an HTTPS client that trusts ANY certificate and ANY hostname.
     * WARNING: vulnerable to man-in-the-middle attacks; use only on trusted networks.
     */
    @SneakyThrows
    private OkHttpClient initHttpsNoVerifyClient() {
        X509TrustManager trustManager = new NoVerifyX509TrustManager();
        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(null, new TrustManager[]{trustManager}, new SecureRandom());
        SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
        OkHttpClient.Builder okHttpBuilder = commonOkHttpBuilder();
        // skip certificate verification
        okHttpBuilder.sslSocketFactory(sslSocketFactory, trustManager);
        // skip hostname verification as well
        okHttpBuilder.hostnameVerifier((String hostname, SSLSession session) -> true);
        return okHttpBuilder.build();
    }

    /**
     * Common builder: applies configured timeouts, falling back to the default
     * of DEFAULT_TIMEOUT_SECONDS for any unset value.
     */
    private OkHttpClient.Builder commonOkHttpBuilder() {
        return new OkHttpClient.Builder()
                // read timeout
                .readTimeout(Optional.ofNullable(config.getReadTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // write timeout
                .writeTimeout(Optional.ofNullable(config.getWriteTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // connect timeout
                .connectTimeout(Optional.ofNullable(config.getConnectionTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // whole-call timeout; NOTE(review): reuses the connection timeout value — confirm intended
                .callTimeout(Optional.ofNullable(config.getConnectionTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS);
    }

    /**
     * Releases all OkHttp resources held by this client.
     */
    @Override
    public void close() throws IOException {
        // shut down the dispatcher's executor service
        okHttpClient.dispatcher().executorService().shutdown();
        // evict pooled connections
        okHttpClient.connectionPool().evictAll();
        // close the cache, if one is configured
        Cache cache = okHttpClient.cache();
        if (cache != null) {
            cache.close();
        }
    }
}

View File

@ -1,5 +1,6 @@
package tech.powerjob.client.test; package tech.powerjob.client.test;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeAll;
import tech.powerjob.client.IPowerJobClient; import tech.powerjob.client.IPowerJobClient;
import tech.powerjob.client.PowerJobClient; import tech.powerjob.client.PowerJobClient;
@ -16,6 +17,6 @@ public class ClientInitializer {
@BeforeAll @BeforeAll
public static void initClient() throws Exception { public static void initClient() throws Exception {
powerJobClient = new PowerJobClient("127.0.0.1:7700", "powerjob-worker-samples", "powerjob123"); powerJobClient = new PowerJobClient(Lists.newArrayList("127.0.0.1:7700", "127.0.0.1:7701"), "powerjob-worker-samples", "powerjob123");
} }
} }

View File

@ -1,6 +1,9 @@
package tech.powerjob.client.test; package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import tech.powerjob.client.PowerJobClient; import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.enums.ExecuteType; import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType; import tech.powerjob.common.enums.ProcessorType;
@ -9,11 +12,6 @@ import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.response.InstanceInfoDTO; import tech.powerjob.common.response.InstanceInfoDTO;
import tech.powerjob.common.response.JobInfoDTO; import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO; import tech.powerjob.common.response.ResultDTO;
import lombok.SneakyThrows;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.concurrent.TimeUnit;
/** /**
* Test cases for {@link PowerJobClient} * Test cases for {@link PowerJobClient}
@ -22,17 +20,18 @@ import java.util.concurrent.TimeUnit;
* @author Echo009 * @author Echo009
* @since 2020/4/15 * @since 2020/4/15
*/ */
@Slf4j
class TestClient extends ClientInitializer { class TestClient extends ClientInitializer {
public static final long JOB_ID = 4L; public static final long JOB_ID = 1L;
@Test @Test
void testSaveJob() { void testSaveJob() {
SaveJobInfoRequest newJobInfo = new SaveJobInfoRequest(); SaveJobInfoRequest newJobInfo = new SaveJobInfoRequest();
newJobInfo.setId(JOB_ID); newJobInfo.setId(JOB_ID);
newJobInfo.setJobName("omsOpenAPIJobccccc"); newJobInfo.setJobName("omsOpenAPIJobccccc" + System.currentTimeMillis());
newJobInfo.setJobDescription("test OpenAPI"); newJobInfo.setJobDescription("test OpenAPI" + System.currentTimeMillis());
newJobInfo.setJobParams("{'aa':'bb'}"); newJobInfo.setJobParams("{'aa':'bb'}");
newJobInfo.setTimeExpressionType(TimeExpressionType.CRON); newJobInfo.setTimeExpressionType(TimeExpressionType.CRON);
newJobInfo.setTimeExpression("0 0 * * * ? "); newJobInfo.setTimeExpression("0 0 * * * ? ");
@ -45,8 +44,10 @@ class TestClient extends ClientInitializer {
newJobInfo.setMinMemorySpace(1.2); newJobInfo.setMinMemorySpace(1.2);
newJobInfo.setMinDiskSpace(1.3); newJobInfo.setMinDiskSpace(1.3);
log.info("[TestClient] [testSaveJob] SaveJobInfoRequest: {}", JSONObject.toJSONString(newJobInfo));
ResultDTO<Long> resultDTO = powerJobClient.saveJob(newJobInfo); ResultDTO<Long> resultDTO = powerJobClient.saveJob(newJobInfo);
System.out.println(JSONObject.toJSONString(resultDTO)); log.info("[TestClient] [testSaveJob] result: {}", JSONObject.toJSONString(resultDTO));
Assertions.assertNotNull(resultDTO); Assertions.assertNotNull(resultDTO);
} }
@ -107,21 +108,21 @@ class TestClient extends ClientInitializer {
@Test @Test
void testFetchInstanceInfo() { void testFetchInstanceInfo() {
ResultDTO<InstanceInfoDTO> res = powerJobClient.fetchInstanceInfo(205436386851946560L); ResultDTO<InstanceInfoDTO> res = powerJobClient.fetchInstanceInfo(702482902331424832L);
System.out.println(res); System.out.println(res);
Assertions.assertNotNull(res); Assertions.assertNotNull(res);
} }
@Test @Test
void testStopInstance() { void testStopInstance() {
ResultDTO<Void> res = powerJobClient.stopInstance(205436995885858880L); ResultDTO<Void> res = powerJobClient.stopInstance(702482902331424832L);
System.out.println(res); System.out.println(res);
Assertions.assertNotNull(res); Assertions.assertNotNull(res);
} }
@Test @Test
void testFetchInstanceStatus() { void testFetchInstanceStatus() {
ResultDTO<Integer> res = powerJobClient.fetchInstanceStatus(205436995885858880L); ResultDTO<Integer> res = powerJobClient.fetchInstanceStatus(702482902331424832L);
System.out.println(res); System.out.println(res);
Assertions.assertNotNull(res); Assertions.assertNotNull(res);
} }
@ -135,19 +136,19 @@ class TestClient extends ClientInitializer {
Assertions.assertTrue(cancelRes.isSuccess()); Assertions.assertTrue(cancelRes.isSuccess());
} }
@Test // @Test
@SneakyThrows // @SneakyThrows
void testCancelInstanceInDatabase() { // void testCancelInstanceInDatabase() {
ResultDTO<Long> startRes = powerJobClient.runJob(15L, "start by OhMyClient", 2000000); // ResultDTO<Long> startRes = powerJobClient.runJob(15L, "start by OhMyClient", 2000000);
System.out.println("runJob result: " + JSONObject.toJSONString(startRes)); // System.out.println("runJob result: " + JSONObject.toJSONString(startRes));
//
// Restart server manually and clear all the data in time wheeler. // // Restart server manually and clear all the data in time wheeler.
TimeUnit.MINUTES.sleep(1); // TimeUnit.MINUTES.sleep(1);
//
ResultDTO<Void> cancelRes = powerJobClient.cancelInstance(startRes.getData()); // ResultDTO<Void> cancelRes = powerJobClient.cancelInstance(startRes.getData());
System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes)); // System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes));
Assertions.assertTrue(cancelRes.isSuccess()); // Assertions.assertTrue(cancelRes.isSuccess());
} // }
@Test @Test
void testRetryInstance() { void testRetryInstance() {

View File

@ -0,0 +1,35 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.utils.CommonUtils;
/**
* 测试容灾能力
*
* @author tjq
* @since 2024/8/11
*/
@Slf4j
public class TestClusterHA extends ClientInitializer {
@Test
void testHa() {
// 人工让 server 启停
for (int i = 0; i < 1000000; i++) {
CommonUtils.easySleep(100);
ResultDTO<JobInfoDTO> jobInfoDTOResultDTO = powerJobClient.fetchJob(1L);
log.info("[TestClusterHA] response: {}", JSONObject.toJSONString(jobInfoDTOResultDTO));
if (!jobInfoDTOResultDTO.isSuccess()) {
throw new RuntimeException("request failed!");
}
}
}
}

View File

@ -29,7 +29,7 @@ import java.util.List;
*/ */
class TestWorkflow extends ClientInitializer { class TestWorkflow extends ClientInitializer {
private static final long WF_ID = 1; private static final long WF_ID = 2;
@Test @Test
void initTestData() { void initTestData() {

View File

@ -5,12 +5,12 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.3.9</version> <version>5.1.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-common</artifactId> <artifactId>powerjob-common</artifactId>
<version>4.3.9</version> <version>5.1.0</version>
<packaging>jar</packaging> <packaging>jar</packaging>
<properties> <properties>
@ -20,7 +20,7 @@
<guava.version>31.1-jre</guava.version> <guava.version>31.1-jre</guava.version>
<okhttp.version>3.14.9</okhttp.version> <okhttp.version>3.14.9</okhttp.version>
<kryo.version>5.3.0</kryo.version> <kryo.version>5.3.0</kryo.version>
<jackson.version>2.14.0-rc1</jackson.version> <jackson.version>2.14.3</jackson.version>
<junit.version>5.9.0</junit.version> <junit.version>5.9.0</junit.version>
</properties> </properties>
@ -77,6 +77,13 @@
<artifactId>jackson-databind</artifactId> <artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version> <version>${jackson.version}</version>
</dependency> </dependency>
<!-- 解决 Java8 data/time 类型处理问题 #869 -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- Junit tests --> <!-- Junit tests -->
<dependency> <dependency>

View File

@ -30,4 +30,6 @@ public class OmsConstant {
public static final String HTTP_HEADER_CONTENT_TYPE = "Content-Type"; public static final String HTTP_HEADER_CONTENT_TYPE = "Content-Type";
public static final String JSON_MEDIA_TYPE = "application/json; charset=utf-8"; public static final String JSON_MEDIA_TYPE = "application/json; charset=utf-8";
public static final String NULL = "null";
} }

View File

@ -16,6 +16,8 @@ public class OpenAPIConstant {
public static final String ASSERT = "/assert"; public static final String ASSERT = "/assert";
public static final String AUTH_APP = "/authApp";
/* ************* JOB 区 ************* */ /* ************* JOB 区 ************* */
public static final String SAVE_JOB = "/saveJob"; public static final String SAVE_JOB = "/saveJob";
@ -56,4 +58,12 @@ public class OpenAPIConstant {
public static final String RETRY_WORKFLOW_INSTANCE = "/retryWfInstance"; public static final String RETRY_WORKFLOW_INSTANCE = "/retryWfInstance";
public static final String FETCH_WORKFLOW_INSTANCE_INFO = "/fetchWfInstanceInfo"; public static final String FETCH_WORKFLOW_INSTANCE_INFO = "/fetchWfInstanceInfo";
public static final String MARK_WORKFLOW_NODE_AS_SUCCESS = "/markWorkflowNodeAsSuccess"; public static final String MARK_WORKFLOW_NODE_AS_SUCCESS = "/markWorkflowNodeAsSuccess";
/* ************* 鉴权 ************* */
public static final String REQUEST_HEADER_ACCESS_TOKEN = "X-POWERJOB-ACCESS-TOKEN";
public static final String REQUEST_HEADER_APP_ID = "X-POWERJOB-APP-ID";
public static final String RESPONSE_HEADER_AUTH_STATUS = "X-POWERJOB-AUTH-PASSED";
} }

View File

@ -0,0 +1,22 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* 加密类型
*
* @author tjq
* @since 2024/8/10
*/
@Getter
@AllArgsConstructor
public enum EncryptType {
NONE("none"),
MD5("md5")
;
private final String code;
}

View File

@ -0,0 +1,67 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* 鉴权错误信息
*
* @author tjq
* @since 2024/2/11
*/
@Getter
@AllArgsConstructor
public enum ErrorCodes {
USER_NOT_LOGIN("-100", "UserNotLoggedIn"),
USER_NOT_EXIST("-101", "UserNotExist"),
USER_AUTH_FAILED("-102", "UserAuthFailed"),
/**
* 账户被停用
*/
USER_DISABLED("-103", "UserDisabled"),
NO_PERMISSION("-200", "NoPermission"),
/**
* 无效请求一般是参数问题
*/
INVALID_REQUEST("-300", "INVALID_REQUEST"),
INCORRECT_PASSWORD("-400", "INCORRECT_PASSWORD"),
/**
* 非法令牌
*/
INVALID_TOKEN("-401", "INVALID_TOKEN"),
/**
* 无效 APP无法找到 app
*/
INVALID_APP("-402", "INVALID_APP"),
/**
* 令牌过期
*/
TOKEN_EXPIRED("-403", "TOKEN_EXPIRED"),
/**
* 系统内部异常
*/
SYSTEM_UNKNOWN_ERROR("-500", "SYS_UNKNOWN_ERROR"),
/**
* OPENAPI 错误码号段 -10XX
*/
OPEN_API_AUTH_FAILED("-1002", "OPEN_API_AUTH_FAILED"),
/**
* PowerJobClient 错误码号段
*/
CLIENT_HTTP_REQUEST_FAILED("-2001", "CLIENT_HTTP_REQUEST_FAILED"),
;
private final String code;
private final String msg;
}

View File

@ -0,0 +1,22 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types">消息内容类型</a>
*
* @author tjq
* @since 2024/8/10
*/
@Getter
@AllArgsConstructor
public enum MIME {
APPLICATION_JSON("application/json; charset=utf-8"),
APPLICATION_FORM("application/x-www-form-urlencoded")
;
private final String code;
}

View File

@ -1,4 +1,4 @@
package tech.powerjob.server.common.constants; package tech.powerjob.common.enums;
import lombok.AllArgsConstructor; import lombok.AllArgsConstructor;
import lombok.Getter; import lombok.Getter;
@ -13,10 +13,16 @@ import lombok.Getter;
@AllArgsConstructor @AllArgsConstructor
public enum SwitchableStatus { public enum SwitchableStatus {
/** /**
* * 启用
*/ */
ENABLE(1), ENABLE(1),
/**
* 关闭
*/
DISABLE(2), DISABLE(2),
/**
* 软删除
*/
DELETED(99); DELETED(99);
private final int v; private final int v;

View File

@ -1,13 +1,21 @@
package tech.powerjob.common.exception; package tech.powerjob.common.exception;
import lombok.Getter;
import lombok.Setter;
import tech.powerjob.common.enums.ErrorCodes;
/** /**
* PowerJob 运行时异常 * PowerJob 运行时异常
* *
* @author tjq * @author tjq
* @since 2020/5/26 * @since 2020/5/26
*/ */
@Setter
@Getter
public class PowerJobException extends RuntimeException { public class PowerJobException extends RuntimeException {
protected String code;
public PowerJobException() { public PowerJobException() {
} }
@ -15,6 +23,11 @@ public class PowerJobException extends RuntimeException {
super(message); super(message);
} }
public PowerJobException(ErrorCodes errorCode, String extraMsg) {
super(extraMsg == null ? errorCode.getMsg() : errorCode.getMsg().concat(":").concat(extraMsg));
this.code = errorCode.getCode();
}
public PowerJobException(String message, Throwable cause) { public PowerJobException(String message, Throwable cause) {
super(message, cause); super(message, cause);
} }

View File

@ -0,0 +1,47 @@
package tech.powerjob.common.response;
import lombok.Getter;
import lombok.Setter;
import org.apache.commons.lang3.exception.ExceptionUtils;
import tech.powerjob.common.enums.ErrorCodes;
import tech.powerjob.common.exception.PowerJobException;
/**
* 新的 Result带状态码
*
* @author 程序帕鲁
* @since 2024/2/19
*/
@Getter
@Setter
public class PowerResultDTO<T> extends ResultDTO<T> {
private String code;
public static <T> PowerResultDTO<T> s(T data) {
PowerResultDTO<T> r = new PowerResultDTO<>();
r.success = true;
r.data = data;
return r;
}
public static <T> PowerResultDTO<T> f(String message) {
PowerResultDTO<T> r = new PowerResultDTO<>();
r.success = false;
r.message = message;
return r;
}
public static <T> PowerResultDTO<T> f(Throwable t) {
PowerResultDTO<T> f = f(ExceptionUtils.getStackTrace(t));
f.setCode(ErrorCodes.SYSTEM_UNKNOWN_ERROR.getCode());
return f;
}
public static <T> PowerResultDTO<T> f(PowerJobException pje) {
PowerResultDTO<T> f = f(pje.getMessage());
f.setCode(pje.getCode());
return f;
}
}

View File

@ -8,6 +8,11 @@ import org.apache.commons.lang3.exception.ExceptionUtils;
/** /**
* The result object returned by the request * The result object returned by the request
* <p>
* 低版本由于 Jackson 序列化配置问题导致无法在此对象上新增任何字段了否则会报错 com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException: Unrecognized field "code" (class tech.powerjob.common.response.ObjectResultDTO), not marked as ignorable (3 known properties: "data", "success", "message"])
* at [Source: (String)"{"success":true,"code":null,"data":2,"message":null}"; line: 1, column: 28] (through reference chain: tech.powerjob.common.response.ObjectResultDTO["code"])
* <p>
* 短期内所有的新增字段需求都通过新对象继承实现
* *
* @author tjq * @author tjq
* @since 2020/3/30 * @since 2020/3/30
@ -17,9 +22,9 @@ import org.apache.commons.lang3.exception.ExceptionUtils;
@ToString @ToString
public class ResultDTO<T> implements PowerSerializable { public class ResultDTO<T> implements PowerSerializable {
private boolean success; protected boolean success;
private T data; protected T data;
private String message; protected String message;
public static <T> ResultDTO<T> success(T data) { public static <T> ResultDTO<T> success(T data) {
ResultDTO<T> r = new ResultDTO<>(); ResultDTO<T> r = new ResultDTO<>();

View File

@ -2,7 +2,6 @@ package tech.powerjob.common.serialize;
import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.MapperFeature; import com.fasterxml.jackson.databind.MapperFeature;
@ -35,6 +34,13 @@ public class JsonUtils {
static { static {
JSON_MAPPER.setSerializationInclusion(JsonInclude.Include.NON_NULL); JSON_MAPPER.setSerializationInclusion(JsonInclude.Include.NON_NULL);
// 非核心功能可降级尽可能降低依赖冲突概率
try {
JSON_MAPPER.registerModule(new com.fasterxml.jackson.datatype.jsr310.JavaTimeModule());
} catch (Exception e) {
log.warn("[JsonUtils] registerJavaTimeModule failed, PowerJob can't process Java 8 date/time type now!", e);
}
} }
private static final TypeReference<Map<String, Object>> MAP_TYPE_REFERENCE = new TypeReference<Map<String, Object>> () {}; private static final TypeReference<Map<String, Object>> MAP_TYPE_REFERENCE = new TypeReference<Map<String, Object>> () {};
@ -65,8 +71,9 @@ public class JsonUtils {
try { try {
return JSON_MAPPER.writeValueAsString(obj); return JSON_MAPPER.writeValueAsString(obj);
}catch (Exception e) { }catch (Exception e) {
throw new PowerJobException(e); ExceptionUtils.rethrow(e);
} }
throw new ImpossibleException();
} }
public static byte[] toBytes(Object obj) { public static byte[] toBytes(Object obj) {
@ -78,7 +85,7 @@ public class JsonUtils {
return null; return null;
} }
public static <T> T parseObject(String json, Class<T> clz) throws JsonProcessingException { public static <T> T parseObject(String json, Class<T> clz) throws Exception {
return JSON_MAPPER.readValue(json, clz); return JSON_MAPPER.readValue(json, clz);
} }

View File

@ -7,6 +7,8 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils; import org.apache.commons.lang3.time.DateFormatUtils;
import java.util.Collection; import java.util.Collection;
import java.util.Date;
import java.util.Map;
import java.util.UUID; import java.util.UUID;
import java.util.function.Supplier; import java.util.function.Supplier;
@ -128,6 +130,16 @@ public class CommonUtils {
throw new PowerJobException(msg); throw new PowerJobException(msg);
} }
} }
if (obj instanceof Collection) {
if (CollectionUtils.isEmpty((Collection<?>) obj)) {
throw new PowerJobException(msg);
}
}
if (obj instanceof Map) {
if (MapUtils.isEmpty((Map<?, ?>) obj)) {
throw new PowerJobException(msg);
}
}
return obj; return obj;
} }
@ -147,6 +159,13 @@ public class CommonUtils {
return OmsConstant.NONE; return OmsConstant.NONE;
} }
public static String formatTime(Date date) {
if (date == null) {
return OmsConstant.NONE;
}
return formatTime(date.getTime());
}
/** /**
* 格式化字符串如果是 null 或空则显示 N/A * 格式化字符串如果是 null 或空则显示 N/A
* @param str 字符串 * @param str 字符串

View File

@ -0,0 +1,47 @@
package tech.powerjob.common.utils;
import lombok.SneakyThrows;
import org.apache.commons.lang3.StringUtils;
import java.math.BigInteger;
import java.security.MessageDigest;
/**
* 加密工具
*
* @author tjq
* @since 2023/3/25
*/
public class DigestUtils {
/**
* 32位小写 md5
* @param input 输入
* @return md5
*/
@SneakyThrows
public static String md5(String input) {
if (StringUtils.isEmpty(input)) {
return null;
}
MessageDigest md5 = MessageDigest.getInstance("MD5");
md5.update(input.getBytes());
byte[] byteArray = md5.digest();
BigInteger bigInt = new BigInteger(1, byteArray);
// 参数16表示16进制
StringBuilder result = new StringBuilder(bigInt.toString(16));
// 不足32位高位补零
while(result.length() < 32) {
result.insert(0, "0");
}
return result.toString();
}
public static String rePassword(String password, String salt) {
String f1 = String.format("%s_%s_z", salt, password);
return String.format("%s_%s_b", salt, md5(f1));
}
}

View File

@ -21,6 +21,7 @@ public class HttpUtils {
client = new OkHttpClient.Builder() client = new OkHttpClient.Builder()
.connectTimeout(1, TimeUnit.SECONDS) .connectTimeout(1, TimeUnit.SECONDS)
.readTimeout(5, TimeUnit.SECONDS) .readTimeout(5, TimeUnit.SECONDS)
.writeTimeout(10, TimeUnit.SECONDS)
.build(); .build();
} }

View File

@ -12,6 +12,26 @@ import java.util.Map;
*/ */
public class MapUtils { public class MapUtils {
public static <K> String getString(Map<? super K, ?> map, K key) {
if (map != null) {
Object answer = map.get(key);
if (answer != null) {
return answer.toString();
}
}
return null;
}
public static <K> String getString(Map<? super K, ?> map, K key, String defaultValue) {
String answer = getString(map, key);
if (answer == null) {
answer = defaultValue;
}
return answer;
}
public static <K> Long getLong(Map<? super K, ?> map, K key, Long defaultValue) { public static <K> Long getLong(Map<? super K, ?> map, K key, Long defaultValue) {
Long answer = getLong(map, key); Long answer = getLong(map, key);
if (answer == null) { if (answer == null) {
@ -55,4 +75,12 @@ public class MapUtils {
return null; return null;
} }
public static boolean isEmpty(Map<?, ?> map) {
return map == null || map.isEmpty();
}
public static boolean isNotEmpty(Map<?, ?> map) {
return !isEmpty(map);
}
} }

View File

@ -217,9 +217,10 @@ public class NetUtils {
if (networkInterfaceChecker == null) { if (networkInterfaceChecker == null) {
return false; return false;
} }
log.info("[Net] try to choose NetworkInterface by NetworkInterfaceChecker, current NetworkInterface: {}", networkInterface);
try { try {
return networkInterfaceChecker.ok(networkInterface, getFirstReachableInetAddress(networkInterface)); boolean ok = networkInterfaceChecker.ok(networkInterface, getFirstReachableInetAddress(networkInterface));
log.info("[Net] try to choose NetworkInterface by NetworkInterfaceChecker, current NetworkInterface[{}], ok: {}", networkInterface, ok);
return ok;
} catch (Exception e) { } catch (Exception e) {
log.warn("[Net] isPassedCheckerNetworkInterface failed, current networkInterface: {}", networkInterface, e); log.warn("[Net] isPassedCheckerNetworkInterface failed, current networkInterface: {}", networkInterface, e);
} }

View File

@ -35,7 +35,13 @@ public class PingPongSocketServer implements PingPongServer {
} }
// 接收连接如果没有连接accept() 方法会阻塞 // 接收连接如果没有连接accept() 方法会阻塞
try (Socket socket = serverSocket.accept();OutputStream outputStream = socket.getOutputStream();) { try (Socket socket = serverSocket.accept();OutputStream outputStream = socket.getOutputStream();) {
socket.setSoTimeout(2000);
socket.setKeepAlive(false);
outputStream.write(PingPongUtils.PONG.getBytes(StandardCharsets.UTF_8)); outputStream.write(PingPongUtils.PONG.getBytes(StandardCharsets.UTF_8));
// BufferedReader.readLine() 会等待直到遇到换行符\n或回车符\r\n才会返回一行内容如果服务器发送的数据没有这些换行符readLine() 会一直阻塞直到超时
outputStream.write(System.lineSeparator().getBytes(StandardCharsets.UTF_8));
outputStream.flush(); outputStream.flush();
} catch (Exception e) { } catch (Exception e) {
if (!terminated) { if (!terminated) {

View File

@ -30,6 +30,9 @@ public class PingPongUtils {
try (Socket s = new Socket(targetIp, targetPort);InputStream is = s.getInputStream();OutputStream os = s.getOutputStream();BufferedReader br = new BufferedReader(new InputStreamReader(is))) { try (Socket s = new Socket(targetIp, targetPort);InputStream is = s.getInputStream();OutputStream os = s.getOutputStream();BufferedReader br = new BufferedReader(new InputStreamReader(is))) {
s.setSoTimeout(2000);
s.setKeepAlive(false);
// 发送 PING 请求 // 发送 PING 请求
os.write(PING.getBytes(StandardCharsets.UTF_8)); os.write(PING.getBytes(StandardCharsets.UTF_8));
os.flush(); os.flush();

View File

@ -0,0 +1,81 @@
package tech.powerjob.common.serialize;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.SneakyThrows;
import lombok.experimental.Accessors;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import java.io.Serializable;
import java.time.LocalDateTime;
import java.util.Map;
/**
* test json utils
*
* @author tjq
* @since 2024/3/16
*/
@Slf4j
class JsonUtilsTest {
@Test
@SneakyThrows
void simpleTest() {
Person person = new Person().setName("mubao").setAge(18);
String jsonString = JsonUtils.toJSONString(person);
log.info("[JsonUtilsTest] person: {}, jsonString: {}", person, jsonString);
assert jsonString != null;
Person person2 = JsonUtils.parseObject(jsonString, Person.class);
assert person2.equals(person);
}
@Test
@SneakyThrows
void testAdvanceApi() {
PersonPlus personPlus = new PersonPlus();
personPlus.setName("gongbao").setAge(3);
personPlus.setBirthDay(LocalDateTime.now());
String jsonString = JsonUtils.toJSONString(personPlus);
PersonPlus personPlus2 = JsonUtils.parseObject(jsonString, PersonPlus.class);
assert personPlus2.equals(personPlus);
}
@Test
@SneakyThrows
void testMoreOrLessFields() {
PersonPlus personPlus = new PersonPlus().setBirthDay(LocalDateTime.now());
personPlus.setName("gongbao").setAge(3);
String originJsonStr = JsonUtils.toJSONString(personPlus);
Map<String, Object> personPlusMapMore = JsonUtils.parseMap(originJsonStr);
personPlusMapMore.put("extraKey", System.currentTimeMillis());
PersonPlus personPlusByMoreFieldsJsonStr = JsonUtils.parseObject(JsonUtils.toJSONString(personPlusMapMore), PersonPlus.class);
assert personPlusByMoreFieldsJsonStr.equals(personPlus);
Map<String, Object> personPlusMapLess = JsonUtils.parseMap(originJsonStr);
personPlusMapLess.remove("birthDay");
PersonPlus personPlusByLessFieldsJsonStr = JsonUtils.parseObject(JsonUtils.toJSONString(personPlusMapLess), PersonPlus.class);
assert personPlusByLessFieldsJsonStr.getName().equals(personPlus.getName());
assert personPlusByLessFieldsJsonStr.getAge().equals(personPlus.getAge());
}
@Data
@Accessors(chain = true)
static class Person implements Serializable {
private String name;
private Integer age;
}
@Data
@Accessors(chain = true)
@EqualsAndHashCode(callSuper = true)
static class PersonPlus extends Person {
private LocalDateTime birthDay;
}
}

View File

@ -0,0 +1,39 @@
package tech.powerjob.common.utils;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.junit.jupiter.api.Test;
import tech.powerjob.common.exception.PowerJobException;
import java.util.Collections;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.*;
/**
* CommonUtilsTest
*
* @author tjq
* @since 2024/8/11
*/
class CommonUtilsTest {
@Test
void testRequireNonNull() {
assertThrowsExactly(PowerJobException.class, () -> CommonUtils.requireNonNull(null, "NULL_OBJ"));
assertThrowsExactly(PowerJobException.class, () -> CommonUtils.requireNonNull("", "EMPTY_STR"));
assertThrowsExactly(PowerJobException.class, () -> CommonUtils.requireNonNull(Lists.newArrayList(), "EMPTY_COLLECTION"));
assertThrowsExactly(PowerJobException.class, () -> CommonUtils.requireNonNull(Collections.emptyMap(), "EMPTY_MAP"));
Map<String, Object> map = Maps.newHashMap();
map.put("1", 1);
CommonUtils.requireNonNull(1, "NORMAL");
CommonUtils.requireNonNull("1", "NORMAL");
CommonUtils.requireNonNull(Lists.newArrayList("1"), "NORMAL");
CommonUtils.requireNonNull(map, "NORMAL");
}
}

View File

@ -5,12 +5,12 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.3.9</version> <version>5.1.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-official-processors</artifactId> <artifactId>powerjob-official-processors</artifactId>
<version>4.3.9</version> <version>5.1.0</version>
<packaging>jar</packaging> <packaging>jar</packaging>
<properties> <properties>
@ -20,7 +20,7 @@
<!-- 不会被打包的部分scope 只能是 test 或 provide --> <!-- 不会被打包的部分scope 只能是 test 或 provide -->
<junit.version>5.9.1</junit.version> <junit.version>5.9.1</junit.version>
<logback.version>1.2.13</logback.version> <logback.version>1.2.13</logback.version>
<powerjob.worker.version>4.3.9</powerjob.worker.version> <powerjob.worker.version>5.1.0</powerjob.worker.version>
<h2.db.version>2.2.224</h2.db.version> <h2.db.version>2.2.224</h2.db.version>
<mysql.version>8.0.28</mysql.version> <mysql.version>8.0.28</mysql.version>
<spring.version>5.3.31</spring.version> <spring.version>5.3.31</spring.version>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.3.9</version> <version>5.1.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<packaging>pom</packaging> <packaging>pom</packaging>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob-remote</artifactId> <artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.3.9</version> <version>5.1.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
@ -21,8 +21,8 @@
<logback.version>1.2.13</logback.version> <logback.version>1.2.13</logback.version>
<springboot.version>2.7.18</springboot.version> <springboot.version>2.7.18</springboot.version>
<powerjob-remote-impl-http.version>4.3.9</powerjob-remote-impl-http.version> <powerjob-remote-impl-http.version>5.1.0</powerjob-remote-impl-http.version>
<powerjob-remote-impl-akka.version>4.3.9</powerjob-remote-impl-akka.version> <powerjob-remote-impl-akka.version>5.1.0</powerjob-remote-impl-akka.version>
<gatling.version>3.9.0</gatling.version> <gatling.version>3.9.0</gatling.version>
<gatling-maven-plugin.version>4.2.9</gatling-maven-plugin.version> <gatling-maven-plugin.version>4.2.9</gatling-maven-plugin.version>

View File

@ -5,11 +5,11 @@
<parent> <parent>
<artifactId>powerjob-remote</artifactId> <artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.3.9</version> <version>5.1.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<version>4.3.9</version> <version>5.1.0</version>
<artifactId>powerjob-remote-framework</artifactId> <artifactId>powerjob-remote-framework</artifactId>
<properties> <properties>
@ -17,7 +17,7 @@
<maven.compiler.target>8</maven.compiler.target> <maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<powerjob-common.version>4.3.9</powerjob-common.version> <powerjob-common.version>5.1.0</powerjob-common.version>
<reflections.version>0.10.2</reflections.version> <reflections.version>0.10.2</reflections.version>

View File

@ -19,7 +19,14 @@ import java.io.Serializable;
@Accessors(chain = true) @Accessors(chain = true)
public class CSInitializerConfig implements Serializable { public class CSInitializerConfig implements Serializable {
/**
* 需要绑定的地址本地
*/
private Address bindAddress; private Address bindAddress;
/**
* 外部地址需要 NAT 等情况存在
*/
private Address externalAddress;
private ServerType serverType; private ServerType serverType;
} }

View File

@ -30,6 +30,10 @@ public class EngineConfig implements Serializable {
* 绑定的本地地址 * 绑定的本地地址
*/ */
private Address bindAddress; private Address bindAddress;
/**
* 外部地址需要 NAT 等情况存在
*/
private Address externalAddress;
/** /**
* actor实例交由使用侧自己实例化以便自行注入各种 bean * actor实例交由使用侧自己实例化以便自行注入各种 bean
*/ */

View File

@ -4,9 +4,11 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.exception.ExceptionUtils;
import org.reflections.Reflections; import org.reflections.Reflections;
import tech.powerjob.common.OmsConstant; import tech.powerjob.common.OmsConstant;
import tech.powerjob.common.enums.Protocol;
import tech.powerjob.common.exception.PowerJobException; import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.remote.framework.cs.CSInitializer; import tech.powerjob.remote.framework.cs.CSInitializer;
import java.util.Optional;
import java.util.Set; import java.util.Set;
/** /**
@ -18,8 +20,25 @@ import java.util.Set;
@Slf4j @Slf4j
class CSInitializerFactory { class CSInitializerFactory {
private static final String OFFICIAL_HTTP_CS_INITIALIZER = "tech.powerjob.remote.http.HttpVertxCSInitializer";
/**
* 未来底层框架摆脱 vertx 时可能会用这个 classnameor 开发者自己实现的 http 协议也可以用这个 classname总之预留战未来
*/
private static final String OFFICIAL_HTTP_CS_INITIALIZER2 = "tech.powerjob.remote.http.HttpCSInitializer";
private static final String OFFICIAL_AKKA_CS_INITIALIZER = "tech.powerjob.remote.akka.AkkaCSInitializer";
private static final String EXTEND_CS_INITIALIZER_PATTERN = "tech.powerjob.remote.%s.CSInitializer";
static CSInitializer build(String targetType) { static CSInitializer build(String targetType) {
CSInitializer officialCSInitializer = tryLoadCSInitializerByClassName(targetType);
if (officialCSInitializer != null) {
return officialCSInitializer;
}
log.info("[CSInitializerFactory] try load CSInitializerFactory by name failed, start to use Reflections!");
// JAVA SPI 机制太笨了短期内继续保留 Reflections 官网下高版本兼容性
Reflections reflections = new Reflections(OmsConstant.PACKAGE); Reflections reflections = new Reflections(OmsConstant.PACKAGE);
Set<Class<? extends CSInitializer>> cSInitializerClzSet = reflections.getSubTypesOf(CSInitializer.class); Set<Class<? extends CSInitializer>> cSInitializerClzSet = reflections.getSubTypesOf(CSInitializer.class);
@ -41,4 +60,52 @@ class CSInitializerFactory {
throw new PowerJobException(String.format("can't load CSInitializer[%s], ensure your package name start with 'tech.powerjob' and import the dependencies!", targetType)); throw new PowerJobException(String.format("can't load CSInitializer[%s], ensure your package name start with 'tech.powerjob' and import the dependencies!", targetType));
} }
/**
* 官方组件直接使用固定类名尝试加载确保 reflections 不兼容情况下至少能使用官方通讯协议
* @param targetType 协议类型
* @return CSInitializer
*/
private static CSInitializer tryLoadCSInitializerByClassName(String targetType) {
if (Protocol.HTTP.name().equalsIgnoreCase(targetType)) {
Optional<CSInitializer> httpCsIOpt = tryLoadCSInitializerByClzName(OFFICIAL_HTTP_CS_INITIALIZER);
if (httpCsIOpt.isPresent()) {
return httpCsIOpt.get();
}
Optional<CSInitializer> httpCsIOpt2 = tryLoadCSInitializerByClzName(OFFICIAL_HTTP_CS_INITIALIZER2);
if (httpCsIOpt2.isPresent()) {
return httpCsIOpt2.get();
}
}
if (Protocol.AKKA.name().equalsIgnoreCase(targetType)) {
Optional<CSInitializer> akkaCSIOpt = tryLoadCSInitializerByClzName(OFFICIAL_AKKA_CS_INITIALIZER);
if (akkaCSIOpt.isPresent()) {
return akkaCSIOpt.get();
}
}
// 尝试加载按规范命名的处理器比如使用方自定义了 http2 协议将其类名定为 tech.powerjob.remote.http2.CSInitializer 依然可确保在 Reflections 不可用的情况下完成加载
String clz = String.format(EXTEND_CS_INITIALIZER_PATTERN, targetType);
Optional<CSInitializer> extOpt = tryLoadCSInitializerByClzName(clz);
return extOpt.orElse(null);
}
private static Optional<CSInitializer> tryLoadCSInitializerByClzName(String clzName) {
try {
log.info("[CSInitializerFactory] try to load CSInitializer by classname: {}", clzName);
Class<?> clz = Class.forName(clzName);
CSInitializer o = (CSInitializer) clz.getDeclaredConstructor().newInstance();
log.info("[CSInitializerFactory] load CSInitializer[{}] successfully, obj: {}", clzName, o);
return Optional.of(o);
} catch (ClassNotFoundException ce) {
log.warn("[CSInitializerFactory] load CSInitializer by classname[{}] failed due to ClassNotFound: {}", clzName, ExceptionUtils.getMessage(ce));
} catch (Exception e) {
log.warn("[CSInitializerFactory] load CSInitializer by classname[{}] failed.", clzName, e);
}
return Optional.empty();
}
} }

View File

@ -41,6 +41,7 @@ public class PowerJobRemoteEngine implements RemoteEngine {
csInitializer.init(new CSInitializerConfig() csInitializer.init(new CSInitializerConfig()
.setBindAddress(engineConfig.getBindAddress()) .setBindAddress(engineConfig.getBindAddress())
.setExternalAddress(engineConfig.getExternalAddress())
.setServerType(engineConfig.getServerType()) .setServerType(engineConfig.getServerType())
); );

View File

@ -5,19 +5,19 @@
<parent> <parent>
<artifactId>powerjob-remote</artifactId> <artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.3.9</version> <version>5.1.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-remote-impl-akka</artifactId> <artifactId>powerjob-remote-impl-akka</artifactId>
<version>4.3.9</version> <version>5.1.0</version>
<properties> <properties>
<maven.compiler.source>8</maven.compiler.source> <maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target> <maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<powerjob-remote-framework.version>4.3.9</powerjob-remote-framework.version> <powerjob-remote-framework.version>5.1.0</powerjob-remote-framework.version>
<akka.version>2.6.13</akka.version> <akka.version>2.6.13</akka.version>
</properties> </properties>

View File

@ -9,6 +9,7 @@ import com.google.common.collect.Maps;
import com.typesafe.config.Config; import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory; import com.typesafe.config.ConfigFactory;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.common.serialize.JsonUtils; import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.remote.framework.actor.ActorInfo; import tech.powerjob.remote.framework.actor.ActorInfo;
import tech.powerjob.remote.framework.base.Address; import tech.powerjob.remote.framework.base.Address;
@ -47,8 +48,22 @@ public class AkkaCSInitializer implements CSInitializer {
// 初始化 ActorSystemmacOS上 new ServerSocket 检测端口占用的方法并不生效可能是AKKA是Scala写的缘故没办法...只能靠异常重试了 // 初始化 ActorSystemmacOS上 new ServerSocket 检测端口占用的方法并不生效可能是AKKA是Scala写的缘故没办法...只能靠异常重试了
Map<String, Object> overrideConfig = Maps.newHashMap(); Map<String, Object> overrideConfig = Maps.newHashMap();
overrideConfig.put("akka.remote.artery.canonical.hostname", bindAddress.getHost());
overrideConfig.put("akka.remote.artery.canonical.port", bindAddress.getPort()); Address externalAddress = config.getExternalAddress();
if (externalAddress == null || StringUtils.equalsIgnoreCase(externalAddress.toFullAddress(), bindAddress.toFullAddress())) {
overrideConfig.put("akka.remote.artery.canonical.hostname", bindAddress.getHost());
overrideConfig.put("akka.remote.artery.canonical.port", bindAddress.getPort());
log.info("[PowerJob-AKKA] not exist externalIp, overrideConfig: {}", overrideConfig);
} else {
overrideConfig.put("akka.remote.artery.canonical.hostname", externalAddress.getHost());
overrideConfig.put("akka.remote.artery.canonical.port", externalAddress.getPort());
overrideConfig.put("akka.remote.artery.bind.hostname", "0.0.0.0");
overrideConfig.put("akka.remote.artery.bind.port", bindAddress.getPort());
log.info("[PowerJob-AKKA] exist externalAddress[{}], final overrideConfig: {}", externalAddress, overrideConfig);
}
Config akkaBasicConfig = ConfigFactory.load(AkkaConstant.AKKA_CONFIG); Config akkaBasicConfig = ConfigFactory.load(AkkaConstant.AKKA_CONFIG);
Config akkaFinalConfig = ConfigFactory.parseMap(overrideConfig).withFallback(akkaBasicConfig); Config akkaFinalConfig = ConfigFactory.parseMap(overrideConfig).withFallback(akkaBasicConfig);

View File

@ -5,12 +5,12 @@
<parent> <parent>
<artifactId>powerjob-remote</artifactId> <artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.3.9</version> <version>5.1.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-remote-impl-http</artifactId> <artifactId>powerjob-remote-impl-http</artifactId>
<version>4.3.9</version> <version>5.1.0</version>
<properties> <properties>
<maven.compiler.source>8</maven.compiler.source> <maven.compiler.source>8</maven.compiler.source>
@ -18,7 +18,7 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<vertx.version>4.3.7</vertx.version> <vertx.version>4.3.7</vertx.version>
<powerjob-remote-framework.version>4.3.9</powerjob-remote-framework.version> <powerjob-remote-framework.version>5.1.0</powerjob-remote-framework.version>
</properties> </properties>
<dependencies> <dependencies>

View File

@ -41,6 +41,8 @@ import java.util.concurrent.TimeUnit;
* - vertx 唯一的缺点是其作为相对上层的框架可能存在较为严重的包冲突问题尤其是对于那些本身跑在 vertx-framework 上的用户 * - vertx 唯一的缺点是其作为相对上层的框架可能存在较为严重的包冲突问题尤其是对于那些本身跑在 vertx-framework 上的用户
* - 不过该问题可以通过更换协议解决预计后续提供一个基于 netty 和自定义协议的实现 * - 不过该问题可以通过更换协议解决预计后续提供一个基于 netty 和自定义协议的实现
* *
* 20240316 note注意类名被强依赖后续若有改动需要同步更改
*
* @author tjq * @author tjq
* @since 2022/12/31 * @since 2022/12/31
*/ */

View File

@ -5,12 +5,12 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.3.9</version> <version>5.1.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-server</artifactId> <artifactId>powerjob-server</artifactId>
<version>4.3.9</version> <version>5.1.0</version>
<packaging>pom</packaging> <packaging>pom</packaging>
<modules> <modules>
@ -22,6 +22,7 @@
<module>powerjob-server-migrate</module> <module>powerjob-server-migrate</module>
<module>powerjob-server-core</module> <module>powerjob-server-core</module>
<module>powerjob-server-monitor</module> <module>powerjob-server-monitor</module>
<module>powerjob-server-auth</module>
</modules> </modules>
@ -50,9 +51,9 @@
<groovy.version>3.0.10</groovy.version> <groovy.version>3.0.10</groovy.version>
<cron-utils.version>9.2.1</cron-utils.version> <cron-utils.version>9.2.1</cron-utils.version>
<powerjob-common.version>4.3.9</powerjob-common.version> <powerjob-common.version>5.1.0</powerjob-common.version>
<powerjob-remote-impl-http.version>4.3.9</powerjob-remote-impl-http.version> <powerjob-remote-impl-http.version>5.1.0</powerjob-remote-impl-http.version>
<powerjob-remote-impl-akka.version>4.3.9</powerjob-remote-impl-akka.version> <powerjob-remote-impl-akka.version>5.1.0</powerjob-remote-impl-akka.version>
<springdoc-openapi-ui.version>1.6.14</springdoc-openapi-ui.version> <springdoc-openapi-ui.version>1.6.14</springdoc-openapi-ui.version>
<aliyun-sdk-oss.version>3.17.1</aliyun-sdk-oss.version> <aliyun-sdk-oss.version>3.17.1</aliyun-sdk-oss.version>
<aws-java-sdk-s3.version>1.12.665</aws-java-sdk-s3.version> <aws-java-sdk-s3.version>1.12.665</aws-java-sdk-s3.version>
@ -96,12 +97,23 @@
<artifactId>powerjob-server-migrate</artifactId> <artifactId>powerjob-server-migrate</artifactId>
<version>${project.version}</version> <version>${project.version}</version>
</dependency> </dependency>
<dependency>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob-server-auth</artifactId>
<version>${project.version}</version>
</dependency>
<dependency> <dependency>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<artifactId>powerjob-server-starter</artifactId> <artifactId>powerjob-server-starter</artifactId>
<version>${project.version}</version> <version>${project.version}</version>
</dependency> </dependency>
<dependency>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob-client</artifactId>
<version>${project.version}</version>
</dependency>
<!-- 存储扩展-MongoDB未使用可移除 --> <!-- 存储扩展-MongoDB未使用可移除 -->
<dependency> <dependency>
<groupId>org.mongodb</groupId> <groupId>org.mongodb</groupId>
@ -120,7 +132,6 @@
<artifactId>aws-java-sdk-s3</artifactId> <artifactId>aws-java-sdk-s3</artifactId>
<version>${aws-java-sdk-s3.version}</version> <version>${aws-java-sdk-s3.version}</version>
</dependency> </dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-collections4 --> <!-- https://mvnrepository.com/artifact/org.apache.commons/commons-collections4 -->
<dependency> <dependency>
<groupId>org.apache.commons</groupId> <groupId>org.apache.commons</groupId>

View File

@ -0,0 +1,58 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- powerjob-server-auth: authentication & authorization module (RBAC roles/permissions, JWT login, third-party login providers). -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <groupId>tech.powerjob</groupId>
        <artifactId>powerjob-server</artifactId>
        <version>5.1.0</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>powerjob-server-auth</artifactId>
    <version>${project.parent.version}</version>
    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <jjwt.version>0.11.5</jjwt.version>
        <dingtalk.version>1.1.86</dingtalk.version>
    </properties>
    <dependencies>
        <!-- persistence is provided by the server assembly at runtime; compile-only here -->
        <dependency>
            <groupId>tech.powerjob</groupId>
            <artifactId>powerjob-server-persistence</artifactId>
            <scope>provided</scope>
        </dependency>
        <!-- jjwt: api at compile time; impl/jackson only needed at runtime -->
        <dependency>
            <groupId>io.jsonwebtoken</groupId>
            <artifactId>jjwt-api</artifactId>
            <version>${jjwt.version}</version>
        </dependency>
        <dependency>
            <groupId>io.jsonwebtoken</groupId>
            <artifactId>jjwt-impl</artifactId>
            <version>${jjwt.version}</version>
            <scope>runtime</scope>
        </dependency>
        <dependency>
            <groupId>io.jsonwebtoken</groupId>
            <artifactId>jjwt-jackson</artifactId>
            <version>${jjwt.version}</version>
            <scope>runtime</scope>
        </dependency>
        <!-- DingTalk SDK: backs the DingTalk third-party login provider -->
        <dependency>
            <groupId>com.aliyun</groupId>
            <artifactId>dingtalk</artifactId>
            <version>${dingtalk.version}</version>
        </dependency>
    </dependencies>
</project>

View File

@ -0,0 +1,48 @@
package tech.powerjob.server.auth;

/**
 * Holds the user that authenticated the current request, bound to the handling
 * thread via a {@link ThreadLocal}. Set by the auth interceptor before the
 * controller runs and cleared after completion.
 *
 * @author tjq
 * @since 2023/4/16
 */
public class LoginUserHolder {

    private static final ThreadLocal<PowerJobUser> CURRENT_USER = new ThreadLocal<>();

    public static PowerJobUser get() {
        return CURRENT_USER.get();
    }

    public static void set(PowerJobUser powerJobUser) {
        CURRENT_USER.set(powerJobUser);
    }

    /** Remove the binding; must be called when request handling finishes to avoid thread-pool leakage. */
    public static void clean() {
        CURRENT_USER.remove();
    }

    /**
     * Fetch the username of the current login user.
     *
     * @return the username when a user is bound to this thread, otherwise the literal "UNKNOWN"
     */
    public static String getUserName() {
        PowerJobUser user = get();
        return user == null ? "UNKNOWN" : user.getUsername();
    }

    /**
     * Fetch the ID of the current login user.
     *
     * @return the user id when a user is bound to this thread, otherwise {@code null}
     */
    public static Long getUserId() {
        PowerJobUser user = get();
        return user == null ? null : user.getId();
    }
}

View File

@ -0,0 +1,40 @@
package tech.powerjob.server.auth;

import lombok.AllArgsConstructor;
import lombok.Getter;

/**
 * Permission levels used by PowerJob's RBAC model.
 *
 * @author tjq
 * @since 2023/3/20
 */
@Getter
@AllArgsConstructor
public enum Permission {

    /**
     * No permission required.
     */
    NONE(1),
    /**
     * Read permission: view console data.
     */
    READ(10),
    /**
     * Write permission: create / modify jobs, etc.
     */
    WRITE(20),
    /**
     * Ops permission: operational actions, e.g. triggering a job run.
     */
    OPS(30),
    /**
     * Super permission: grants everything.
     */
    SU(100)
    ;

    /**
     * Numeric code of the permission.
     * Declared {@code final} so enum constants stay immutable, consistent with
     * {@link Role#getV()} and {@link RoleScope#getV()} in this package.
     */
    private final int v;
}

View File

@ -0,0 +1,44 @@
package tech.powerjob.server.auth;

import lombok.Getter;
import lombok.Setter;
import lombok.ToString;

import java.io.Serializable;

/**
 * A logged-in PowerJob user (the server-side session representation).
 *
 * @author tjq
 * @since 2023/3/20
 */
@Getter
@Setter
@ToString
public class PowerJobUser implements Serializable {

    // database primary key
    private Long id;
    // unique login name
    private String username;
    /**
     * Phone number.
     */
    private String phone;
    /**
     * Email address.
     */
    private String email;
    /**
     * Webhook URL — presumably used for notification callbacks; confirm against callers.
     */
    private String webHook;
    /**
     * Free-form extension field.
     */
    private String extra;

    /* ************** fields above map to database columns ************** */

    // JWT issued at login; transient session data, not a database column
    private String jwtToken;
}

View File

@ -0,0 +1,53 @@
package tech.powerjob.server.auth;

import com.google.common.collect.Sets;
import lombok.AllArgsConstructor;
import lombok.Getter;

import java.util.Arrays;
import java.util.Set;

import static tech.powerjob.server.auth.Permission.*;

/**
 * Roles of PowerJob's RBAC model.
 * PowerJob deliberately uses a static (non-dynamic) permission model, so both
 * roles and their permission sets are defined here; adjust this enum for
 * custom requirements.
 *
 * @author tjq
 * @since 2023/3/20
 */
@Getter
@AllArgsConstructor
public enum Role {

    /**
     * Observer: read-only access by default.
     */
    OBSERVER(10, Sets.newHashSet(READ)),
    /**
     * QA: read plus operational actions.
     */
    QA(20, Sets.newHashSet(READ, OPS)),
    /**
     * Developer: read, write and operational actions.
     */
    DEVELOPER(30, Sets.newHashSet(READ, WRITE, OPS)),
    /**
     * Administrator: every permission.
     */
    ADMIN(40, Sets.newHashSet(READ, WRITE, OPS, SU))
    ;

    /** numeric code persisted externally */
    private final int v;

    /** permissions granted by this role */
    private final Set<Permission> permissions;

    /**
     * Resolve a role from its numeric code.
     *
     * @param vv numeric role code
     * @return the matching role
     * @throws IllegalArgumentException when no role carries the given code
     */
    public static Role of(int vv) {
        return Arrays.stream(values())
                .filter(role -> role.v == vv)
                .findFirst()
                .orElseThrow(() -> new IllegalArgumentException("unknown role: " + vv));
    }
}

View File

@ -0,0 +1,40 @@
package tech.powerjob.server.auth;

import lombok.AllArgsConstructor;
import lombok.Getter;

import java.util.Arrays;

/**
 * Scope a role applies to (namespace, app or global).
 *
 * @author tjq
 * @since 2023/9/3
 */
@Getter
@AllArgsConstructor
public enum RoleScope {

    /**
     * Namespace-level permissions.
     */
    NAMESPACE(1),
    /**
     * App-level permissions.
     */
    APP(10),
    /**
     * Global permissions.
     */
    GLOBAL(666)
    ;

    /** numeric code persisted externally */
    private final int v;

    /**
     * Resolve a scope from its numeric code.
     *
     * @param vv numeric scope code
     * @return the matching scope
     * @throws IllegalArgumentException when no scope carries the given code
     */
    public static RoleScope of(int vv) {
        return Arrays.stream(values())
                .filter(scope -> scope.v == vv)
                .findFirst()
                .orElseThrow(() -> new IllegalArgumentException("unknown RoleScope: " + vv));
    }
}

View File

@ -0,0 +1,53 @@
package tech.powerjob.server.auth.common;

/**
 * Constants shared by the auth module.
 *
 * @author tjq
 * @since 2024/2/11
 */
public class AuthConstants {

    /* ********** account-type identifiers; third-party integrations should also use 4-char codes so the frontend can style them uniformly ********** */

    /**
     * PowerJob's built-in account system.
     */
    public static final String ACCOUNT_TYPE_POWER_JOB = "PWJB";
    /**
     * DingTalk.
     */
    public static final String ACCOUNT_TYPE_DING = "DING";
    /**
     * WeCom (Enterprise WeChat) — reserved, contributor wanted.
     */
    public static final String ACCOUNT_TYPE_WX = "QYWX";
    /**
     * Lark / Feishu — reserved, contributor wanted.
     */
    public static final String ACCOUNT_LARK = "LARK";

    // login request parameter names
    public static final String PARAM_KEY_USERNAME = "username";
    public static final String PARAM_KEY_PASSWORD = "password";
    /**
     * Frontend parameter: password encryption type. The official build does not
     * encrypt the frontend-to-server transfer; integrators needing it can implement
     * their own scheme — this field carries the chosen encryption protocol.
     */
    public static final String PARAM_KEY_ENCRYPTION = "encryption";

    /* ********** token / session ********** */

    /**
     * JWT header names. The frontend capitalizes header initials by default;
     * keeping the same convention simplifies handling.
     */
    public static final String OLD_JWT_NAME = "Power_jwt";
    public static final String JWT_NAME = "PowerJwt";

    /**
     * Prefix instructing the frontend to redirect to a given page.
     */
    public static final String FE_REDIRECT_KEY = "FE-REDIRECT:";

    // placeholder shown instead of data the viewer may not see
    public static final String TIPS_NO_PERMISSION_TO_SEE = "NO_PERMISSION_TO_SEE";

    // targetId used for global-scope admin grants -- presumably matches RoleScope.GLOBAL usage; confirm against permission service
    public static final Long GLOBAL_ADMIN_TARGET_ID = 1L;
}

View File

@ -0,0 +1,23 @@
package tech.powerjob.server.auth.common;

import lombok.Getter;
import tech.powerjob.common.enums.ErrorCodes;
import tech.powerjob.common.exception.PowerJobException;

/**
 * Authentication / authorization related error.
 *
 * @author tjq
 * @since 2024/2/10
 */
@Getter
public class PowerJobAuthException extends PowerJobException {

    /**
     * Build an auth exception carrying only an error code.
     *
     * @param errorCode the auth error code
     */
    public PowerJobAuthException(ErrorCodes errorCode) {
        this(errorCode, null);
    }

    /**
     * Build an auth exception with an error code and extra detail.
     *
     * @param errorCode the auth error code
     * @param extraMsg  additional human-readable detail, may be null
     */
    public PowerJobAuthException(ErrorCodes errorCode, String extraMsg) {
        super(errorCode, extraMsg);
    }
}

View File

@ -0,0 +1,27 @@
package tech.powerjob.server.auth.common.utils;
import tech.powerjob.common.OmsConstant;
import javax.servlet.http.HttpServletRequest;
/**
* HttpServletUtils
*
* @author tjq
* @since 2024/2/12
*/
public class HttpServletUtils {
public static String fetchFromHeader(String key, HttpServletRequest httpServletRequest) {
// headercookie 都能获取
String v = httpServletRequest.getHeader(key);
// 解决 window.localStorage.getItem null 的问题
if (OmsConstant.NULL.equalsIgnoreCase(v) || "undefined".equalsIgnoreCase(v)) {
return null;
}
return v;
}
}

View File

@ -0,0 +1,45 @@
package tech.powerjob.server.auth.interceptor;

import tech.powerjob.server.auth.Permission;
import tech.powerjob.server.auth.RoleScope;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Declares the permission required to call a controller method.
 * Evaluated by PowerJobAuthInterceptor (login + permission check) and by
 * ApiPermissionAspect (post-creation grant).
 *
 * @author tjq
 * @since 2023/3/20
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface ApiPermission {

    /**
     * Display name of the API.
     *
     * @return when empty, "ClassName_methodName" is used instead
     */
    String name() default "";

    // scope the required permission is checked against (namespace / app / global)
    RoleScope roleScope() default RoleScope.APP;

    /**
     * Permission required to call this API.
     *
     * @return the required permission; defaults to the most restrictive (SU)
     */
    Permission requiredPermission() default Permission.SU;

    /**
     * Dynamic permission computation, for cases a fixed permission can't express.
     *
     * @return plugin computing the required permission per request; EmptyPlugin means "use requiredPermission()"
     */
    Class<? extends DynamicPermissionPlugin> dynamicPermissionPlugin() default EmptyPlugin.class;

    /**
     * Grant plugin run after creation-style APIs so the creator receives permissions
     * on the new resource. NOTE(review): "grand" is a typo for "grant", but renaming
     * would break every existing usage — keep as-is.
     *
     * @return grant plugin; EmptyPlugin means no grant is performed
     */
    Class<? extends GrantPermissionPlugin> grandPermissionPlugin() default EmptyPlugin.class;
}

View File

@ -0,0 +1,65 @@
package tech.powerjob.server.auth.interceptor;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.AfterReturning;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.stereotype.Component;

import java.lang.reflect.Method;

/**
 * Aspect for {@link ApiPermission}: after an annotated method returns
 * successfully, runs the configured grant plugin so that, e.g., the creator of
 * a new resource is granted permissions on it.
 *
 * @author tjq
 * @since 2024/2/11
 */
@Slf4j
@Aspect
@Component
public class ApiPermissionAspect {

    // NOTE(review): the annotation name is unqualified here; it resolves because the
    // aspect lives in the same package as ApiPermission — confirm if either moves.
    @Pointcut("@annotation(ApiPermission)")
    public void apiPermissionPointcut() {
        // pointcut marker; intentionally empty
    }

    /**
     * After-returning advice (runs only on normal return, unlike @After which also
     * runs on exceptions — here a failed call must not trigger a grant). The
     * {@code returning} parameter binds the target method's return value; declaring
     * it as Object matches any return type.
     */
    @AfterReturning(value = "apiPermissionPointcut()", returning = "result")
    public void doAfterReturningAdvice1(JoinPoint joinPoint, Object result) {
        // arguments the intercepted method was called with
        Object[] args = joinPoint.getArgs();
        // resolve the intercepted method and its ApiPermission annotation
        MethodSignature signature = (MethodSignature) joinPoint.getSignature();
        Method method = signature.getMethod();
        ApiPermission annotationAnno = AnnotationUtils.getAnnotation(method, ApiPermission.class);
        // non-null by construction: the pointcut only matches annotated methods
        assert annotationAnno != null;
        Class<? extends GrantPermissionPlugin> grandPermissionPluginClz = annotationAnno.grandPermissionPlugin();
        try {
            // plugin instantiated per invocation via its no-arg constructor
            GrantPermissionPlugin grandPermissionPlugin = grandPermissionPluginClz.getDeclaredConstructor().newInstance();
            grandPermissionPlugin.grant(args, result, method, joinPoint.getTarget());
        } catch (Exception e) {
            // a failed grant is surfaced to the caller rather than swallowed
            log.error("[ApiPermissionAspect] process ApiPermission grant failed", e);
            ExceptionUtils.rethrow(e);
        }
    }
}

View File

@ -0,0 +1,15 @@
package tech.powerjob.server.auth.interceptor;

import tech.powerjob.server.auth.Permission;

import javax.servlet.http.HttpServletRequest;

/**
 * Computes the required permission per request, for APIs whose permission
 * cannot be expressed as a fixed {@link ApiPermission#requiredPermission()}.
 *
 * @author tjq
 * @since 2023/9/3
 */
public interface DynamicPermissionPlugin {

    /**
     * Calculate the permission required for this request.
     *
     * @param request the incoming HTTP request
     * @param handler the Spring MVC handler about to be invoked
     * @return the permission the caller must hold
     */
    Permission calculate(HttpServletRequest request, Object handler);
}

View File

@ -0,0 +1,24 @@
package tech.powerjob.server.auth.interceptor;

import tech.powerjob.server.auth.Permission;

import javax.servlet.http.HttpServletRequest;
import java.lang.reflect.Method;

/**
 * No-op placeholder plugin, used as the default value of
 * {@link ApiPermission#dynamicPermissionPlugin()} and
 * {@link ApiPermission#grandPermissionPlugin()} to mean "feature disabled".
 *
 * @author tjq
 * @since 2024/2/12
 */
public class EmptyPlugin implements DynamicPermissionPlugin, GrantPermissionPlugin {

    @Override
    public Permission calculate(HttpServletRequest request, Object handler) {
        // never invoked: the interceptor checks for EmptyPlugin before instantiating
        return null;
    }

    @Override
    public void grant(Object[] args, Object result, Method method, Object originBean) {
        // intentionally empty: no grant is performed
    }
}

View File

@ -0,0 +1,21 @@
package tech.powerjob.server.auth.interceptor;

import java.lang.reflect.Method;

/**
 * Grants permissions after an annotated API returns successfully — typically
 * used so the creator of a new resource automatically receives permissions on it.
 *
 * @author tjq
 * @since 2024/2/11
 */
public interface GrantPermissionPlugin {

    /**
     * Perform the grant.
     *
     * @param args       arguments the intercepted method was called with
     * @param result     the intercepted method's return value
     * @param method     the intercepted method
     * @param originBean the bean the method was invoked on
     */
    void grant(Object[] args, Object result, Method method, Object originBean);
}

View File

@ -0,0 +1,135 @@
package tech.powerjob.server.auth.interceptor;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.springframework.lang.NonNull;
import org.springframework.stereotype.Component;
import org.springframework.web.method.HandlerMethod;
import org.springframework.web.servlet.HandlerInterceptor;
import tech.powerjob.common.exception.ImpossibleException;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.server.auth.LoginUserHolder;
import tech.powerjob.server.auth.Permission;
import tech.powerjob.server.auth.PowerJobUser;
import tech.powerjob.server.auth.RoleScope;
import tech.powerjob.common.enums.ErrorCodes;
import tech.powerjob.server.auth.common.PowerJobAuthException;
import tech.powerjob.server.auth.common.utils.HttpServletUtils;
import tech.powerjob.server.auth.service.login.PowerJobLoginService;
import tech.powerjob.server.auth.service.permission.PowerJobPermissionService;
import tech.powerjob.server.common.Loggers;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.lang.reflect.Method;
import java.util.Optional;

/**
 * Login auth and permission check for controller methods annotated with
 * {@link ApiPermission}. Un-annotated methods pass through untouched.
 *
 * @author tjq
 * @since 2023/3/25
 */
@Slf4j
@Component
public class PowerJobAuthInterceptor implements HandlerInterceptor {

    @Resource
    private PowerJobLoginService powerJobLoginService;
    @Resource
    private PowerJobPermissionService powerJobPermissionService;

    /**
     * Authenticates and authorizes the request.
     * Flow: resolve @ApiPermission -> resolve login user -> bind to LoginUserHolder
     * -> resolve target id from headers -> check permission.
     *
     * @return true to continue the handler chain
     * @throws PowerJobAuthException when not logged in (frontend intercepts the
     *         fixed code and redirects to the login page)
     * @throws PowerJobException when logged in but lacking the required permission
     */
    @Override
    public boolean preHandle(@NonNull HttpServletRequest request,@NonNull HttpServletResponse response,@NonNull Object handler) throws Exception {
        if (!(handler instanceof HandlerMethod)) {
            return true;
        }
        HandlerMethod handlerMethod = (HandlerMethod) handler;
        final Method method = handlerMethod.getMethod();
        final ApiPermission apiPermissionAnno = method.getAnnotation(ApiPermission.class);
        // no annotation: no permission required, accessible without login
        if (apiPermissionAnno == null) {
            return true;
        }
        // resolve the current login user from the request
        final Optional<PowerJobUser> loginUserOpt = powerJobLoginService.ifLogin(request);
        // not logged in: fail fast with a fixed error code the frontend turns into a login redirect
        if (!loginUserOpt.isPresent()) {
            throw new PowerJobAuthException(ErrorCodes.USER_NOT_LOGIN);
        }
        // logged in: proceed to the permission check
        final PowerJobUser powerJobUser = loginUserOpt.get();
        // bind the user to the thread context (cleared in afterCompletion)
        LoginUserHolder.set(powerJobUser);
        Permission requiredPermission = parsePermission(request, handler, apiPermissionAnno);
        RoleScope roleScope = apiPermissionAnno.roleScope();
        // target id comes from scope-specific request headers; stays null for GLOBAL scope
        // NOTE(review): Long.valueOf throws NumberFormatException on a non-numeric header — confirm upstream validation
        Long targetId = null;
        if (RoleScope.NAMESPACE.equals(roleScope)) {
            final String namespaceIdStr = HttpServletUtils.fetchFromHeader("NamespaceId", request);
            if (StringUtils.isNotEmpty(namespaceIdStr)) {
                targetId = Long.valueOf(namespaceIdStr);
            }
        }
        if (RoleScope.APP.equals(roleScope)) {
            final String appIdStr = HttpServletUtils.fetchFromHeader("AppId", request);
            if (StringUtils.isNotEmpty(appIdStr)) {
                targetId = Long.valueOf(appIdStr);
            }
        }
        final boolean hasPermission = powerJobPermissionService.hasPermission(powerJobUser.getId(), roleScope, targetId, requiredPermission);
        if (hasPermission) {
            return true;
        }
        final String resourceName = parseResourceName(apiPermissionAnno, handlerMethod);
        Loggers.WEB.info("[PowerJobAuthInterceptor] user[{}] has no permission to access: {}", powerJobUser.getUsername(), resourceName);
        throw new PowerJobException("Permission denied!");
    }

    /** Always clears the thread-bound user, whether the handler succeeded or threw. */
    @Override
    public void afterCompletion(@NonNull HttpServletRequest request, @NonNull HttpServletResponse response, @NonNull Object handler, Exception ex) throws Exception {
        LoginUserHolder.clean();
    }

    /**
     * Resource name for logging: the annotation's name() when set, else
     * "ClassName_methodName", else "UNKNOWN".
     */
    private static String parseResourceName(ApiPermission apiPermission, HandlerMethod handlerMethod) {
        final String name = apiPermission.name();
        if (StringUtils.isNotEmpty(name)) {
            return name;
        }
        try {
            final String clzName = handlerMethod.getBean().getClass().getSimpleName();
            final String methodName = handlerMethod.getMethod().getName();
            return String.format("%s_%s", clzName, methodName);
        } catch (Exception ignore) {
            // best-effort naming only; fall through to UNKNOWN
        }
        return "UNKNOWN";
    }

    /**
     * Required permission: the static requiredPermission() unless a dynamic plugin
     * is configured, in which case the plugin computes it per request. A plugin
     * failure is rethrown (the trailing throw is unreachable and only satisfies the compiler).
     */
    private static Permission parsePermission(HttpServletRequest request, Object handler, ApiPermission apiPermission) {
        Class<? extends DynamicPermissionPlugin> dynamicPermissionPlugin = apiPermission.dynamicPermissionPlugin();
        if (EmptyPlugin.class.equals(dynamicPermissionPlugin)) {
            return apiPermission.requiredPermission();
        }
        try {
            DynamicPermissionPlugin dynamicPermission = dynamicPermissionPlugin.getDeclaredConstructor().newInstance();
            return dynamicPermission.calculate(request, handler);
        } catch (Throwable t) {
            log.error("[PowerJobAuthService] process dynamicPermissionPlugin failed!", t);
            ExceptionUtils.rethrow(t);
        }
        throw new ImpossibleException();
    }
}

View File

@ -0,0 +1,16 @@
package tech.powerjob.server.auth.jwt;

import java.util.Map;

/**
 * JWT build / parse service.
 *
 * @author tjq
 * @since 2023/3/20
 */
public interface JwtService {

    /**
     * Build a signed JWT containing the given claims.
     *
     * @param body    claims to embed in the token
     * @param extraSk extra secret material appended to the base secret; may be null/empty
     * @return the compact JWT string
     */
    String build(Map<String, Object> body, String extraSk);

    /**
     * Parse and verify a JWT.
     *
     * @param jwt     the compact JWT string
     * @param extraSk the same extra secret material used at build time; may be null/empty
     * @return parse outcome (status + claims on success)
     */
    ParseResult parse(String jwt, String extraSk);
}

View File

@ -0,0 +1,35 @@
package tech.powerjob.server.auth.jwt;
import lombok.Data;
import lombok.experimental.Accessors;
import java.io.Serializable;
import java.util.Map;
/**
* 解析结果
*
* @author tjq
* @since 2024/8/11
*/
@Data
@Accessors(chain = true)
public class ParseResult implements Serializable {
/**
* 解析状态
*/
private Status status;
/**
* 解析结果
*/
private Map<String, Object> result;
private String msg;
public enum Status {
SUCCESS,
EXPIRED,
FAILED
}
}

View File

@ -0,0 +1,13 @@
package tech.powerjob.server.auth.jwt;
/**
* JWT 安全性的核心
* 对安全性有要求的接入方可以自行重新该方法自定义自己的安全 token 生成策略
*
* @author tjq
* @since 2023/3/20
*/
public interface SecretProvider {
String fetchSecretKey();
}

View File

@ -0,0 +1,47 @@
package tech.powerjob.server.auth.jwt.impl;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;
import tech.powerjob.server.auth.jwt.SecretProvider;
import tech.powerjob.common.utils.DigestUtils;
import javax.annotation.Resource;
/**
* PowerJob 默认实现
*
* @author tjq
* @since 2023/3/20
*/
@Slf4j
@Component
public class DefaultSecretProvider implements SecretProvider {
@Resource
private Environment environment;
private static final String PROPERTY_KEY = "spring.datasource.core.jdbc-url";
@Override
public String fetchSecretKey() {
// 考虑到大部分用户都是开箱即用此处还是提供一个相对安全的默认实现JDBC URL 部署时必会改skey 不固定更安全
try {
String propertyValue = environment.getProperty(PROPERTY_KEY);
if (StringUtils.isNotEmpty(propertyValue)) {
String md5 = DigestUtils.md5(propertyValue);
log.debug("[DefaultSecretProvider] propertyValue: {} ==> md5: {}", propertyValue, md5);
if (StringUtils.isNotEmpty(md5)) {
return md5;
}
}
} catch (Exception ignore) {
}
return "ZQQZJ";
}
}

View File

@ -0,0 +1,104 @@
package tech.powerjob.server.auth.jwt.impl;

import com.google.common.collect.Maps;
import io.jsonwebtoken.*;
import io.jsonwebtoken.io.Decoders;
import io.jsonwebtoken.security.Keys;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import tech.powerjob.server.auth.jwt.JwtService;
import tech.powerjob.server.auth.jwt.ParseResult;
import tech.powerjob.server.auth.jwt.SecretProvider;

import javax.annotation.Resource;
import java.security.Key;
import java.util.Date;
import java.util.Map;
import java.util.UUID;

/**
 * Default {@link JwtService} implementation backed by jjwt (HS256).
 *
 * @author tjq
 * @since 2023/3/20
 */
@Slf4j
@Service
public class JwtServiceImpl implements JwtService {

    @Resource
    private SecretProvider secretProvider;

    /**
     * Client-side JWT lifetime in seconds; defaults to 604800 (7 days).
     */
    @Value("${oms.auth.security.jwt.expire-seconds:604800}")
    private int jwtExpireTime;

    /**
     * Static prefix mixed into every signing key.
     * <a href="https://music.163.com/#/song?id=167975">GoodSong</a>
     */
    private static final String BASE_SECURITY =
            "CengMengXiangZhangJianZouTianYa" +
                    "KanYiKanShiJieDeFanHua" +
                    "NianShaoDeXinZongYouXieQingKuang" +
                    "RuJinWoSiHaiWeiJia"
            ;

    @Override
    public String build(Map<String, Object> body, String extraSk) {
        final String secret = fetchSk(extraSk);
        return innerBuild(secret, jwtExpireTime, body);
    }

    /**
     * Build a compact HS256-signed JWT carrying the given claims, expiring
     * {@code expireSeconds} from now, with a random UUID as token id.
     */
    static String innerBuild(String secret, int expireSeconds, Map<String, Object> body) {
        JwtBuilder jwtBuilder = Jwts.builder()
                .setHeaderParam("typ", "JWT")
                .addClaims(body)
                .setSubject("PowerJob")
                .setExpiration(new Date(System.currentTimeMillis() + 1000L * expireSeconds))
                .setId(UUID.randomUUID().toString())
                .signWith(genSecretKey(secret), SignatureAlgorithm.HS256);
        return jwtBuilder.compact();
    }

    /**
     * Parse and verify a JWT; expiry maps to EXPIRED, every other failure to FAILED.
     */
    @Override
    public ParseResult parse(String jwt, String extraSk) {
        try {
            Map<String, Object> parseResult = innerParse(fetchSk(extraSk), jwt);
            return new ParseResult().setStatus(ParseResult.Status.SUCCESS).setResult(parseResult);
        } catch (ExpiredJwtException expiredJwtException) {
            return new ParseResult().setStatus(ParseResult.Status.EXPIRED).setMsg(expiredJwtException.getMessage());
        } catch (Exception e) {
            log.warn("[JwtService] parse jwt[{}] with extraSk[{}] failed", jwt, extraSk, e);
            return new ParseResult().setStatus(ParseResult.Status.FAILED).setMsg(ExceptionUtils.getMessage(e));
        }
    }

    // base secret, optionally concatenated with the caller-supplied extra secret
    private String fetchSk(String extraSk) {
        if (StringUtils.isEmpty(extraSk)) {
            return secretProvider.fetchSecretKey();
        }
        return secretProvider.fetchSecretKey().concat(extraSk);
    }

    /** Verify signature and return all claims as a plain map; throws on any verification failure. */
    static Map<String, Object> innerParse(String secret, String jwtStr) {
        final Jws<Claims> claimsJws = Jwts.parserBuilder()
                .setSigningKey(genSecretKey(secret))
                .build()
                .parseClaimsJws(jwtStr);
        Map<String, Object> ret = Maps.newHashMap();
        ret.putAll(claimsJws.getBody());
        return ret;
    }

    // NOTE(review): the concatenated string is decoded AS Base64 text, not encoded to it;
    // a secret containing non-Base64 characters would make decoding throw — confirm all
    // SecretProvider implementations emit Base64-safe strings (hex md5 / "ZQQZJ" are fine).
    private static Key genSecretKey(String secret) {
        byte[] keyBytes = Decoders.BASE64.decode(BASE_SECURITY.concat(secret));
        return Keys.hmacShaKeyFor(keyBytes);
    }
}

View File

@ -0,0 +1,30 @@
package tech.powerjob.server.auth.login;

import lombok.Data;
import lombok.experimental.Accessors;

import java.io.Serializable;

/**
 * Describes an available login type (shown on the login page).
 *
 * @author tjq
 * @since 2024/2/10
 */
@Data
@Accessors(chain = true)
public class LoginTypeInfo implements Serializable {

    /**
     * Unique identifier of the login type (e.g. the 4-char account-type codes).
     */
    private String type;

    /**
     * Display name shown by the frontend.
     */
    private String name;

    /**
     * Icon URL shown by the frontend.
     */
    private String iconUrl;
}

View File

@ -0,0 +1,24 @@
package tech.powerjob.server.auth.login;
import lombok.Data;
import lombok.experimental.Accessors;
import javax.servlet.http.HttpServletRequest;
/**
 * Third-party login request.
 *
 * @author tjq
 * @since 2024/2/10
 */
@Data
@Accessors(chain = true)
public class ThirdPartyLoginRequest {

    /**
     * Raw parameters: a pass-through data channel between the server side and the front
     * end for the third-party login implementation; PowerJob itself does not interpret
     * the content
     */
    private String originParams;

    // transient: the raw HTTP request is excluded from serialization
    private transient HttpServletRequest httpServletRequest;
}

View File

@ -0,0 +1,42 @@
package tech.powerjob.server.auth.login;
import javax.servlet.http.HttpServletRequest;
/**
 * Third-party login service SPI.
 *
 * @author tjq
 * @since 2024/2/10
 */
public interface ThirdPartyLoginService {

    /**
     * Type of this login service
     * @return login service type, e.g. PowerJob / DingTalk
     */
    LoginTypeInfo loginType();

    /**
     * Generates the redirect URL that starts the login flow
     * @param httpServletRequest HTTP request
     * @return redirect URL
     */
    String generateLoginUrl(HttpServletRequest httpServletRequest);

    /**
     * Performs the actual third-party login
     * @param loginRequest login context
     * @return the authenticated third-party user
     */
    ThirdPartyUser login(ThirdPartyLoginRequest loginRequest);

    /**
     * Secondary verification callback for JWT-based login
     * @param username user name
     * @param tokenLoginVerifyInfo secondary verification info
     * @return whether verification passed (default implementation always passes)
     */
    default boolean tokenLoginVerify(String username, TokenLoginVerifyInfo tokenLoginVerifyInfo) {
        return true;
    }
}

View File

@ -0,0 +1,47 @@
package tech.powerjob.server.auth.login;
import lombok.Data;
import lombok.experimental.Accessors;
/**
 * A user as reported by a third-party account system.
 *
 * @author tjq
 * @since 2024/2/10
 */
@Data
@Accessors(chain = true)
public class ThirdPartyUser {

    /**
     * Unique user identifier, linked to the PowerJob username
     */
    private String username;

    /**
     * Secondary-verification config for JWT login.
     * Nullable; when null, secondary verification is skipped, which means a JWT issued
     * before a third-party password change would remain valid
     */
    private TokenLoginVerifyInfo tokenLoginVerifyInfo;

    /* ******** Everything below is optional; it only eases data sync and can be changed later in the PowerJob console ******** */

    /**
     * Nickname
     */
    private String nick;

    /**
     * Phone number
     */
    private String phone;

    /**
     * Email address
     */
    private String email;

    /**
     * Web callback (webhook) address
     */
    private String webHook;

    /**
     * Extension field
     */
    private String extra;
}

View File

@ -0,0 +1,31 @@
package tech.powerjob.server.auth.login;
import lombok.Data;
import java.io.Serializable;
/**
 * Verification info checked during JWT-based login.
 *
 * @author tjq
 * @since 2024/2/16
 */
@Data
public class TokenLoginVerifyInfo implements Serializable {

    /**
     * Encrypted token part (e.g. the MD5 of the password), written directly into the
     * JWT handed down to the front end.
     * Required whenever JWT secondary verification is to be used
     */
    private String encryptedToken;

    /**
     * Additional info used for secondary verification
     */
    private String additionalInfo;

    /**
     * Reserved field for third-party implementations' own use
     */
    private String extra;
}

View File

@ -0,0 +1,142 @@
package tech.powerjob.server.auth.login.impl;
import com.aliyun.dingtalkcontact_1_0.models.GetUserHeaders;
import com.aliyun.dingtalkcontact_1_0.models.GetUserResponseBody;
import com.aliyun.dingtalkoauth2_1_0.models.GetUserTokenRequest;
import com.aliyun.dingtalkoauth2_1_0.models.GetUserTokenResponse;
import com.aliyun.teaopenapi.models.Config;
import com.aliyun.teautil.models.RuntimeOptions;
import lombok.SneakyThrows;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.server.auth.common.AuthConstants;
import tech.powerjob.server.auth.login.*;
import tech.powerjob.server.common.Loggers;
import javax.servlet.http.HttpServletRequest;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
/**
 * <a href="https://open.dingtalk.com/document/orgapp/tutorial-obtaining-user-personal-information">Sign in to a third-party website with a DingTalk account</a>
 * Why PowerJob officially supports DingTalk-based login:
 * 1. DingTalk is among the most widely deployed enterprise office suites; built-in support lets many developers work out of the box
 * 2. DingTalk's API design matches PowerJob's expectations and serves as a reference implementation; other in-house account systems can follow the same flow:
 *  - PowerJob redirects to the third-party login page -> third party completes login -> redirect back to the PowerJob auth endpoint -> PowerJob parses the callback login info and links the user
 *
 * @author tjq
 * @since 2023/3/26
 */
@Service
public class DingTalkLoginService implements ThirdPartyLoginService {

    /*
    Configuration example:
    oms.auth.dingtalk.appkey=dinggzqqzqqzqqzqq
    oms.auth.dingtalk.appSecret=iY-FS8mzqqzqq_xEizqqzqqzqqzqqzqqzqqYEbkZOal
    oms.auth.dingtalk.callbackUrl=http://localhost:7700
     */

    /**
     * DingTalk application AppKey
     */
    @Value("${oms.auth.dingtalk.appkey:#{null}}")
    private String dingTalkAppKey;

    /**
     * DingTalk application AppSecret
     */
    @Value("${oms.auth.dingtalk.appSecret:#{null}}")
    private String dingTalkAppSecret;

    /**
     * Callback URL: the PowerJob front-end console (powerjob-console) address,
     * e.g. <a href="http://localhost:7700">LocalDemoCallbackUrl</a> when debugging locally,
     * or <a href="http://try.powerjob.tech">demoCallBackUrl</a> after deployment
     */
    @Value("${oms.auth.dingtalk.callbackUrl:#{null}}")
    private String dingTalkCallbackUrl;

    @Override
    public LoginTypeInfo loginType() {
        return new LoginTypeInfo()
                .setType(AuthConstants.ACCOUNT_TYPE_DING)
                .setName("DingTalk")
                ;
    }

    @Override
    @SneakyThrows
    public String generateLoginUrl(HttpServletRequest httpServletRequest) {
        if (StringUtils.isAnyEmpty(dingTalkAppKey, dingTalkAppSecret, dingTalkCallbackUrl)) {
            throw new IllegalArgumentException("please config 'oms.auth.dingtalk.appkey', 'oms.auth.dingtalk.appSecret' and 'oms.auth.dingtalk.callbackUrl' in properties!");
        }
        String urlString = URLEncoder.encode(dingTalkCallbackUrl, StandardCharsets.UTF_8.name());
        String url = "https://login.dingtalk.com/oauth2/auth?" +
                "redirect_uri=" + urlString +
                "&response_type=code" +
                "&client_id=" + dingTalkAppKey +
                "&scope=openid" +
                "&state=" + AuthConstants.ACCOUNT_TYPE_DING +
                "&prompt=consent";
        Loggers.WEB.info("[DingTalkBizLoginService] login url: {}", url);
        return url;
    }

    @Override
    @SneakyThrows
    public ThirdPartyUser login(ThirdPartyLoginRequest loginRequest) {
        try {
            com.aliyun.dingtalkoauth2_1_0.Client client = authClient();
            GetUserTokenRequest getUserTokenRequest = new GetUserTokenRequest()
                    // AppKey from the application's basic info (must be this application's AppKey)
                    .setClientId(dingTalkAppKey)
                    // AppSecret from the application's basic info (must be this application's AppSecret)
                    .setClientSecret(dingTalkAppSecret)
                    .setCode(loginRequest.getHttpServletRequest().getParameter("authCode"))
                    .setGrantType("authorization_code");
            GetUserTokenResponse getUserTokenResponse = client.getUserToken(getUserTokenRequest);
            // Personal access token of the user
            String accessToken = getUserTokenResponse.getBody().getAccessToken();
            // Query the DingTalk user profile
            final GetUserResponseBody dingUser = getUserinfo(accessToken);
            // Link the DingTalk user's unique ID to PowerJob's unique account key: username
            if (dingUser != null) {
                ThirdPartyUser bizUser = new ThirdPartyUser();
                bizUser.setUsername(dingUser.getUnionId());
                bizUser.setNick(dingUser.getNick());
                bizUser.setPhone(dingUser.getMobile());
                bizUser.setEmail(dingUser.getEmail());
                return bizUser;
            }
        } catch (Exception e) {
            Loggers.WEB.error("[DingTalkBizLoginService] login by dingTalk failed!", e);
            throw e;
        }
        throw new PowerJobException("login from dingTalk failed!");
    }

    /* The code below is copied from DingTalk's official samples */

    private static com.aliyun.dingtalkoauth2_1_0.Client authClient() throws Exception {
        Config config = new Config();
        config.protocol = "https";
        config.regionId = "central";
        return new com.aliyun.dingtalkoauth2_1_0.Client(config);
    }

    private static com.aliyun.dingtalkcontact_1_0.Client contactClient() throws Exception {
        Config config = new Config();
        config.protocol = "https";
        config.regionId = "central";
        return new com.aliyun.dingtalkcontact_1_0.Client(config);
    }

    private GetUserResponseBody getUserinfo(String accessToken) throws Exception {
        com.aliyun.dingtalkcontact_1_0.Client client = contactClient();
        GetUserHeaders getUserHeaders = new GetUserHeaders();
        getUserHeaders.xAcsDingtalkAccessToken = accessToken;
        // Fetch the user's personal info; to query the currently authorized user, unionId must be the literal "me"
        return client.getUserWithOptions("me", getUserHeaders, new RuntimeOptions()).getBody();
    }
}

View File

@ -0,0 +1,121 @@
package tech.powerjob.server.auth.login.impl;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.server.auth.common.AuthConstants;
import tech.powerjob.common.enums.ErrorCodes;
import tech.powerjob.server.auth.common.PowerJobAuthException;
import tech.powerjob.server.auth.login.*;
import tech.powerjob.server.common.Loggers;
import tech.powerjob.common.utils.DigestUtils;
import tech.powerjob.server.persistence.remote.model.PwjbUserInfoDO;
import tech.powerjob.server.persistence.remote.repository.PwjbUserInfoRepository;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import java.util.Map;
import java.util.Optional;
/**
* PowerJob 自带的登陆服务
* 和应用主框架无关依然属于第三方登录体系
*
* @author tjq
* @since 2023/3/20
*/
@Service
public class PwjbAccountLoginService implements ThirdPartyLoginService {
@Resource
private PwjbUserInfoRepository pwjbUserInfoRepository;
@Override
public LoginTypeInfo loginType() {
return new LoginTypeInfo()
.setType(AuthConstants.ACCOUNT_TYPE_POWER_JOB)
.setName("PowerJob Account")
;
}
@Override
public String generateLoginUrl(HttpServletRequest httpServletRequest) {
// 前端实现跳转服务端返回特殊指令
return AuthConstants.FE_REDIRECT_KEY.concat("powerjobLogin");
}
@Override
public ThirdPartyUser login(ThirdPartyLoginRequest loginRequest) {
final String loginInfo = loginRequest.getOriginParams();
if (StringUtils.isEmpty(loginInfo)) {
throw new IllegalArgumentException("can't find login Info");
}
Map<String, Object> loginInfoMap = JsonUtils.parseMap(loginInfo);
final String username = MapUtils.getString(loginInfoMap, AuthConstants.PARAM_KEY_USERNAME);
final String password = MapUtils.getString(loginInfoMap, AuthConstants.PARAM_KEY_PASSWORD);
final String encryption = MapUtils.getString(loginInfoMap, AuthConstants.PARAM_KEY_ENCRYPTION);
Loggers.WEB.debug("[PowerJobLoginService] username: {}, password: {}, encryption: {}", username, password, encryption);
if (StringUtils.isAnyEmpty(username, password)) {
Loggers.WEB.debug("[PowerJobLoginService] username or password is empty, login failed!");
throw new PowerJobAuthException(ErrorCodes.INVALID_REQUEST);
}
final Optional<PwjbUserInfoDO> userInfoOpt = pwjbUserInfoRepository.findByUsername(username);
if (!userInfoOpt.isPresent()) {
Loggers.WEB.debug("[PowerJobLoginService] can't find user by username: {}", username);
throw new PowerJobAuthException(ErrorCodes.USER_NOT_EXIST);
}
final PwjbUserInfoDO dbUser = userInfoOpt.get();
if (DigestUtils.rePassword(password, username).equals(dbUser.getPassword())) {
ThirdPartyUser bizUser = new ThirdPartyUser();
bizUser.setUsername(username);
// 回填第一次创建的信息
String extra = dbUser.getExtra();
if (StringUtils.isNotEmpty(extra)) {
ThirdPartyUser material = JsonUtils.parseObjectIgnoreException(extra, ThirdPartyUser.class);
if (material != null) {
bizUser.setEmail(material.getEmail());
bizUser.setNick(material.getNick());
bizUser.setPhone(material.getPhone());
bizUser.setWebHook(material.getWebHook());
}
}
// 下发加密的密码作为 JWT 的一部分方便处理改密码后失效的场景
TokenLoginVerifyInfo tokenLoginVerifyInfo = new TokenLoginVerifyInfo();
tokenLoginVerifyInfo.setEncryptedToken(dbUser.getPassword());
bizUser.setTokenLoginVerifyInfo(tokenLoginVerifyInfo);
return bizUser;
}
Loggers.WEB.debug("[PowerJobLoginService] user[{}]'s password is incorrect, login failed!", username);
throw new PowerJobException("password is incorrect");
}
@Override
public boolean tokenLoginVerify(String username, TokenLoginVerifyInfo tokenLoginVerifyInfo) {
if (tokenLoginVerifyInfo == null) {
return false;
}
final Optional<PwjbUserInfoDO> userInfoOpt = pwjbUserInfoRepository.findByUsername(username);
if (userInfoOpt.isPresent()) {
String dbPassword = userInfoOpt.get().getPassword();
return StringUtils.equals(dbPassword, tokenLoginVerifyInfo.getEncryptedToken());
}
return false;
}
}

View File

@ -0,0 +1,32 @@
package tech.powerjob.server.auth.service.login;
import lombok.Data;
import lombok.experimental.Accessors;
import javax.servlet.http.HttpServletRequest;
/**
 * A request to perform a login.
 *
 * @author tjq
 * @since 2024/2/10
 */
@Data
@Accessors(chain = true)
public class LoginRequest {

    /**
     * Login type
     */
    private String loginType;

    /**
     * Raw parameters: a server/front-end data channel for the third-party login
     * implementation; PowerJob itself does not interpret the content
     */
    private String originParams;

    /**
     * Original HTTP request; third-party callback parameters cannot be enumerated,
     * so the HttpServletRequest is passed through directly for extensibility
     */
    private transient HttpServletRequest httpServletRequest;
}

View File

@ -0,0 +1,48 @@
package tech.powerjob.server.auth.service.login;
import tech.powerjob.server.auth.PowerJobUser;
import tech.powerjob.server.auth.common.PowerJobAuthException;
import tech.powerjob.server.auth.login.LoginTypeInfo;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
import java.util.Optional;
/**
 * PowerJob login service.
 *
 * @author tjq
 * @since 2024/2/10
 */
public interface PowerJobLoginService {

    /**
     * Fetches all supported login types
     * @return every login type available
     */
    List<LoginTypeInfo> fetchSupportLoginTypes();

    /**
     * Fetches the third-party login URL
     * @param loginType login type
     * @param httpServletRequest HTTP request
     * @return redirect URL
     */
    String fetchThirdPartyLoginUrl(String loginType, HttpServletRequest httpServletRequest);

    /**
     * Performs the actual login; delegates to the third-party login service underneath
     * @param loginRequest login request
     * @return the logged-in PowerJobUser
     * @throws PowerJobAuthException thrown on authentication failure
     */
    PowerJobUser doLogin(LoginRequest loginRequest) throws PowerJobAuthException;

    /**
     * Resolves the logged-in user from the JWT info carried by the request
     * @param httpServletRequest httpServletRequest
     * @return the PowerJob user, if logged in
     */
    Optional<PowerJobUser> ifLogin(HttpServletRequest httpServletRequest);
}

View File

@ -0,0 +1,255 @@
package tech.powerjob.server.auth.service.login.impl;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import lombok.Data;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.server.auth.LoginUserHolder;
import tech.powerjob.server.auth.PowerJobUser;
import tech.powerjob.server.auth.common.AuthConstants;
import tech.powerjob.common.enums.ErrorCodes;
import tech.powerjob.server.auth.common.PowerJobAuthException;
import tech.powerjob.server.auth.common.utils.HttpServletUtils;
import tech.powerjob.server.auth.jwt.JwtService;
import tech.powerjob.server.auth.login.*;
import tech.powerjob.server.auth.service.login.LoginRequest;
import tech.powerjob.server.auth.service.login.PowerJobLoginService;
import tech.powerjob.server.common.Loggers;
import tech.powerjob.common.enums.SwitchableStatus;
import tech.powerjob.server.persistence.remote.model.UserInfoDO;
import tech.powerjob.server.persistence.remote.repository.UserInfoRepository;
import javax.servlet.http.HttpServletRequest;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
/**
 * PowerJob login service implementation.
 *
 * @author tjq
 * @since 2024/2/10
 */
@Slf4j
@Service
public class PowerJobLoginServiceImpl implements PowerJobLoginService {

    private final JwtService jwtService;
    private final UserInfoRepository userInfoRepository;
    /**
     * login-type code -> registered third-party login service
     */
    private final Map<String, ThirdPartyLoginService> code2ThirdPartyLoginService;

    @Autowired
    public PowerJobLoginServiceImpl(JwtService jwtService, UserInfoRepository userInfoRepository, List<ThirdPartyLoginService> thirdPartyLoginServices) {
        this.jwtService = jwtService;
        this.userInfoRepository = userInfoRepository;

        code2ThirdPartyLoginService = Maps.newHashMap();
        thirdPartyLoginServices.forEach(s -> {
            code2ThirdPartyLoginService.put(s.loginType().getType(), s);
            log.info("[PowerJobLoginService] register ThirdPartyLoginService: {}", s.loginType());
        });
    }

    @Override
    public List<LoginTypeInfo> fetchSupportLoginTypes() {
        return Lists.newArrayList(code2ThirdPartyLoginService.values()).stream().map(ThirdPartyLoginService::loginType).collect(Collectors.toList());
    }

    @Override
    public String fetchThirdPartyLoginUrl(String type, HttpServletRequest httpServletRequest) {
        final ThirdPartyLoginService thirdPartyLoginService = fetchBizLoginService(type);
        return thirdPartyLoginService.generateLoginUrl(httpServletRequest);
    }

    /**
     * Delegates to the matching third-party login service, then syncs the user into
     * PowerJob's own user table and issues a JWT.
     */
    @Override
    public PowerJobUser doLogin(LoginRequest loginRequest) throws PowerJobAuthException {
        final String loginType = loginRequest.getLoginType();
        final ThirdPartyLoginService thirdPartyLoginService = fetchBizLoginService(loginType);

        ThirdPartyLoginRequest thirdPartyLoginRequest = new ThirdPartyLoginRequest()
                .setOriginParams(loginRequest.getOriginParams())
                .setHttpServletRequest(loginRequest.getHttpServletRequest());
        final ThirdPartyUser bizUser = thirdPartyLoginService.login(thirdPartyLoginRequest);

        // DB username is namespaced by login type to avoid collisions across account systems.
        String dbUserName = String.format("%s_%s", loginType, bizUser.getUsername());
        Optional<UserInfoDO> powerJobUserOpt = userInfoRepository.findByUsername(dbUserName);

        // If the user does not exist yet, create it synchronously first.
        if (!powerJobUserOpt.isPresent()) {
            UserInfoDO newUser = new UserInfoDO();
            newUser.setUsername(dbUserName);
            // Record the account system type.
            newUser.setAccountType(loginType);
            newUser.setOriginUsername(bizUser.getUsername());
            newUser.setTokenLoginVerifyInfo(JsonUtils.toJSONString(bizUser.getTokenLoginVerifyInfo()));
            // Sync profile material.
            newUser.setEmail(bizUser.getEmail());
            newUser.setPhone(bizUser.getPhone());
            newUser.setNick(bizUser.getNick());
            newUser.setWebHook(bizUser.getWebHook());
            newUser.setExtra(bizUser.getExtra());
            Loggers.WEB.info("[PowerJobLoginService] sync user to PowerJobUserSystem: {}", dbUserName);
            userInfoRepository.saveAndFlush(newUser);

            powerJobUserOpt = userInfoRepository.findByUsername(dbUserName);
        } else {
            UserInfoDO dbUserInfoDO = powerJobUserOpt.get();
            checkUserStatus(dbUserInfoDO);
            // Refresh the secondary-verification TOKEN info.
            dbUserInfoDO.setTokenLoginVerifyInfo(JsonUtils.toJSONString(bizUser.getTokenLoginVerifyInfo()));
            dbUserInfoDO.setGmtModified(new Date());
            userInfoRepository.saveAndFlush(dbUserInfoDO);
        }

        PowerJobUser ret = new PowerJobUser();
        // Should always be present at this point.
        if (powerJobUserOpt.isPresent()) {
            final UserInfoDO dbUser = powerJobUserOpt.get();
            BeanUtils.copyProperties(dbUser, ret);
            ret.setUsername(dbUserName);
        }

        fillJwt(ret, Optional.ofNullable(bizUser.getTokenLoginVerifyInfo()).map(TokenLoginVerifyInfo::getEncryptedToken).orElse(null));
        return ret;
    }

    @Override
    public Optional<PowerJobUser> ifLogin(HttpServletRequest httpServletRequest) {
        final Optional<JwtBody> jwtBodyOpt = parseJwt(httpServletRequest);
        if (!jwtBodyOpt.isPresent()) {
            return Optional.empty();
        }

        JwtBody jwtBody = jwtBodyOpt.get();

        Optional<UserInfoDO> dbUserInfoOpt = userInfoRepository.findByUsername(jwtBody.getUsername());
        if (!dbUserInfoOpt.isPresent()) {
            throw new PowerJobAuthException(ErrorCodes.USER_NOT_EXIST);
        }
        UserInfoDO dbUser = dbUserInfoOpt.get();
        checkUserStatus(dbUser);

        PowerJobUser powerJobUser = new PowerJobUser();

        String tokenLoginVerifyInfoStr = dbUser.getTokenLoginVerifyInfo();
        TokenLoginVerifyInfo tokenLoginVerifyInfo = Optional.ofNullable(tokenLoginVerifyInfoStr).map(x -> JsonUtils.parseObjectIgnoreException(x, TokenLoginVerifyInfo.class)).orElse(new TokenLoginVerifyInfo());

        // A non-empty encryptedToken in DB means secondary verification is required.
        if (StringUtils.isNotEmpty(tokenLoginVerifyInfo.getEncryptedToken())) {
            if (!StringUtils.equals(jwtBody.getEncryptedToken(), tokenLoginVerifyInfo.getEncryptedToken())) {
                throw new PowerJobAuthException(ErrorCodes.INVALID_TOKEN);
            }
            // FIX: resolve via fetchBizLoginService so an unregistered account type fails
            // with a clear auth error instead of a NullPointerException on the raw map get.
            ThirdPartyLoginService thirdPartyLoginService = fetchBizLoginService(dbUser.getAccountType());
            boolean tokenLoginVerifyOk = thirdPartyLoginService.tokenLoginVerify(dbUser.getOriginUsername(), tokenLoginVerifyInfo);
            if (!tokenLoginVerifyOk) {
                throw new PowerJobAuthException(ErrorCodes.USER_AUTH_FAILED);
            }
        }

        BeanUtils.copyProperties(dbUser, powerJobUser);

        // Compatibility with call sites that decide login state purely via ifLogin.
        LoginUserHolder.set(powerJobUser);

        return Optional.of(powerJobUser);
    }

    /**
     * Checks the user's account status; disabled users are rejected.
     * @param dbUser user
     */
    private void checkUserStatus(UserInfoDO dbUser) {
        int accountStatus = Optional.ofNullable(dbUser.getStatus()).orElse(SwitchableStatus.ENABLE.getV());
        if (accountStatus == SwitchableStatus.DISABLE.getV()) {
            throw new PowerJobAuthException(ErrorCodes.USER_DISABLED);
        }
    }

    /**
     * Resolves the ThirdPartyLoginService for a login type, failing fast when none is registered.
     */
    private ThirdPartyLoginService fetchBizLoginService(String loginType) {
        final ThirdPartyLoginService loginService = code2ThirdPartyLoginService.get(loginType);
        if (loginService == null) {
            throw new PowerJobAuthException(ErrorCodes.INVALID_REQUEST, "can't find ThirdPartyLoginService by type: " + loginType);
        }
        return loginService;
    }

    private void fillJwt(PowerJobUser powerJobUser, String encryptedToken) {
        // Do not put userId into the JWT: it would be easy to enumerate by brute force.
        JwtBody jwtBody = new JwtBody();
        jwtBody.setUsername(powerJobUser.getUsername());
        if (StringUtils.isNotEmpty(encryptedToken)) {
            jwtBody.setEncryptedToken(encryptedToken);
        }
        Map<String, Object> jwtMap = JsonUtils.parseMap(JsonUtils.toJSONString(jwtBody));
        powerJobUser.setJwtToken(jwtService.build(jwtMap, null));
    }

    @SneakyThrows
    private Optional<JwtBody> parseJwt(HttpServletRequest httpServletRequest) {
        // The JWT can be fetched from both header and cookie.
        String jwtStr = HttpServletUtils.fetchFromHeader(AuthConstants.JWT_NAME, httpServletRequest);
        if (StringUtils.isEmpty(jwtStr)) {
            jwtStr = HttpServletUtils.fetchFromHeader(AuthConstants.OLD_JWT_NAME, httpServletRequest);
        }

        /*
        During development, cookies cannot easily cross origins, so the JWT is
        transported via header for now.
        if (StringUtils.isEmpty(jwtStr)) {
            for (Cookie cookie : Optional.ofNullable(httpServletRequest.getCookies()).orElse(new Cookie[]{})) {
                if (cookie.getName().equals(AuthConstants.JWT_NAME)) {
                    jwtStr = cookie.getValue();
                }
            }
        }
         */

        if (StringUtils.isEmpty(jwtStr)) {
            return Optional.empty();
        }
        final Map<String, Object> jwtBodyMap = jwtService.parse(jwtStr, null).getResult();
        if (MapUtils.isEmpty(jwtBodyMap)) {
            return Optional.empty();
        }
        return Optional.ofNullable(JsonUtils.parseObject(JsonUtils.toJSONString(jwtBodyMap), JwtBody.class));
    }

    /**
     * Payload carried inside the JWT.
     */
    @Data
    static class JwtBody implements Serializable {

        private String username;

        private String encryptedToken;
    }
}

View File

@ -0,0 +1,64 @@
package tech.powerjob.server.auth.service.permission;
import tech.powerjob.server.auth.Permission;
import tech.powerjob.server.auth.Role;
import tech.powerjob.server.auth.RoleScope;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * PowerJob authorization service.
 *
 * @author tjq
 * @since 2024/2/11
 */
public interface PowerJobPermissionService {

    /**
     * Checks whether a user holds the required permission
     * @param userId userId
     * @param roleScope permission scope
     * @param target target ID of the permission
     * @param permission required permission
     * @return whether the user has the permission
     */
    boolean hasPermission(Long userId, RoleScope roleScope, Long target, Permission permission);

    /**
     * Grants a role to a user
     * @param roleScope permission scope
     * @param target permission target
     * @param userId user ID
     * @param role role
     * @param extra extra info
     */
    void grantRole(RoleScope roleScope, Long target, Long userId, Role role, String extra);

    /**
     * Revokes a user's role
     * @param roleScope permission scope
     * @param target permission target
     * @param userId user ID
     * @param role role
     */
    void retrieveRole(RoleScope roleScope, Long target, Long userId, Role role);

    /**
     * Fetches the users holding each role on a target
     * @param roleScope role scope
     * @param target target
     * @return role -> user set; a SET is used because users can be granted twice
     *         (e.g. an APP/namespace creator is auto-granted ADMIN, and adding oneself
     *         as admin in the console creates a second, overlapping grant)
     */
    Map<Role, Set<Long>> fetchUserWithPermissions(RoleScope roleScope, Long target);

    /**
     * Fetches the targets a user has roles on
     * @param roleScope role scope
     * @param userId user ID
     * @return result
     */
    Map<Role, List<Long>> fetchUserHadPermissionTargets(RoleScope roleScope, Long userId);
}

View File

@ -0,0 +1,175 @@
package tech.powerjob.server.auth.service.permission;
import com.google.common.collect.*;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import tech.powerjob.server.auth.Permission;
import tech.powerjob.server.auth.Role;
import tech.powerjob.server.auth.RoleScope;
import tech.powerjob.server.persistence.remote.model.AppInfoDO;
import tech.powerjob.server.persistence.remote.model.UserRoleDO;
import tech.powerjob.server.persistence.remote.repository.AppInfoRepository;
import tech.powerjob.server.persistence.remote.repository.UserRoleRepository;
import javax.annotation.Resource;
import java.util.*;
/**
 * PowerJobPermissionService implementation.
 *
 * @author tjq
 * @since 2024/2/11
 */
@Slf4j
@Service
public class PowerJobPermissionServiceImpl implements PowerJobPermissionService {

    @Resource
    private AppInfoRepository appInfoRepository;
    @Resource
    private UserRoleRepository userRoleRepository;

    @Override
    public boolean hasPermission(Long userId, RoleScope roleScope, Long target, Permission requiredPermission) {
        final List<UserRoleDO> userRoleList = Optional.ofNullable(userRoleRepository.findAllByUserId(userId)).orElse(Collections.emptyList());

        // Bucket the user's roles by scope.
        Multimap<Long, Role> appId2Role = ArrayListMultimap.create();
        Multimap<Long, Role> namespaceId2Role = ArrayListMultimap.create();
        List<Role> globalRoles = Lists.newArrayList();

        for (UserRoleDO userRole : userRoleList) {
            final Role role = Role.of(userRole.getRole());

            // Handle global permissions: a global ADMIN may do anything.
            if (RoleScope.GLOBAL.getV() == userRole.getScope()) {
                if (Role.ADMIN.equals(role)) {
                    return true;
                }
                globalRoles.add(role);
            }

            if (RoleScope.NAMESPACE.getV() == userRole.getScope()) {
                namespaceId2Role.put(userRole.getTarget(), role);
            }
            if (RoleScope.APP.getV() == userRole.getScope()) {
                appId2Role.put(userRole.getTarget(), role);
            }
        }

        // Early pass for the required permission: creation flows have no appId/namespaceId yet.
        if (requiredPermission == Permission.NONE) {
            return true;
        }

        // Check global pass-through permissions.
        for (Role role : globalRoles) {
            if (role.getPermissions().contains(requiredPermission)) {
                return true;
            }
        }

        // No super-admin rights; check ordinary scoped permissions.
        if (RoleScope.APP.equals(roleScope)) {
            return checkAppPermission(target, requiredPermission, appId2Role, namespaceId2Role);
        }

        if (RoleScope.NAMESPACE.equals(roleScope)) {
            return checkNamespacePermission(target, requiredPermission, namespaceId2Role);
        }

        return false;
    }

    @Override
    public void grantRole(RoleScope roleScope, Long target, Long userId, Role role, String extra) {
        UserRoleDO userRoleDO = new UserRoleDO();
        userRoleDO.setGmtCreate(new Date());
        userRoleDO.setGmtModified(new Date());
        userRoleDO.setExtra(extra);
        userRoleDO.setScope(roleScope.getV());
        userRoleDO.setTarget(target);
        userRoleDO.setUserId(userId);
        userRoleDO.setRole(role.getV());

        userRoleRepository.saveAndFlush(userRoleDO);
        log.info("[PowerJobPermissionService] [grantPermission] saveAndFlush userRole successfully: {}", userRoleDO);
    }

    @Override
    public void retrieveRole(RoleScope roleScope, Long target, Long userId, Role role) {
        List<UserRoleDO> originUserRole = userRoleRepository.findAllByScopeAndTargetAndRoleAndUserId(roleScope.getV(), target, role.getV(), userId);
        log.info("[PowerJobPermissionService] [retrievePermission] origin rule: {}", originUserRole);
        Optional.ofNullable(originUserRole).orElse(Collections.emptyList()).forEach(r -> {
            userRoleRepository.deleteById(r.getId());
            log.info("[PowerJobPermissionService] [retrievePermission] delete UserRole: {}", r);
        });
    }

    @Override
    public Map<Role, Set<Long>> fetchUserWithPermissions(RoleScope roleScope, Long target) {
        List<UserRoleDO> permissionUserList = userRoleRepository.findAllByScopeAndTarget(roleScope.getV(), target);
        Map<Role, Set<Long>> ret = Maps.newHashMap();
        Optional.ofNullable(permissionUserList).orElse(Collections.emptyList()).forEach(userRoleDO -> {
            Role role = Role.of(userRoleDO.getRole());
            Set<Long> userIds = ret.computeIfAbsent(role, ignore -> Sets.newHashSet());
            userIds.add(userRoleDO.getUserId());
        });
        return ret;
    }

    @Override
    public Map<Role, List<Long>> fetchUserHadPermissionTargets(RoleScope roleScope, Long userId) {
        Map<Role, List<Long>> ret = Maps.newHashMap();
        List<UserRoleDO> userRoleDOList = userRoleRepository.findAllByUserIdAndScope(userId, roleScope.getV());
        Optional.ofNullable(userRoleDOList).orElse(Collections.emptyList()).forEach(r -> {
            Role role = Role.of(r.getRole());
            List<Long> targetIds = ret.computeIfAbsent(role, ignore -> Lists.newArrayList());
            targetIds.add(r.getTarget());
        });
        return ret;
    }

    /**
     * Checks app-scoped permission, falling back to the roles on the app's namespace.
     */
    private boolean checkAppPermission(Long targetId, Permission requiredPermission, Multimap<Long, Role> appId2Role, Multimap<Long, Role> namespaceId2Role) {
        final Collection<Role> appRoles = appId2Role.get(targetId);
        for (Role role : appRoles) {
            if (role.getPermissions().contains(requiredPermission)) {
                return true;
            }
        }

        // Check namespace pass-through permission.
        Optional<AppInfoDO> appInfoOpt = appInfoRepository.findById(targetId);
        if (!appInfoOpt.isPresent()) {
            throw new IllegalArgumentException("can't find appInfo by appId in permission check: " + targetId);
        }
        Long namespaceId = Optional.ofNullable(appInfoOpt.get().getNamespaceId()).orElse(-1L);
        Collection<Role> namespaceRoles = namespaceId2Role.get(namespaceId);
        for (Role role : namespaceRoles) {
            if (role.getPermissions().contains(requiredPermission)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks namespace-scoped permission.
     */
    private boolean checkNamespacePermission(Long targetId, Permission requiredPermission, Multimap<Long, Role> namespaceId2Role) {
        Collection<Role> namespaceRoles = namespaceId2Role.get(targetId);
        for (Role role : namespaceRoles) {
            if (role.getPermissions().contains(requiredPermission)) {
                return true;
            }
        }
        return false;
    }
}

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob-server</artifactId> <artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.3.9</version> <version>5.1.0</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -0,0 +1,18 @@
package tech.powerjob.server.common;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Centralized logger definitions.
 *
 * @author tjq
 * @since 2023/3/25
 */
public class Loggers {

    /**
     * Unified logger for the web layer
     */
    public static final Logger WEB = LoggerFactory.getLogger("P_SERVER_LOGGER_WEB");
}

View File

@ -3,6 +3,8 @@ package tech.powerjob.server.common;
import com.google.common.base.Joiner; import com.google.common.base.Joiner;
import com.google.common.base.Splitter; import com.google.common.base.Splitter;
import java.util.Map;
/** /**
* Splitter & Joiner * Splitter & Joiner
* *
@ -16,4 +18,9 @@ public class SJ {
public static final Joiner MONITOR_JOINER = Joiner.on("|").useForNull("-"); public static final Joiner MONITOR_JOINER = Joiner.on("|").useForNull("-");
private static final Splitter.MapSplitter MAP_SPLITTER = Splitter.onPattern(";").withKeyValueSeparator(":");
public static Map<String, String> splitKvString(String kvString) {
return MAP_SPLITTER.split(kvString);
}
} }

View File

@ -0,0 +1,92 @@
package tech.powerjob.server.common.utils;
import lombok.SneakyThrows;
import tech.powerjob.common.utils.DigestUtils;
import javax.crypto.Cipher;
import javax.crypto.spec.GCMParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import java.util.Base64;
/**
 * AES-256-GCM string encryption utility.
 * Output layout: Base64( nonce(12 bytes) || ciphertext+tag(16 bytes) ).
 */
public class AESUtil {

    private static final String ALGORITHM = "AES";
    private static final String TRANSFORMATION = "AES/GCM/NoPadding";
    private static final int KEY_SIZE = 256; // AES 256-bit
    private static final int GCM_NONCE_LENGTH = 12; // GCM nonce length (12 bytes)
    private static final int GCM_TAG_LENGTH = 16; // GCM authentication tag length (16 bytes)

    // SecureRandom instance used to generate nonces
    private static final SecureRandom secureRandom = new SecureRandom();

    /**
     * Builds the AES key.
     *
     * @param key key string; ideally exactly 32 bytes (256 bits)
     * @return SecretKeySpec instance
     */
    private static SecretKeySpec getKey(String key) {
        byte[] keyBytes = key.getBytes(StandardCharsets.UTF_8);
        // If not exactly 32 bytes (shorter OR longer), derive the key from the MD5 digest instead.
        // NOTE(review): assumes DigestUtils.md5 returns a 32-char hex string so the result is 32 bytes — confirm.
        if (keyBytes.length != KEY_SIZE / 8) {
            keyBytes = DigestUtils.md5(key).getBytes(StandardCharsets.UTF_8);
        }
        return new SecretKeySpec(keyBytes, ALGORITHM);
    }

    /**
     * Encrypts data.
     *
     * @param data plaintext to encrypt
     * @param key  encryption key
     * @return encrypted data (Base64-encoded, with the nonce prepended)
     */
    @SneakyThrows
    public static String encrypt(String data, String key) {
        byte[] nonce = new byte[GCM_NONCE_LENGTH];
        secureRandom.nextBytes(nonce); // generate a random nonce per encryption

        Cipher cipher = Cipher.getInstance(TRANSFORMATION);
        GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH * 8, nonce);
        cipher.init(Cipher.ENCRYPT_MODE, getKey(key), gcmParameterSpec);

        byte[] encryptedData = cipher.doFinal(data.getBytes(StandardCharsets.UTF_8));

        // Concatenate nonce and ciphertext, then Base64-encode the pair
        byte[] combinedData = new byte[nonce.length + encryptedData.length];
        System.arraycopy(nonce, 0, combinedData, 0, nonce.length);
        System.arraycopy(encryptedData, 0, combinedData, nonce.length, encryptedData.length);

        return Base64.getEncoder().encodeToString(combinedData);
    }

    /**
     * Decrypts data.
     *
     * @param encryptedData data to decrypt (Base64-encoded, nonce included)
     * @param key           decryption key
     * @return decrypted plaintext
     */
    @SneakyThrows
    public static String decrypt(String encryptedData, String key) {
        byte[] combinedData = Base64.getDecoder().decode(encryptedData);

        // Extract the nonce
        byte[] nonce = new byte[GCM_NONCE_LENGTH];
        System.arraycopy(combinedData, 0, nonce, 0, nonce.length);

        // Extract the actual ciphertext (+ auth tag)
        byte[] encryptedText = new byte[combinedData.length - nonce.length];
        System.arraycopy(combinedData, nonce.length, encryptedText, 0, encryptedText.length);

        Cipher cipher = Cipher.getInstance(TRANSFORMATION);
        GCMParameterSpec gcmParameterSpec = new GCMParameterSpec(GCM_TAG_LENGTH * 8, nonce);
        cipher.init(Cipher.DECRYPT_MODE, getKey(key), gcmParameterSpec);

        byte[] decryptedData = cipher.doFinal(encryptedText);

        return new String(decryptedData, StandardCharsets.UTF_8);
    }
}

View File

@ -0,0 +1,28 @@
package tech.powerjob.server.common.utils;
import org.junit.jupiter.api.Test;
/**
* AESUtilTest
*
* @author tjq
* @since 2024/8/10
*/
class AESUtilTest {
@Test
void testAes() throws Exception {
String sk = "ChinaNo.1_ChinaNo.1_ChinaNo.1";
String txt = "kyksjdfh";
String encrypt = AESUtil.encrypt(txt, sk);
System.out.println(encrypt);
String decrypt = AESUtil.decrypt(encrypt, sk);
System.out.println(decrypt);
assert txt.equals(decrypt);
}
}

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob-server</artifactId> <artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.3.9</version> <version>5.1.0</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -37,7 +37,7 @@ import tech.powerjob.common.utils.NetUtils;
import tech.powerjob.common.utils.SegmentLock; import tech.powerjob.common.utils.SegmentLock;
import tech.powerjob.remote.framework.base.URL; import tech.powerjob.remote.framework.base.URL;
import tech.powerjob.server.common.constants.ContainerSourceType; import tech.powerjob.server.common.constants.ContainerSourceType;
import tech.powerjob.server.common.constants.SwitchableStatus; import tech.powerjob.common.enums.SwitchableStatus;
import tech.powerjob.server.common.module.WorkerInfo; import tech.powerjob.server.common.module.WorkerInfo;
import tech.powerjob.server.common.utils.OmsFileUtils; import tech.powerjob.server.common.utils.OmsFileUtils;
import tech.powerjob.server.extension.LockService; import tech.powerjob.server.extension.LockService;
@ -336,7 +336,6 @@ public class ContainerService {
sb.append("WARN: there exists multi version container now, please redeploy to fix this problem").append(System.lineSeparator()); sb.append("WARN: there exists multi version container now, please redeploy to fix this problem").append(System.lineSeparator());
} }
sb.append("divisive version ==> ").append(System.lineSeparator());
version2DeployedContainerInfoList.asMap().forEach((version, deployedContainerInfos) -> { version2DeployedContainerInfoList.asMap().forEach((version, deployedContainerInfos) -> {
sb.append("[version] ").append(version).append(System.lineSeparator()); sb.append("[version] ").append(version).append(System.lineSeparator());
deployedContainerInfos.forEach(deployedContainerInfo -> sb.append(String.format("Address: %s, DeployedTime: %s", deployedContainerInfo.getWorkerAddress(), CommonUtils.formatTime(deployedContainerInfo.getDeployedTime()))).append(System.lineSeparator())); deployedContainerInfos.forEach(deployedContainerInfo -> sb.append(String.format("Address: %s, DeployedTime: %s", deployedContainerInfo.getWorkerAddress(), CommonUtils.formatTime(deployedContainerInfo.getDeployedTime()))).append(System.lineSeparator()));

View File

@ -12,7 +12,7 @@ import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.common.utils.NetUtils; import tech.powerjob.common.utils.NetUtils;
import tech.powerjob.remote.framework.actor.Handler; import tech.powerjob.remote.framework.actor.Handler;
import tech.powerjob.remote.framework.actor.ProcessType; import tech.powerjob.remote.framework.actor.ProcessType;
import tech.powerjob.server.common.constants.SwitchableStatus; import tech.powerjob.common.enums.SwitchableStatus;
import tech.powerjob.server.common.module.WorkerInfo; import tech.powerjob.server.common.module.WorkerInfo;
import tech.powerjob.server.common.utils.SpringUtils; import tech.powerjob.server.common.utils.SpringUtils;
import tech.powerjob.server.monitor.MonitorService; import tech.powerjob.server.monitor.MonitorService;

View File

@ -12,7 +12,7 @@ import tech.powerjob.common.enums.InstanceStatus;
import tech.powerjob.common.enums.TimeExpressionType; import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.enums.WorkflowInstanceStatus; import tech.powerjob.common.enums.WorkflowInstanceStatus;
import tech.powerjob.server.common.Holder; import tech.powerjob.server.common.Holder;
import tech.powerjob.server.common.constants.SwitchableStatus; import tech.powerjob.common.enums.SwitchableStatus;
import tech.powerjob.server.core.DispatchService; import tech.powerjob.server.core.DispatchService;
import tech.powerjob.server.core.instance.InstanceManager; import tech.powerjob.server.core.instance.InstanceManager;
import tech.powerjob.server.core.workflow.WorkflowInstanceManager; import tech.powerjob.server.core.workflow.WorkflowInstanceManager;

Some files were not shown because too many files have changed in this diff Show More