[release] Merge branch 'v3.1.1'

commit 4cbbc0a2c1 by tjq, 2020-06-24 22:12:43 +08:00
45 changed files with 573 additions and 377 deletions

View File

@@ -2,14 +2,14 @@
  Navicat Premium Data Transfer
  Source Server Type    : MySQL
- Source Server Version : 50724
+ Source Server Version : 80020
- Source Schema         : oms-product
+ Source Schema         : powerjob-product
  Target Server Type    : MySQL
- Target Server Version : 50724
+ Target Server Version : 80020
  File Encoding         : 65001
- Date: 07/06/2020 11:11:47
+ Date: 23/06/2020 22:30:06
 */
 SET NAMES utf8mb4;
@@ -20,130 +20,131 @@ SET FOREIGN_KEY_CHECKS = 0;
 -- ----------------------------
 DROP TABLE IF EXISTS `app_info`;
 CREATE TABLE `app_info` (
-  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `id` bigint NOT NULL AUTO_INCREMENT,
   `app_name` varchar(255) DEFAULT NULL,
   `current_server` varchar(255) DEFAULT NULL,
+  `description` varchar(255) DEFAULT NULL,
   `gmt_create` datetime(6) DEFAULT NULL,
   `gmt_modified` datetime(6) DEFAULT NULL,
+  `password` varchar(255) DEFAULT NULL,
   PRIMARY KEY (`id`),
   UNIQUE KEY `appNameUK` (`app_name`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
 -- ----------------------------
 -- Table structure for container_info
 -- ----------------------------
 DROP TABLE IF EXISTS `container_info`;
 CREATE TABLE `container_info` (
-  `id` bigint(20) NOT NULL AUTO_INCREMENT,
-  `app_id` bigint(20) DEFAULT NULL,
+  `id` bigint NOT NULL AUTO_INCREMENT,
+  `app_id` bigint DEFAULT NULL,
   `container_name` varchar(255) DEFAULT NULL,
   `gmt_create` datetime(6) DEFAULT NULL,
   `gmt_modified` datetime(6) DEFAULT NULL,
   `last_deploy_time` datetime(6) DEFAULT NULL,
   `source_info` varchar(255) DEFAULT NULL,
-  `source_type` int(11) DEFAULT NULL,
-  `status` int(11) DEFAULT NULL,
+  `source_type` int DEFAULT NULL,
+  `status` int DEFAULT NULL,
   `version` varchar(255) DEFAULT NULL,
   PRIMARY KEY (`id`),
   KEY `IDX8hixyaktlnwil2w9up6b0p898` (`app_id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
 -- ----------------------------
 -- Table structure for instance_info
 -- ----------------------------
 DROP TABLE IF EXISTS `instance_info`;
 CREATE TABLE `instance_info` (
-  `id` bigint(20) NOT NULL AUTO_INCREMENT,
-  `actual_trigger_time` bigint(20) DEFAULT NULL,
-  `app_id` bigint(20) DEFAULT NULL,
-  `expected_trigger_time` bigint(20) DEFAULT NULL,
-  `finished_time` bigint(20) DEFAULT NULL,
+  `id` bigint NOT NULL AUTO_INCREMENT,
+  `actual_trigger_time` bigint DEFAULT NULL,
+  `app_id` bigint DEFAULT NULL,
+  `expected_trigger_time` bigint DEFAULT NULL,
+  `finished_time` bigint DEFAULT NULL,
   `gmt_create` datetime(6) DEFAULT NULL,
   `gmt_modified` datetime(6) DEFAULT NULL,
-  `instance_id` bigint(20) DEFAULT NULL,
+  `instance_id` bigint DEFAULT NULL,
   `instance_params` text,
-  `job_id` bigint(20) DEFAULT NULL,
+  `job_id` bigint DEFAULT NULL,
+  `last_report_time` bigint DEFAULT NULL,
   `result` text,
-  `running_times` bigint(20) DEFAULT NULL,
-  `status` int(11) DEFAULT NULL,
+  `running_times` bigint DEFAULT NULL,
+  `status` int DEFAULT NULL,
   `task_tracker_address` varchar(255) DEFAULT NULL,
-  `type` int(11) DEFAULT NULL,
-  `wf_instance_id` bigint(20) DEFAULT NULL,
+  `type` int DEFAULT NULL,
+  `wf_instance_id` bigint DEFAULT NULL,
   PRIMARY KEY (`id`),
   KEY `IDX5b1nhpe5je7gc5s1ur200njr7` (`job_id`),
   KEY `IDXjnji5lrr195kswk6f7mfhinrs` (`app_id`),
   KEY `IDXa98hq3yu0l863wuotdjl7noum` (`instance_id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
 -- ----------------------------
 -- Table structure for job_info
 -- ----------------------------
 DROP TABLE IF EXISTS `job_info`;
 CREATE TABLE `job_info` (
-  `id` bigint(20) NOT NULL AUTO_INCREMENT,
-  `app_id` bigint(20) DEFAULT NULL,
-  `concurrency` int(11) DEFAULT NULL,
+  `id` bigint NOT NULL AUTO_INCREMENT,
+  `app_id` bigint DEFAULT NULL,
+  `concurrency` int DEFAULT NULL,
   `designated_workers` varchar(255) DEFAULT NULL,
-  `execute_type` int(11) DEFAULT NULL,
+  `execute_type` int DEFAULT NULL,
   `gmt_create` datetime(6) DEFAULT NULL,
   `gmt_modified` datetime(6) DEFAULT NULL,
-  `instance_retry_num` int(11) DEFAULT NULL,
-  `instance_time_limit` bigint(20) DEFAULT NULL,
+  `instance_retry_num` int DEFAULT NULL,
+  `instance_time_limit` bigint DEFAULT NULL,
   `job_description` varchar(255) DEFAULT NULL,
   `job_name` varchar(255) DEFAULT NULL,
   `job_params` varchar(255) DEFAULT NULL,
-  `max_instance_num` int(11) DEFAULT NULL,
-  `max_worker_count` int(11) DEFAULT NULL,
+  `max_instance_num` int DEFAULT NULL,
+  `max_worker_count` int DEFAULT NULL,
   `min_cpu_cores` double NOT NULL,
   `min_disk_space` double NOT NULL,
   `min_memory_space` double NOT NULL,
-  `next_trigger_time` bigint(20) DEFAULT NULL,
+  `next_trigger_time` bigint DEFAULT NULL,
   `notify_user_ids` varchar(255) DEFAULT NULL,
   `processor_info` text,
-  `processor_type` int(11) DEFAULT NULL,
-  `status` int(11) DEFAULT NULL,
-  `task_retry_num` int(11) DEFAULT NULL,
+  `processor_type` int DEFAULT NULL,
+  `status` int DEFAULT NULL,
+  `task_retry_num` int DEFAULT NULL,
   `time_expression` varchar(255) DEFAULT NULL,
-  `time_expression_type` int(11) DEFAULT NULL,
+  `time_expression_type` int DEFAULT NULL,
   PRIMARY KEY (`id`),
   KEY `IDXk2xprmn3lldmlcb52i36udll1` (`app_id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
 -- ----------------------------
 -- Table structure for oms_lock
 -- ----------------------------
 DROP TABLE IF EXISTS `oms_lock`;
 CREATE TABLE `oms_lock` (
-  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `id` bigint NOT NULL AUTO_INCREMENT,
   `gmt_create` datetime(6) DEFAULT NULL,
   `gmt_modified` datetime(6) DEFAULT NULL,
   `lock_name` varchar(255) DEFAULT NULL,
-  `max_lock_time` bigint(20) DEFAULT NULL,
+  `max_lock_time` bigint DEFAULT NULL,
   `ownerip` varchar(255) DEFAULT NULL,
   PRIMARY KEY (`id`),
   UNIQUE KEY `lockNameUK` (`lock_name`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
 -- ----------------------------
 -- Table structure for server_info
 -- ----------------------------
 DROP TABLE IF EXISTS `server_info`;
 CREATE TABLE `server_info` (
-  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `id` bigint NOT NULL AUTO_INCREMENT,
   `gmt_create` datetime(6) DEFAULT NULL,
   `gmt_modified` datetime(6) DEFAULT NULL,
   `ip` varchar(255) DEFAULT NULL,
   PRIMARY KEY (`id`),
   UNIQUE KEY `UKtk8ytgpl7mpukhnvhbl82kgvy` (`ip`)
-) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4;
+) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
 -- ----------------------------
 -- Table structure for user_info
 -- ----------------------------
 DROP TABLE IF EXISTS `user_info`;
 CREATE TABLE `user_info` (
-  `id` bigint(20) NOT NULL AUTO_INCREMENT,
+  `id` bigint NOT NULL AUTO_INCREMENT,
   `email` varchar(255) DEFAULT NULL,
   `gmt_create` datetime(6) DEFAULT NULL,
   `gmt_modified` datetime(6) DEFAULT NULL,
@@ -151,47 +152,47 @@ CREATE TABLE `user_info` (
   `phone` varchar(255) DEFAULT NULL,
   `username` varchar(255) DEFAULT NULL,
   PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
 -- ----------------------------
 -- Table structure for workflow_info
 -- ----------------------------
 DROP TABLE IF EXISTS `workflow_info`;
 CREATE TABLE `workflow_info` (
-  `id` bigint(20) NOT NULL AUTO_INCREMENT,
-  `app_id` bigint(20) DEFAULT NULL,
+  `id` bigint NOT NULL AUTO_INCREMENT,
+  `app_id` bigint DEFAULT NULL,
   `gmt_create` datetime(6) DEFAULT NULL,
   `gmt_modified` datetime(6) DEFAULT NULL,
-  `max_wf_instance_num` int(11) DEFAULT NULL,
-  `next_trigger_time` bigint(20) DEFAULT NULL,
+  `max_wf_instance_num` int DEFAULT NULL,
+  `next_trigger_time` bigint DEFAULT NULL,
   `notify_user_ids` varchar(255) DEFAULT NULL,
   `pedag` text,
-  `status` int(11) DEFAULT NULL,
+  `status` int DEFAULT NULL,
   `time_expression` varchar(255) DEFAULT NULL,
-  `time_expression_type` int(11) DEFAULT NULL,
+  `time_expression_type` int DEFAULT NULL,
   `wf_description` varchar(255) DEFAULT NULL,
   `wf_name` varchar(255) DEFAULT NULL,
   PRIMARY KEY (`id`),
   KEY `IDX7uo5w0e3beeho3fnx9t7eiol3` (`app_id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
 -- ----------------------------
 -- Table structure for workflow_instance_info
 -- ----------------------------
 DROP TABLE IF EXISTS `workflow_instance_info`;
 CREATE TABLE `workflow_instance_info` (
-  `id` bigint(20) NOT NULL AUTO_INCREMENT,
-  `actual_trigger_time` bigint(20) DEFAULT NULL,
-  `app_id` bigint(20) DEFAULT NULL,
+  `id` bigint NOT NULL AUTO_INCREMENT,
+  `actual_trigger_time` bigint DEFAULT NULL,
+  `app_id` bigint DEFAULT NULL,
   `dag` text,
-  `finished_time` bigint(20) DEFAULT NULL,
+  `finished_time` bigint DEFAULT NULL,
   `gmt_create` datetime(6) DEFAULT NULL,
   `gmt_modified` datetime(6) DEFAULT NULL,
   `result` text,
-  `status` int(11) DEFAULT NULL,
-  `wf_instance_id` bigint(20) DEFAULT NULL,
-  `workflow_id` bigint(20) DEFAULT NULL,
+  `status` int DEFAULT NULL,
+  `wf_instance_id` bigint DEFAULT NULL,
+  `workflow_id` bigint DEFAULT NULL,
   PRIMARY KEY (`id`)
-) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
 SET FOREIGN_KEY_CHECKS = 1;
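Editorial note on this schema diff: the dump source moves from MySQL 5.7 (50724) to MySQL 8.0 (80020). MySQL 8.0.17 deprecates integer display widths, which is why `bigint(20)` and `int(11)` become plain `bigint` and `int` throughout, and MySQL 8.0's default utf8mb4 collation `utf8mb4_0900_ai_ci` is now spelled out on every table.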

View File

@@ -62,8 +62,10 @@ if [ "$startup" = "y" ] || [ "$startup" = "Y" ]; then
     echo "================== Preparing to start powerjob-server =================="
     docker run -d \
            --name powerjob-server \
-           -p 7700:7700 -p 10086:10086 \
+           -p 7700:7700 -p 10086:10086 -p 5001:5005 -p 10001:10000 \
+           -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
            -e PARAMS="--spring.profiles.active=pre" \
+           -e TZ="Asia/Shanghai" \
            -v ~/docker/powerjob-server:/root/powerjob-server -v ~/.m2:/root/.m2 \
            tjqq/powerjob-server:$version
     sleep 1
@@ -74,8 +76,21 @@ if [ "$startup" = "y" ] || [ "$startup" = "Y" ]; then
     serverIP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' powerjob-server)
     serverAddress="$serverIP:7700"
     echo "Server address in use: $serverAddress"
-    docker run -d -e PARAMS="--app powerjob-agent-test --server $serverAddress" -p 27777:27777 --name powerjob-agent -v ~/docker/powerjob-agent:/root tjqq/powerjob-agent:$version
-    docker run -d -e PARAMS="--app powerjob-agent-test --server $serverAddress" -p 27778:27777 --name powerjob-agent2 -v ~/docker/powerjob-agent2:/root tjqq/powerjob-agent:$version
+    docker run -d \
+           --name powerjob-agent \
+           -p 27777:27777 -p 5002:5005 -p 10002:10000 \
+           -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
+           -e PARAMS="--app powerjob-agent-test --server $serverAddress" \
+           -v ~/docker/powerjob-agent:/root \
+           tjqq/powerjob-agent:$version
+    docker run -d \
+           --name powerjob-agent2 \
+           -p 27778:27777 -p 5003:5005 -p 10003:10000 \
+           -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005" \
+           -e PARAMS="--app powerjob-agent-test --server $serverAddress" \
+           -v ~/docker/powerjob-agent2:/root \
+           tjqq/powerjob-agent:$version
     tail -f -n 100 ~/docker/powerjob-agent/powerjob/logs/powerjob-agent-application.log
 fi
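The JVMOPTIONS added above turn on a JDWP debug agent (container port 5005) and an unauthenticated JMX endpoint (container port 10000), remapped to host ports 500x/1000x. A minimal sketch of probing the remapped JMX endpoint from plain Java; `localhost` and port 10001 are assumptions taken from the `-p 10001:10000` mapping, and for remote hosts the server side may additionally need `-Djava.rmi.server.hostname` so the RMI stub advertises a reachable address:

import javax.management.MBeanServerConnection;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

public class JmxProbe {
    public static void main(String[] args) throws Exception {
        // Assumed address: host port 10001 is mapped to the container's JMX port 10000
        JMXServiceURL url = new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:10001/jmxrmi");
        try (JMXConnector connector = JMXConnectorFactory.connect(url)) {
            MBeanServerConnection mbsc = connector.getMBeanServerConnection();
            // A trivial sanity check that the connection works
            System.out.println("MBean count: " + mbsc.getMBeanCount());
        }
    }
}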

View File

@@ -1,12 +0,0 @@
-#!/bin/sh
-# One-click deploy script; do not move this script
-cd `dirname $0`/../.. || exit
-echo "================== Building jar =================="
-mvn clean package -DskipTests -Pdev -e -U
-echo "================== Copying jar =================="
-/bin/cp -rf powerjob-server/target/*.jar others/powerjob-server.jar
-ls -l others/powerjob-server.jar
-echo "================== Starting in debug mode =================="
-nohup java -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -jar others/powerjob-server.jar > powerjob-server.log &
-sleep 100
-tail --pid=$$ -f -n 1000 others/powerjob-server.log

View File

@@ -1,37 +0,0 @@
-#!/bin/bash
-cd `dirname $0`/../.. || exit
-echo "================== Building jar =================="
-mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
-echo "================== Copying jar =================="
-/bin/cp -rf powerjob-server/target/*.jar powerjob-server/docker/powerjob-server.jar
-/bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
-echo "================== Stopping old applications =================="
-docker stop powerjob-server
-docker stop powerjob-agent
-docker stop powerjob-agent2
-echo "================== Removing old containers =================="
-docker container rm powerjob-server
-docker container rm powerjob-agent
-docker container rm powerjob-agent2
-echo "================== Removing old images =================="
-docker rmi -f tjqq/powerjob-server:latest
-docker rmi -f tjqq/powerjob-agent:latest
-echo "================== Building powerjob-server image =================="
-docker build -t tjqq/powerjob-server:latest powerjob-server/docker/. || exit
-echo "================== Building powerjob-agent image =================="
-docker build -t tjqq/powerjob-agent:latest powerjob-worker-agent/. || exit
-echo "================== Preparing to start powerjob-server =================="
-docker run -d \
-       --name powerjob-server \
-       -p 7700:7700 -p 10086:10086 \
-       -e PARAMS="--spring.profiles.active=product --spring.datasource.core.jdbc-url=jdbc:mysql://172.27.147.252:3306/oms-product?serverTimezone=Asia/Shanghai&useUnicode=true&characterEncoding=UTF-8 --spring.data.mongodb.uri=mongodb://172.27.147.252:27017/oms-product" \
-       -v ~/docker/powerjob-server:/root/powerjob-server -v ~/.m2:/root/.m2 \
-       tjqq/powerjob-server:latest
-sleep 60
-echo "================== Preparing to start powerjob-client =================="
-serverIP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' powerjob-server)
-serverAddress="$serverIP:7700"
-echo "Server address in use: $serverAddress"
-docker run -d -e PARAMS="--app powerjob-agent-test --server $serverAddress" -p 27777:27777 --name powerjob-agent -v ~/docker/powerjob-agent:/root tjqq/powerjob-agent:latest
-docker run -d -e PARAMS="--app powerjob-agent-test --server $serverAddress" -p 27778:27777 --name powerjob-agent2 -v ~/docker/powerjob-agent2:/root tjqq/powerjob-agent:latest

View File

@@ -0,0 +1,55 @@
+#!/bin/bash
+cd `dirname $0`/../.. || exit
+echo "================== Building jar =================="
+mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
+echo "================== Copying jar =================="
+/bin/cp -rf powerjob-server/target/*.jar powerjob-server/docker/powerjob-server.jar
+/bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
+echo "================== Stopping old applications =================="
+docker stop powerjob-server
+docker stop powerjob-agent
+docker stop powerjob-agent2
+echo "================== Removing old containers =================="
+docker container rm powerjob-server
+docker container rm powerjob-agent
+docker container rm powerjob-agent2
+echo "================== Removing old images =================="
+docker rmi -f tjqq/powerjob-server:latest
+docker rmi -f tjqq/powerjob-agent:latest
+echo "================== Building powerjob-server image =================="
+docker build -t tjqq/powerjob-server:latest powerjob-server/docker/. || exit
+echo "================== Building powerjob-agent image =================="
+docker build -t tjqq/powerjob-agent:latest powerjob-worker-agent/. || exit
+echo "================== Preparing to start powerjob-server =================="
+docker run -d \
+       --restart=always \
+       --name powerjob-server \
+       -p 7700:7700 -p 10086:10086 -p 5001:5005 -p 10001:10000 \
+       -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
+       -e PARAMS="--spring.profiles.active=product --spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3306/powerjob-product?useUnicode=true&characterEncoding=UTF-8 --spring.data.mongodb.uri=mongodb://remotehost:27017/powerjob-product" \
+       -v ~/docker/powerjob-server:/root/powerjob-server -v ~/.m2:/root/.m2 \
+       tjqq/powerjob-server:latest
+sleep 60
+echo "================== Preparing to start powerjob-agent =================="
+serverIP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' powerjob-server)
+serverAddress="$serverIP:7700"
+echo "Server address in use: $serverAddress"
+docker run -d \
+       --restart=always \
+       --name powerjob-agent \
+       -p 27777:27777 -p 5002:5005 -p 10002:10000 \
+       -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
+       -e PARAMS="--app powerjob-agent-test --server $serverAddress" \
+       -v ~/docker/powerjob-agent:/root \
+       tjqq/powerjob-agent:latest
+docker run -d \
+       --restart=always \
+       --name powerjob-agent2 \
+       -p 27778:27777 -p 5003:5005 -p 10003:10000 \
+       -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
+       -e PARAMS="--app powerjob-agent-test --server $serverAddress" \
+       -v ~/docker/powerjob-agent2:/root \
+       tjqq/powerjob-agent:latest

View File

@@ -10,11 +10,11 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>powerjob-client</artifactId>
-    <version>3.1.0</version>
+    <version>3.1.1</version>
     <packaging>jar</packaging>
     <properties>
-        <powerjob.common.version>3.1.0</powerjob.common.version>
+        <powerjob.common.version>3.1.1</powerjob.common.version>
         <junit.version>5.6.1</junit.version>
     </properties>

View File

@@ -10,7 +10,7 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>powerjob-common</artifactId>
-    <version>3.1.0</version>
+    <version>3.1.1</version>
     <packaging>jar</packaging>
     <properties>

View File

@@ -43,7 +43,7 @@ public class InstanceDetail implements OmsSerializable {
     private String startTime;
     private String finishedTime;
     private String result;
-    private String status;
+    private int status;
 }
 // extra for MapReduce / Broadcast tasks ->

View File

@@ -1,5 +1,5 @@
-# Base image
-FROM openjdk:8
+# Base image (supports amd64 & arm64), based on Ubuntu 18.04.4 LTS
+FROM adoptopenjdk:8-jdk-hotspot
 # Maintainer
 MAINTAINER tengjiqi@gmail.com
 # Download and install maven
@@ -11,13 +11,14 @@ COPY settings.xml /opt/powerjob-maven/conf/settings.xml
 # Set the maven environment variable (maven invoker reads it to invoke maven)
 ENV M2_HOME=/opt/powerjob-maven
-# Set the timezone (Debian-specific method)
+# Set the timezone
 ENV TZ=Asia/Shanghai
 # Set other environment variables
 ENV APP_NAME=powerjob-server
-# Pass SpringBoot startup parameters
+# Pass SpringBoot startup parameters and JVM options
 ENV PARAMS=""
+ENV JVMOPTIONS=""
 # Copy the application jar into the image
 COPY powerjob-server.jar /powerjob-server.jar
 # Expose ports (HTTP + AKKA)
@@ -27,4 +28,4 @@ RUN mkdir -p /root/powerjob-server
 # Mount a data volume to write files directly to the host (note: this is an anonymous volume, so the host location is random)
 VOLUME /root/powerjob-server
 # Start the application
-ENTRYPOINT ["sh","-c","java -jar /powerjob-server.jar $PARAMS"]
+ENTRYPOINT ["sh","-c","java $JVMOPTIONS -jar /powerjob-server.jar $PARAMS"]

View File

@@ -10,13 +10,13 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>powerjob-server</artifactId>
-    <version>3.1.0</version>
+    <version>3.1.1</version>
     <packaging>jar</packaging>
     <properties>
         <swagger.version>2.9.2</swagger.version>
         <springboot.version>2.2.6.RELEASE</springboot.version>
-        <powerjob.common.version>3.1.0</powerjob.common.version>
+        <powerjob.common.version>3.1.1</powerjob.common.version>
         <mysql.version>8.0.19</mysql.version>
         <h2.db.version>1.4.200</h2.db.version>
         <zip4j.version>2.5.2</zip4j.version>

View File

@@ -68,7 +68,7 @@ public interface InstanceInfoRepository extends JpaRepository<InstanceInfoDO, Lo
     List<Long> findByJobIdInAndStatusIn(List<Long> jobIds, List<Integer> status);
     // Delete historical data; JPA's built-in delete loops over IDs one by one (several seconds for 2000 rows, far too slow...)
     // so the result can only be int
     @Modifying
     @Transactional
     @Query(value = "delete from instance_info where gmt_modified < ?1", nativeQuery = true)

View File

@@ -9,6 +9,7 @@ import com.github.kfcfans.powerjob.server.persistence.core.model.JobInfoDO;
 import com.github.kfcfans.powerjob.server.persistence.core.repository.InstanceInfoRepository;
 import com.github.kfcfans.powerjob.server.service.ha.WorkerManagerService;
 import com.github.kfcfans.powerjob.server.service.instance.InstanceManager;
+import com.github.kfcfans.powerjob.server.service.instance.InstanceMetadataService;
 import com.google.common.base.Splitter;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
@@ -39,6 +40,8 @@ public class DispatchService {
     @Resource
     private InstanceManager instanceManager;
     @Resource
+    private InstanceMetadataService instanceMetadataService;
+    @Resource
     private InstanceInfoRepository instanceInfoRepository;
     private static final Splitter commaSplitter = Splitter.on(",");
@@ -142,5 +145,8 @@ public class DispatchService {
         // Update status
         instanceInfoRepository.update4TriggerSucceed(instanceId, WAITING_WORKER_RECEIVE.getV(), currentRunningTimes + 1, current, taskTrackerAddress, dbInstanceParams, now);
+        // Pre-load the cache
+        instanceMetadataService.loadJobInfo(instanceId, jobInfo);
     }
 }

View File

@@ -11,7 +11,7 @@ import com.github.kfcfans.powerjob.server.persistence.core.model.JobInfoDO;
 import com.github.kfcfans.powerjob.server.persistence.local.LocalInstanceLogDO;
 import com.github.kfcfans.powerjob.server.persistence.local.LocalInstanceLogRepository;
 import com.github.kfcfans.powerjob.server.persistence.mongodb.GridFsManager;
-import com.github.kfcfans.powerjob.server.service.instance.InstanceManager;
+import com.github.kfcfans.powerjob.server.service.instance.InstanceMetadataService;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -46,7 +46,7 @@ import java.util.stream.Stream;
 public class InstanceLogService {
     @Resource
-    private InstanceManager instanceManager;
+    private InstanceMetadataService instanceMetadataService;
     @Resource
     private GridFsManager gridFsManager;
     // Local database access bean
@@ -320,13 +320,12 @@ public class InstanceLogService {
         // Periodically delete logs of second-level (frequent) jobs
         List<Long> frequentInstanceIds = Lists.newLinkedList();
         instanceId2LastReportTime.keySet().forEach(instanceId -> {
-            JobInfoDO jobInfo = instanceManager.fetchJobInfo(instanceId);
-            if (jobInfo == null) {
-                return;
-            }
-            if (TimeExpressionType.frequentTypes.contains(jobInfo.getTimeExpressionType())) {
-                frequentInstanceIds.add(instanceId);
-            }
+            try {
+                JobInfoDO jobInfo = instanceMetadataService.fetchJobInfoByInstanceId(instanceId);
+                if (TimeExpressionType.frequentTypes.contains(jobInfo.getTimeExpressionType())) {
+                    frequentInstanceIds.add(instanceId);
+                }
+            }catch (Exception ignore) {
+            }
         });

View File

@@ -65,7 +65,7 @@ public class DefaultMailAlarmService implements Alarmable {
             javaMailSender.send(sm);
         }catch (Exception e) {
-            log.error("[OmsMailAlarmService] send mail({}) failed.", sm, e);
+            log.error("[OmsMailAlarmService] send mail({}) failed, reason is {}", sm, e.getMessage());
         }
     }
 }

View File

@@ -30,7 +30,7 @@ public class ClusterStatusHolder {
     // Last heartbeat time of every machine in the cluster
     private Map<String, Long> address2ActiveTime;
-    private static final long WORKER_TIMEOUT_MS = 30000;
+    private static final long WORKER_TIMEOUT_MS = 60000;
     public ClusterStatusHolder(String appName) {
         this.appName = appName;
@@ -49,7 +49,7 @@ public class ClusterStatusHolder {
         Long oldTime = address2ActiveTime.getOrDefault(workerAddress, -1L);
         if (heartbeatTime < oldTime) {
-            log.warn("[ClusterStatusHolder-{}] receive the old heartbeat: {}.", appName, heartbeat);
+            log.warn("[ClusterStatusHolder-{}] receive the expired heartbeat from {}, serverTime: {}, heartTime: {}", appName, heartbeat.getWorkerAddress(), System.currentTimeMillis(), heartbeat.getHeartbeatTime());
             return;
         }
@@ -131,10 +131,25 @@ public class ClusterStatusHolder {
     /**
      * Release all locally stored container info; this briefly (~1 min) breaks the listDeployedContainer service
      */
-    public void releaseContainerInfos() {
+    public void release() {
         log.info("[ClusterStatusHolder-{}] clean the containerInfos, listDeployedContainer service may down about 1min~", appName);
         // Discard all existing data and prepare to rebuild
         containerId2Infos = Maps.newConcurrentMap();
+        // Discard info of timed-out machines
+        List<String> timeoutAddress = Lists.newLinkedList();
+        address2Metrics.forEach((addr, lastActiveTime) -> {
+            if (timeout(addr)) {
+                timeoutAddress.add(addr);
+            }
+        });
+        if (!timeoutAddress.isEmpty()) {
+            log.info("[ClusterStatusHolder-{}] detective timeout workers({}), try to release their infos.", appName, timeoutAddress);
+            timeoutAddress.forEach(addr -> {
+                address2ActiveTime.remove(addr);
+                address2Metrics.remove(addr);
+            });
+        }
     }
     private boolean timeout(String address) {

View File

@@ -94,9 +94,9 @@ public class WorkerManagerService {
     }
     /**
-     * Release all locally stored container info; this briefly (~1 min) breaks the listDeployedContainer service
+     * Clean up cached info to prevent OOM
      */
-    public static void releaseContainerInfos() {
-        appId2ClusterStatus.values().forEach(ClusterStatusHolder::releaseContainerInfos);
+    public static void cleanUp() {
+        appId2ClusterStatus.values().forEach(ClusterStatusHolder::release);
     }
 }

View File

@@ -8,7 +8,6 @@ import com.github.kfcfans.powerjob.server.persistence.core.model.InstanceInfoDO;
 import com.github.kfcfans.powerjob.server.persistence.core.model.JobInfoDO;
 import com.github.kfcfans.powerjob.server.persistence.core.model.UserInfoDO;
 import com.github.kfcfans.powerjob.server.persistence.core.repository.InstanceInfoRepository;
-import com.github.kfcfans.powerjob.server.persistence.core.repository.JobInfoRepository;
 import com.github.kfcfans.powerjob.server.service.DispatchService;
 import com.github.kfcfans.powerjob.server.service.InstanceLogService;
 import com.github.kfcfans.powerjob.server.service.UserService;
@@ -16,8 +15,6 @@ import com.github.kfcfans.powerjob.server.service.alarm.Alarmable;
 import com.github.kfcfans.powerjob.server.service.alarm.JobInstanceAlarmContent;
 import com.github.kfcfans.powerjob.server.service.timing.schedule.HashedWheelTimerHolder;
 import com.github.kfcfans.powerjob.server.service.workflow.WorkflowInstanceManager;
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.BeanUtils;
 import org.springframework.stereotype.Service;
@@ -25,7 +22,6 @@ import org.springframework.stereotype.Service;
 import javax.annotation.Resource;
 import java.util.Date;
 import java.util.List;
-import java.util.Optional;
 import java.util.concurrent.TimeUnit;
 /**
@@ -38,10 +34,6 @@ import java.util.concurrent.TimeUnit;
 @Service
 public class InstanceManager {
-    // Stores the JobInfo for each instanceId, to ease retries
-    private static Cache<Long, JobInfoDO> instanceId2JobInfo;
-    // Spring Bean
     @Resource
     private DispatchService dispatchService;
     @Resource
@@ -49,21 +41,12 @@ public class InstanceManager {
     @Resource(name = "omsCenterAlarmService")
     private Alarmable omsCenterAlarmService;
     @Resource
-    private InstanceInfoRepository instanceInfoRepository;
+    private InstanceMetadataService instanceMetadataService;
     @Resource
-    private JobInfoRepository jobInfoRepository;
+    private InstanceInfoRepository instanceInfoRepository;
     @Resource
     private WorkflowInstanceManager workflowInstanceManager;
-    private static final int CACHE_CONCURRENCY_LEVEL = 8;
-    private static final int CACHE_MAX_SIZE = 4096;
-    static {
-        instanceId2JobInfo = CacheBuilder.newBuilder()
-                .concurrencyLevel(CACHE_CONCURRENCY_LEVEL)
-                .maximumSize(CACHE_MAX_SIZE)
-                .build();
-    }
     /**
      * Update task status
@@ -71,19 +54,18 @@ public class InstanceManager {
      */
     public void updateStatus(TaskTrackerReportInstanceStatusReq req) throws Exception {
-        Long jobId = req.getJobId();
         Long instanceId = req.getInstanceId();
         // Fetch related data
-        JobInfoDO jobInfo = instanceId2JobInfo.get(instanceId, () -> {
-            Optional<JobInfoDO> jobInfoOpt = jobInfoRepository.findById(jobId);
-            return jobInfoOpt.orElseThrow(() -> new IllegalArgumentException("can't find JobIno by jobId: " + jobId));
-        });
+        JobInfoDO jobInfo = instanceMetadataService.fetchJobInfoByInstanceId(req.getInstanceId());
         InstanceInfoDO instanceInfo = instanceInfoRepository.findByInstanceId(instanceId);
+        if (instanceInfo == null) {
+            log.warn("[InstanceManager-{}] can't find InstanceInfo from database", instanceId);
+            return;
+        }
         // Discard expired status reports
         if (req.getReportTime() <= instanceInfo.getLastReportTime()) {
-            log.warn("[InstanceManager-{}] receive the expired status report request: {}, this report will br dropped.", instanceId, req);
+            log.warn("[InstanceManager-{}] receive the expired status report request: {}, this report will be dropped.", instanceId, req);
             return;
         }
@@ -170,9 +152,11 @@ public class InstanceManager {
         // Alarm
         if (status == InstanceStatus.FAILED) {
-            JobInfoDO jobInfo = fetchJobInfo(instanceId);
-            if (jobInfo == null) {
-                log.warn("[InstanceManager] can't find jobInfo by instanceId({}), alarm failed.", instanceId);
+            JobInfoDO jobInfo;
+            try {
+                jobInfo = instanceMetadataService.fetchJobInfoByInstanceId(instanceId);
+            }catch (Exception e) {
+                log.warn("[InstanceManager-{}] can't find jobInfo, alarm failed.", instanceId);
                 return;
             }
@@ -185,31 +169,8 @@ public class InstanceManager {
             omsCenterAlarmService.onJobInstanceFailed(content, userList);
         }
-        // Expire the cache
-        instanceId2JobInfo.invalidate(instanceId);
+        // Proactively evict the cache entry to reduce memory usage
+        instanceMetadataService.invalidateJobInfo(instanceId);
     }
-    /**
-     * Query job info by instance ID
-     * @param instanceId instance ID
-     * @return job metadata
-     */
-    public JobInfoDO fetchJobInfo(Long instanceId) {
-        JobInfoDO jobInfo = instanceId2JobInfo.getIfPresent(instanceId);
-        if (jobInfo != null) {
-            return jobInfo;
-        }
-        InstanceInfoDO instanceInfo = instanceInfoRepository.findByInstanceId(instanceId);
-        if (instanceInfo != null) {
-            return jobInfoRepository.findById(instanceInfo.getJobId()).orElse(null);
-        }
-        return null;
-    }
-    /**
-     * Release the local cache to prevent memory leaks
-     */
-    public static void releaseCache() {
-        instanceId2JobInfo.cleanUp();
-    }
 }

View File

@@ -0,0 +1,80 @@
+package com.github.kfcfans.powerjob.server.service.instance;
+import com.github.kfcfans.powerjob.server.persistence.core.model.InstanceInfoDO;
+import com.github.kfcfans.powerjob.server.persistence.core.model.JobInfoDO;
+import com.github.kfcfans.powerjob.server.persistence.core.repository.InstanceInfoRepository;
+import com.github.kfcfans.powerjob.server.persistence.core.repository.JobInfoRepository;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import org.springframework.beans.factory.InitializingBean;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Service;
+import javax.annotation.Resource;
+import java.util.Optional;
+import java.util.concurrent.ExecutionException;
+/**
+ * Stores the JobInfo associated with each instance
+ *
+ * @author tjq
+ * @since 2020/6/23
+ */
+@Service
+public class InstanceMetadataService implements InitializingBean {
+    @Resource
+    private JobInfoRepository jobInfoRepository;
+    @Resource
+    private InstanceInfoRepository instanceInfoRepository;
+    // Cache: once an instance has been created, its JobInfo should never change (even if the source data does)
+    private Cache<Long, JobInfoDO> instanceId2JobInfoCache;
+    @Value("${oms.instance.metadata.cache.size}")
+    private int instanceMetadataCacheSize;
+    private static final int CACHE_CONCURRENCY_LEVEL = 4;
+    @Override
+    public void afterPropertiesSet() throws Exception {
+        instanceId2JobInfoCache = CacheBuilder.newBuilder()
+                .concurrencyLevel(CACHE_CONCURRENCY_LEVEL)
+                .maximumSize(instanceMetadataCacheSize)
+                .build();
+    }
+    /**
+     * Fetch the JobInfo for an instanceId
+     * @param instanceId instanceId
+     * @return JobInfoDO
+     * @throws ExecutionException if the cache loader fails
+     */
+    public JobInfoDO fetchJobInfoByInstanceId(Long instanceId) throws ExecutionException {
+        return instanceId2JobInfoCache.get(instanceId, () -> {
+            InstanceInfoDO instanceInfo = instanceInfoRepository.findByInstanceId(instanceId);
+            if (instanceInfo != null) {
+                Optional<JobInfoDO> jobInfoOpt = jobInfoRepository.findById(instanceInfo.getJobId());
+                return jobInfoOpt.orElseThrow(() -> new IllegalArgumentException("can't find JobInfo by jobId: " + instanceInfo.getJobId()));
+            }
+            throw new IllegalArgumentException("can't find Instance by instanceId: " + instanceId);
+        });
+    }
+    /**
+     * Pre-load the cache
+     * @param instanceId instanceId
+     * @param jobInfoDO the original job data
+     */
+    public void loadJobInfo(Long instanceId, JobInfoDO jobInfoDO) {
+        instanceId2JobInfoCache.put(instanceId, jobInfoDO);
+    }
+    /**
+     * Invalidate a cache entry
+     * @param instanceId instanceId
+     */
+    public void invalidateJobInfo(Long instanceId) {
+        instanceId2JobInfoCache.invalidate(instanceId);
+    }
+}
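A note on the Guava API this new service relies on: Cache.get(key, Callable) runs the callable only on a cache miss; a checked exception thrown by the loader surfaces wrapped in ExecutionException, while an unchecked one surfaces as UncheckedExecutionException. A self-contained sketch of those semantics (the Long/String types here are placeholders, not the project's):

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.concurrent.ExecutionException;

public class CacheLoaderDemo {
    public static void main(String[] args) {
        Cache<Long, String> cache = CacheBuilder.newBuilder()
                .concurrencyLevel(4)
                .maximumSize(1024)
                .build();
        try {
            // Runs only on a miss; the checked exception is wrapped in ExecutionException
            cache.get(1L, () -> { throw new Exception("no such instance"); });
        } catch (ExecutionException e) {
            System.out.println("loader failed: " + e.getCause().getMessage());
        }
        // put() pre-loads an entry, as loadJobInfo() does; the next get() is then a hit
        cache.put(1L, "job-info");
        System.out.println(cache.getIfPresent(1L));
    }
}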

View File

@@ -5,7 +5,6 @@ import com.github.kfcfans.powerjob.server.persistence.core.repository.InstanceIn
 import com.github.kfcfans.powerjob.server.persistence.core.repository.WorkflowInstanceInfoRepository;
 import com.github.kfcfans.powerjob.server.persistence.mongodb.GridFsManager;
 import com.github.kfcfans.powerjob.server.service.ha.WorkerManagerService;
-import com.github.kfcfans.powerjob.server.service.instance.InstanceManager;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Stopwatch;
 import lombok.extern.slf4j.Slf4j;
@@ -59,8 +58,7 @@ public class CleanService {
     public void timingClean() {
         // Release local caches
-        WorkerManagerService.releaseContainerInfos();
-        InstanceManager.releaseCache();
+        WorkerManagerService.cleanUp();
         // Delete database run records
         cleanInstanceLog();

View File

@@ -1,5 +1,6 @@
 package com.github.kfcfans.powerjob.server.web.controller;
+import com.github.kfcfans.powerjob.common.InstanceStatus;
 import com.github.kfcfans.powerjob.common.model.InstanceDetail;
 import com.github.kfcfans.powerjob.common.response.ResultDTO;
 import com.github.kfcfans.powerjob.server.akka.OhMyServer;
@@ -21,6 +22,7 @@ import org.springframework.data.domain.Example;
 import org.springframework.data.domain.Page;
 import org.springframework.data.domain.PageRequest;
 import org.springframework.data.domain.Sort;
+import org.springframework.util.StringUtils;
 import org.springframework.web.bind.annotation.*;
 import javax.annotation.Resource;
@@ -112,6 +114,10 @@ public class InstanceController {
         BeanUtils.copyProperties(request, queryEntity);
         queryEntity.setType(request.getType().getV());
+        if (!StringUtils.isEmpty(request.getStatus())) {
+            queryEntity.setStatus(InstanceStatus.valueOf(request.getStatus()).getV());
+        }
         Page<InstanceInfoDO> pageResult = instanceInfoRepository.findAll(Example.of(queryEntity), pageable);
         return ResultDTO.success(convertPage(pageResult));
     }

View File

@@ -3,6 +3,7 @@ package com.github.kfcfans.powerjob.server.web.controller;
 import akka.actor.ActorSelection;
 import akka.pattern.Patterns;
 import com.github.kfcfans.powerjob.common.InstanceStatus;
+import com.github.kfcfans.powerjob.common.OmsConstant;
 import com.github.kfcfans.powerjob.common.RemoteConstant;
 import com.github.kfcfans.powerjob.common.model.SystemMetrics;
 import com.github.kfcfans.powerjob.common.response.AskResponse;
@@ -18,6 +19,7 @@ import com.github.kfcfans.powerjob.server.web.response.SystemOverviewVO;
 import com.github.kfcfans.powerjob.server.web.response.WorkerStatusVO;
 import com.google.common.collect.Lists;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.time.DateFormatUtils;
 import org.apache.commons.lang3.time.DateUtils;
 import org.springframework.util.StringUtils;
 import org.springframework.web.bind.annotation.GetMapping;
@@ -103,8 +105,10 @@ public class SystemInfoController {
         Date date = DateUtils.addDays(new Date(), -1);
         overview.setFailedInstanceCount(instanceInfoRepository.countByAppIdAndStatusAndGmtCreateAfter(appId, InstanceStatus.FAILED.getV(), date));
+        // Server timezone
+        overview.setTimezone(TimeZone.getDefault().getDisplayName());
         // Server time
-        overview.setServerTime(System.currentTimeMillis());
+        overview.setServerTime(DateFormatUtils.format(new Date(), OmsConstant.TIME_PATTERN));
         return ResultDTO.success(overview);
     }

View File

@@ -1,5 +1,6 @@
 package com.github.kfcfans.powerjob.server.web.controller;
+import com.github.kfcfans.powerjob.common.WorkflowInstanceStatus;
 import com.github.kfcfans.powerjob.common.response.ResultDTO;
 import com.github.kfcfans.powerjob.server.persistence.PageResult;
 import com.github.kfcfans.powerjob.server.persistence.core.model.WorkflowInstanceInfoDO;
@@ -13,6 +14,7 @@ import org.springframework.data.domain.Example;
 import org.springframework.data.domain.Page;
 import org.springframework.data.domain.PageRequest;
 import org.springframework.data.domain.Sort;
+import org.springframework.util.StringUtils;
 import org.springframework.web.bind.annotation.*;
 import javax.annotation.Resource;
@@ -54,6 +56,11 @@ public class WorkflowInstanceController {
         WorkflowInstanceInfoDO queryEntity = new WorkflowInstanceInfoDO();
         BeanUtils.copyProperties(req, queryEntity);
+        if (!StringUtils.isEmpty(req.getStatus())) {
+            queryEntity.setStatus(WorkflowInstanceStatus.valueOf(req.getStatus()).getV());
+        }
         Page<WorkflowInstanceInfoDO> ps = workflowInstanceInfoRepository.findAll(Example.of(queryEntity), pageable);
         return ResultDTO.success(convertPage(ps));

View File

@@ -24,4 +24,6 @@ public class QueryInstanceRequest {
     private Long instanceId;
     private Long jobId;
     private Long wfInstanceId;
+    private String status;
 }

View File

@@ -21,4 +21,6 @@ public class QueryWorkflowInstanceRequest {
     // Query conditions (NORMAL/WORKFLOW)
     private Long wfInstanceId;
     private Long workflowId;
+    private String status;
 }

View File

@@ -13,6 +13,8 @@ public class SystemOverviewVO {
     private long jobCount;
     private long runningInstanceCount;
     private long failedInstanceCount;
+    // Server timezone
+    private String timezone;
     // Server time
-    private long serverTime;
+    private String serverTime;
 }
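Why `serverTime` becomes a pre-formatted String is an inference from this commit's new `timezone` field and its TimezoneTest, not stated in the diff: formatting on the server bakes the server's zone into the value, so the console no longer re-interprets epoch millis in the browser's zone. A sketch, assuming `OmsConstant.TIME_PATTERN` is a pattern such as "yyyy-MM-dd HH:mm:ss":

import org.apache.commons.lang3.time.DateFormatUtils;
import java.util.Date;

public class ServerTimeDemo {
    public static void main(String[] args) {
        // Epoch millis are zone-agnostic; every browser would render them in its own zone.
        long epoch = System.currentTimeMillis();
        // A string formatted with the server's default TimeZone pins the display to the server's clock.
        String serverTime = DateFormatUtils.format(new Date(epoch), "yyyy-MM-dd HH:mm:ss");
        System.out.println(epoch + " -> " + serverTime);
    }
}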

View File

@@ -3,7 +3,7 @@ logging.config=classpath:logback-dev.xml
 ####### Database configuration #######
 spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver
-spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3391/powerjob-daily?useUnicode=true&characterEncoding=UTF-8
+spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8
 spring.datasource.core.username=root
 spring.datasource.core.password=No1Bug2Please3!
 spring.datasource.core.hikari.maximum-pool-size=20
@@ -21,8 +21,11 @@ spring.mail.properties.mail.smtp.starttls.enable=true
 spring.mail.properties.mail.smtp.starttls.required=true
 ####### Resource cleanup configuration #######
-oms.log.retention.local=0
-oms.log.retention.remote=0
-oms.container.retention.local=0
-oms.container.retention.remote=0
-oms.instanceinfo.retention=0
+oms.log.retention.local=1
+oms.log.retention.remote=1
+oms.container.retention.local=1
+oms.container.retention.remote=-1
+oms.instanceinfo.retention=1
+####### Cache configuration #######
+oms.instance.metadata.cache.size=1024

View File

@@ -3,7 +3,7 @@ logging.config=classpath:logback-product.xml
 ####### Database configuration #######
 spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver
-spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3391/powerjob-pre?useUnicode=true&characterEncoding=UTF-8
+spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3306/powerjob-pre?useUnicode=true&characterEncoding=UTF-8
 spring.datasource.core.username=root
 spring.datasource.core.password=No1Bug2Please3!
 spring.datasource.core.hikari.maximum-pool-size=20
@@ -25,4 +25,7 @@ oms.log.retention.local=3
 oms.log.retention.remote=3
 oms.container.retention.local=3
 oms.container.retention.remote=-1
 oms.instanceinfo.retention=3
+####### Cache configuration #######
+oms.instance.metadata.cache.size=1024

View File

@@ -25,4 +25,7 @@ oms.log.retention.local=7
 oms.log.retention.remote=7
 oms.container.retention.local=7
 oms.container.retention.remote=-1
 oms.instanceinfo.retention=3
+####### Cache configuration #######
+oms.instance.metadata.cache.size=2048

View File

@@ -5,7 +5,7 @@
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width,initial-scale=1.0">
 <link rel="icon" href="/favicon.ico">
-<title>oms-console</title>
+<title>PowerJob</title>
 <link href="/js/0.js" rel="prefetch"><link href="/js/1.js" rel="prefetch"><link href="/js/10.js" rel="prefetch"><link href="/js/11.js" rel="prefetch"><link href="/js/2.js" rel="prefetch"><link href="/js/3.js" rel="prefetch"><link href="/js/4.js" rel="prefetch"><link href="/js/5.js" rel="prefetch"><link href="/js/6.js" rel="prefetch"><link href="/js/7.js" rel="prefetch"><link href="/js/8.js" rel="prefetch"><link href="/js/9.js" rel="prefetch"><link href="/js/app.js" rel="preload" as="script"><link href="/js/chunk-vendors.js" rel="preload" as="script"></head>
 <body>
 <noscript>

7 file diffs suppressed because one or more lines are too long

View File

@@ -0,0 +1,34 @@
+package com.github.kfcfans.powerjob.server.test;
+import com.github.kfcfans.powerjob.common.OmsConstant;
+import org.apache.commons.lang3.time.DateFormatUtils;
+import org.junit.jupiter.api.Test;
+import java.util.Date;
+import java.util.TimeZone;
+/**
+ * Timezone issue test
+ *
+ * @author tjq
+ * @since 2020/6/24
+ */
+public class TimezoneTest {
+    @Test
+    public void testTimeZone() {
+        Date now = new Date();
+        System.out.println(now.toString());
+        System.out.println("timestamp before GMT: " + System.currentTimeMillis());
+        TimeZone.setDefault(TimeZone.getTimeZone("GMT"));
+        TimeZone timeZone = TimeZone.getDefault();
+        System.out.println(timeZone.getDisplayName());
+        System.out.println(new Date());
+        System.out.println(DateFormatUtils.format(new Date(), OmsConstant.TIME_PATTERN));
+        System.out.println("timestamp after GMT: " + System.currentTimeMillis());
+    }
+}

View File

@@ -1,11 +1,11 @@
-# The agent has no need for javac, so use a JRE image
-FROM openjdk:8-jre-slim
+# To make arthas easy to use, the agent also uses a JDK instead of a JRE
+FROM adoptopenjdk:8-jdk-hotspot
 MAINTAINER tengjiqi@gmail.com
+# Set the timezone
+ENV TZ=Asia/Shanghai
 ENV APP_NAME=powerjob-worker-agent
+# Pass SpringBoot startup parameters and JVM options
 ENV PARAMS=""
+ENV JVMOPTIONS=""
 COPY powerjob-agent.jar /powerjob-agent.jar
 # Expose port (AKKA-Client)
@@ -13,4 +13,4 @@ EXPOSE 27777
 # Mount a data volume to write files directly to the host (note: this is an anonymous volume, so the host location is random)
 VOLUME /root
 # Start the application
-ENTRYPOINT ["sh","-c","java -jar /powerjob-agent.jar $PARAMS"]
+ENTRYPOINT ["sh","-c","java $JVMOPTIONS -jar /powerjob-agent.jar $PARAMS"]

View File

@@ -10,12 +10,12 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>powerjob-worker-agent</artifactId>
-    <version>3.1.0</version>
+    <version>3.1.1</version>
     <packaging>jar</packaging>
     <properties>
-        <powerjob.worker.version>3.1.0</powerjob.worker.version>
+        <powerjob.worker.version>3.1.1</powerjob.worker.version>
         <logback.version>1.2.3</logback.version>
         <picocli.version>4.3.2</picocli.version>

View File

@@ -10,11 +10,11 @@
     <modelVersion>4.0.0</modelVersion>
     <artifactId>powerjob-worker-samples</artifactId>
-    <version>3.1.0</version>
+    <version>3.1.1</version>
     <properties>
         <springboot.version>2.2.6.RELEASE</springboot.version>
-        <powerjob.worker.version>3.1.0</powerjob.worker.version>
+        <powerjob.worker.version>3.1.1</powerjob.worker.version>
         <fastjson.version>1.2.68</fastjson.version>
         <!-- Skip this module when deploying -->

View File

@@ -10,12 +10,12 @@
 <modelVersion>4.0.0</modelVersion>
 <artifactId>powerjob-worker</artifactId>
-<version>3.1.0</version>
+<version>3.1.1</version>
 <packaging>jar</packaging>
 <properties>
 <spring.version>5.2.4.RELEASE</spring.version>
-<powerjob.common.version>3.1.0</powerjob.common.version>
+<powerjob.common.version>3.1.1</powerjob.common.version>
 <h2.db.version>1.4.200</h2.db.version>
 <hikaricp.version>3.4.2</hikaricp.version>
 <junit.version>5.6.1</junit.version>
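
Release bumps like these must move across every module in lockstep; for reference, the stock versions-maven-plugin can apply the module-version part in one pass (a usage sketch, not how this commit was produced; cross-module properties such as powerjob.worker.version still need their own edit):

    mvn versions:set -DnewVersion=3.1.1 -DgenerateBackupPoms=false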

View File

@@ -62,18 +62,18 @@ public class ServerDiscoveryService {
         }
         if (StringUtils.isEmpty(result)) {
-            log.warn("[OMS-ServerDiscoveryService] can't find any available server, this worker has been quarantined.");
+            log.warn("[OmsServerDiscovery] can't find any available server, this worker has been quarantined.");
             // with Server HA in place, repeated consecutive failures mean this node has lost contact with the outside world; the Server has already moved its second-level tasks to other Workers, so the local ones must be killed
             if (FAILED_COUNT++ > MAX_FAILED_COUNT) {
-                log.error("[OMS-ServerDiscoveryService] can't find any available server for 3 consecutive times, It's time to kill all frequent job in this worker.");
+                log.warn("[OmsServerDiscovery] can't find any available server for 3 consecutive times, It's time to kill all frequent job in this worker.");
                 List<Long> frequentInstanceIds = TaskTrackerPool.getAllFrequentTaskTrackerKeys();
                 if (!CollectionUtils.isEmpty(frequentInstanceIds)) {
                     frequentInstanceIds.forEach(instanceId -> {
                         TaskTracker taskTracker = TaskTrackerPool.remove(instanceId);
                         taskTracker.destroy();
-                        log.warn("[OMS-ServerDiscoveryService] kill frequent instance(instanceId={}) due to can't find any available server.", instanceId);
+                        log.warn("[OmsServerDiscovery] kill frequent instance(instanceId={}) due to can't find any available server.", instanceId);
                     });
                 }
@@ -83,7 +83,7 @@ public class ServerDiscoveryService {
         } else {
             // reset the failure count
             FAILED_COUNT = 0;
-            log.debug("[OMS-ServerDiscoveryService] current server is {}.", result);
+            log.debug("[OmsServerDiscovery] current server is {}.", result);
             return result;
         }
     }
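
The quarantine logic above boils down to a consecutive-failure counter that any success resets; in isolation it looks like this (a minimal sketch with hypothetical helper names, not the project's API):

    private int failedCount = 0;
    private static final int MAX_FAILED_COUNT = 3;

    private String discover() {
        String server = tryFetchServer();           // hypothetical: null/empty on failure
        if (server == null || server.isEmpty()) {
            // only an unbroken run of misses triggers the kill switch
            if (failedCount++ > MAX_FAILED_COUNT) {
                killAllFrequentTaskTrackers();      // hypothetical helper
            }
            return null;
        }
        failedCount = 0;                            // any success resets the streak
        return server;
    }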

View File

@@ -1,26 +1,37 @@
 package com.github.kfcfans.powerjob.worker.common.utils;

-import java.util.LinkedHashMap;
-import java.util.Map;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+
+import java.util.function.BiConsumer;

 /**
  * LRU (Least Recently Used) cache
+ * before v3.1.1 this used LinkedHashMap, which could throw when accessed during modification; switched to Guava
  *
  * @author tjq
  * @since 2020/4/8
  */
-public class LRUCache<K, V> extends LinkedHashMap<K, V> {
+public class LRUCache<K, V> {

-    private final int cacheSize;
+    private final Cache<K, V> innerCache;

     public LRUCache(int cacheSize) {
-        super((int) Math.ceil(cacheSize / 0.75) + 1, 0.75f, false);
-        this.cacheSize = cacheSize;
+        innerCache = CacheBuilder.newBuilder()
+                .concurrencyLevel(2)
+                .initialCapacity(cacheSize)
+                .build();
     }

-    @Override
-    protected boolean removeEldestEntry(Map.Entry<K,V> eldest) {
-        // return true once the threshold is exceeded, evicting the eldest entry (LRU)
-        return size() > cacheSize;
+    public void forEach(BiConsumer<? super K, ? super V> action) {
+        innerCache.asMap().forEach(action);
+    }
+
+    public V get(K key) {
+        return innerCache.getIfPresent(key);
+    }
+
+    public void put(K key, V value) {
+        innerCache.put(key, value);
     }
 }
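
One note on the replacement: CacheBuilder here sets only initialCapacity, which sizes the internal tables but does not cap the cache, so nothing is ever evicted. If a hard LRU bound is intended, Guava's size-based eviction is the missing call (a sketch of that assumption, not what this commit ships):

    innerCache = CacheBuilder.newBuilder()
            .concurrencyLevel(2)
            .initialCapacity(cacheSize)
            .maximumSize(cacheSize)   // evicts entries in approximate LRU order once full
            .build();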

View File

@@ -7,6 +7,7 @@ import com.github.kfcfans.powerjob.common.model.DeployedContainerInfo;
 import com.github.kfcfans.powerjob.common.request.ServerDeployContainerRequest;
 import com.github.kfcfans.powerjob.common.request.WorkerNeedDeployContainerRequest;
 import com.github.kfcfans.powerjob.common.response.AskResponse;
+import com.github.kfcfans.powerjob.common.utils.CommonUtils;
 import com.github.kfcfans.powerjob.worker.OhMyWorker;
 import com.github.kfcfans.powerjob.worker.common.utils.AkkaUtils;
 import com.github.kfcfans.powerjob.worker.common.utils.OmsWorkerFileUtils;
@@ -67,7 +68,7 @@ public class OmsContainerFactory {
                 deployContainer(deployRequest);
             }
         } catch (Exception e) {
-            log.error("[OmsContainer-{}] deployed container failed, exception is {}", containerId, e.toString());
+            log.error("[OmsContainer-{}] get container failed, exception is {}", containerId, e.toString());
         }
         return CARGO.get(containerId);
@@ -92,11 +93,11 @@ public class OmsContainerFactory {
             return;
         }
-        try {
-            // download the Container locally
-            String filePath = OmsWorkerFileUtils.getContainerDir() + containerId + "/" + version + ".jar";
-            File jarFile = new File(filePath);
+        String filePath = OmsWorkerFileUtils.getContainerDir() + containerId + "/" + version + ".jar";
+        // download the Container locally
+        File jarFile = new File(filePath);
+        try {
             if (!jarFile.exists()) {
                 FileUtils.forceMkdirParent(jarFile);
                 FileUtils.copyURLToFile(new URL(request.getDownloadURL()), jarFile, 5000, 300000);
@@ -118,6 +119,8 @@ public class OmsContainerFactory {
         } catch (Exception e) {
             log.error("[OmsContainer-{}] deployContainer(name={},version={}) failed.", containerId, containerName, version, e);
+            // on failed deployment delete the jar: the failure was likely a broken download, and keeping the file would block this version from ever redeploying
+            CommonUtils.executeIgnoreException(() -> FileUtils.forceDelete(jarFile));
         }
     }
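
Distilled, the fix hoists jarFile out of the try so the catch block can clean up a partial artifact (a sketch with hypothetical names; the real method carries more state):

    File jarFile = new File(jarPath);       // jarPath assumed computed beforehand
    try {
        if (!jarFile.exists()) {
            FileUtils.forceMkdirParent(jarFile);
            FileUtils.copyURLToFile(new URL(downloadUrl), jarFile, 5000, 300000);
        }
        // ... load and deploy the container from jarFile ...
    } catch (Exception e) {
        // a half-written jar must not survive, or this version can never redeploy
        FileUtils.deleteQuietly(jarFile);
    }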

View File

@@ -114,7 +114,7 @@ public class FrequentTaskTracker extends TaskTracker {
         InstanceDetail.SubInstanceDetail subDetail = new InstanceDetail.SubInstanceDetail();
         BeanUtils.copyProperties(subInstanceInfo, subDetail);
         InstanceStatus status = InstanceStatus.of(subInstanceInfo.status);
-        subDetail.setStatus(status.getDes());
+        subDetail.setStatus(status.getV());
         subDetail.setSubInstanceId(subId);
         // set the time fields
@@ -347,8 +347,8 @@ public class FrequentTaskTracker extends TaskTracker {
         subInstanceId2TimeHolder.remove(subInstanceId);
         // update the cached data
-        if (recentSubInstanceInfo.containsKey(subInstanceId)) {
-            SubInstanceInfo subInstanceInfo = recentSubInstanceInfo.get(subInstanceId);
+        SubInstanceInfo subInstanceInfo = recentSubInstanceInfo.get(subInstanceId);
+        if (subInstanceInfo != null) {
             subInstanceInfo.status = success ? InstanceStatus.SUCCEED.getV() : InstanceStatus.FAILED.getV();
             subInstanceInfo.result = result;
             subInstanceInfo.finishedTime = System.currentTimeMillis();