Merge branch '4.3.0'

tjq 2023-01-28 12:17:12 +08:00
commit ae36ccf75a
184 changed files with 4689 additions and 2046 deletions

View File

@ -1,5 +1,9 @@
# [English](./README.md) | 简体中文
+ <p align="center">
+ Wishing everyone an auspicious Year of the Rabbit: good health, success in everything, happy families, and well-being in the new year!
+ </p>
<p align="center">
<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/logo.png" alt="PowerJob" title="PowerJob" width="557"/>
</p>
@ -34,8 +38,7 @@ PowerJob (formerly OhMyScheduler) is a new-generation distributed scheduling and computing framework
PowerJob is designed as an enterprise-grade distributed task scheduling platform, i.e. a company-internal **task scheduling middleware**. The company deploys a single scheduling center (powerjob-server), and every business application only needs to depend on `powerjob-worker` to connect to it and gain task scheduling and distributed computing capabilities.
### Online Trial
- * Trial address: [try.powerjob.tech](http://try.powerjob.tech/#/welcome?appName=powerjob-agent-test&password=123)
- * [It is recommended to read the tutorial first to understand PowerJob's concepts and basic usage](https://www.yuque.com/powerjob/guidence/trial)
+ * [Click here for the trial guide and tutorial](https://www.yuque.com/powerjob/guidence/trial)
### Comparison with similar products
| | QuartZ | xxl-job | SchedulerX 2.0 | PowerJob |

View File

@ -5,14 +5,12 @@
version: '3'
services:
  powerjob-mysql:
-   build:
-     context: ./others
    environment:
      MYSQL_ROOT_HOST: "%"
      MYSQL_ROOT_PASSWORD: No1Bug2Please3!
    restart: always
    container_name: powerjob-mysql
-   image: powerjob/powerjob-mysql:4.1.1
+   image: powerjob/powerjob-mysql:latest
    ports:
      - "3307:3306"
    volumes:
@ -21,7 +19,7 @@ services:
  powerjob-server:
    container_name: powerjob-server
-   image: tjqq/powerjob-server:latest
+   image: powerjob/powerjob-server:latest
    restart: always
    depends_on:
      - powerjob-mysql
@ -36,7 +34,7 @@ services:
  powerjob-worker-samples:
    container_name: powerjob-worker-samples
-   image: tjqq/powerjob-worker-samples:latest
+   image: powerjob/powerjob-worker-samples:latest
    restart: always
    depends_on:
      - powerjob-mysql

others/dev/build_test_env.sh (new executable file, 19 lines)
View File

@ -0,0 +1,19 @@
#!/bin/bash
# Build the PowerJob test environment
echo "================== Stopping all services =================="
docker-compose down
echo "================== Building jars =================="
cd `dirname $0`/../.. || exit
# mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
# -U: force snapshot updates; -pl: modules to build (comma-separated); -am: also build required dependent modules (usually combined with -pl); -P<xxx>: select the Maven profile
mvn clean package -Pdev -DskipTests
echo "================== Copying jars =================="
/bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
/bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
ls -l powerjob-server/docker/powerjob-server.jar
ls -l powerjob-worker-agent/powerjob-agent.jar
cd others/dev
docker-compose build
docker-compose up

View File

@ -0,0 +1,75 @@
# PowerJob test environment
version: '3'
services:
powerjob-mysql:
build:
context: ../
environment:
MYSQL_ROOT_HOST: "%"
MYSQL_ROOT_PASSWORD: No1Bug2Please3!
restart: always
container_name: powerjob-mysql
image: powerjob/powerjob-mysql:test_env
ports:
- "3309:3306"
volumes:
- ~/powerjob-data/powerjob-mysql:/var/lib/mysql
command: --lower_case_table_names=1
powerjob-server:
build:
context: ../../powerjob-server/docker
container_name: powerjob-server
image: powerjob/powerjob-server:test_env
restart: always
depends_on:
- powerjob-mysql
environment:
PARAMS: "--spring.profiles.active=product --oms.mongodb.enable=false --spring.datasource.core.jdbc-url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai"
JVMOPTIONS: "-Xmx1024m"
ports:
- "7700:7700"
- "10086:10086"
- "10010:10010"
volumes:
- ~/powerjob-data/powerjob-server:/root/powerjob/server/
powerjob-worker-agent:
build:
context: ../../powerjob-worker-agent
container_name: powerjob-worker-agent
image: powerjob/powerjob-worker-agent:test_env
restart: always
depends_on:
- powerjob-mysql
- powerjob-server
ports:
- "5002:5005"
- "10002:10000"
- "27777:27777"
volumes:
- ~/powerjob-data/powerjob-worker-agent:/root
entrypoint:
- "sh"
- "-c"
- "./wait-for-it.sh powerjob-server:7700 --strict -- java -Xmx768m -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -jar /powerjob-agent.jar --app powerjob-worker-samples --server powerjob-server:7700"
powerjob-worker-agent2:
container_name: powerjob-worker-agent2
image: powerjob/powerjob-worker-agent:test_env
restart: always
depends_on:
- powerjob-mysql
- powerjob-server
ports:
- "5003:5005"
- "10003:10000"
- "27778:27777"
volumes:
- ~/powerjob-data/powerjob-worker-agent2:/root
entrypoint:
- "sh"
- "-c"
- "./wait-for-it.sh powerjob-server:7700 --strict -- java -Xmx768m -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -jar /powerjob-agent.jar --app powerjob-worker-samples --server powerjob-server:7700"

View File

@ -33,11 +33,19 @@ read -r -p "Rebuild the images? y/n:" rebuild
if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
  echo "================== Removing old images =================="
  docker rmi -f tjqq/powerjob-server:$version
+ docker rmi -f powerjob/powerjob-server:$version
  docker rmi -f tjqq/powerjob-agent:$version
+ docker rmi -f powerjob/powerjob-agent:$version
+ docker rmi -f powerjob/powerjob-mysql:$version
+ docker rmi -f powerjob/powerjob-worker-samples:$version
  echo "================== Building the powerjob-server image =================="
  docker build -t tjqq/powerjob-server:$version powerjob-server/docker/. || exit
  echo "================== Building the powerjob-agent image =================="
  docker build -t tjqq/powerjob-agent:$version powerjob-worker-agent/. || exit
+ echo "================== Building the powerjob-mysql image =================="
+ docker build -t powerjob/powerjob-mysql:$version others/. || exit
+ echo "================== Building the powerjob-worker-samples image =================="
+ docker build -t powerjob/powerjob-worker-samples:$version powerjob-worker-samples/. || exit
  read -r -p "Publish the images? y/n:" needrelease
  if [ "$needrelease" = "y" ] || [ "$needrelease" = "Y" ]; then
@ -47,6 +55,25 @@ if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
    docker push tjqq/powerjob-server:$version
    echo "================== Pushing the agent image to the registry =================="
    docker push tjqq/powerjob-agent:$version
+   echo "================== Pushing the powerjob-mysql image to the registry =================="
+   docker push powerjob/powerjob-mysql:$version
+   echo "================== Pushing the samples image to the registry =================="
+   docker push powerjob/powerjob-worker-samples:$version
+   echo "================== Dual-namespace push (tjqq/ and powerjob/) =================="
+   docker tag tjqq/powerjob-server:$version powerjob/powerjob-server:$version
+   docker push powerjob/powerjob-server:$version
+   docker tag tjqq/powerjob-agent:$version powerjob/powerjob-agent:$version
+   docker push powerjob/powerjob-agent:$version
+   echo "================== Updating the LATEST tags =================="
+   docker tag powerjob/powerjob-server:$version powerjob/powerjob-server:latest
+   docker push powerjob/powerjob-server:latest
+   docker tag powerjob/powerjob-agent:$version powerjob/powerjob-agent:latest
+   docker push powerjob/powerjob-agent:latest
+   docker tag powerjob/powerjob-mysql:$version powerjob/powerjob-mysql:latest
+   docker push powerjob/powerjob-mysql:latest
+   docker tag powerjob/powerjob-worker-samples:$version powerjob/powerjob-worker-samples:latest
+   docker push powerjob/powerjob-worker-samples:latest
+   echo "================== Docker push finished =================="
  fi
fi
fi

View File

@ -6,7 +6,7 @@
<groupId>tech.powerjob</groupId>
<artifactId>powerjob</artifactId>
- <version>3.0.0</version>
+ <version>4.0.0</version>
<packaging>pom</packaging>
<name>powerjob</name>
<url>http://www.powerjob.tech</url>
@ -44,6 +44,7 @@
<module>powerjob-worker-spring-boot-starter</module>
<module>powerjob-worker-samples</module>
<module>powerjob-official-processors</module>
+ <module>powerjob-remote</module>
</modules>
<properties>

View File

@ -5,18 +5,18 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
- <version>3.0.0</version>
+ <version>4.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-client</artifactId>
- <version>4.2.1</version>
+ <version>4.3.0</version>
<packaging>jar</packaging>
<properties>
<junit.version>5.9.1</junit.version>
<fastjson.version>1.2.83</fastjson.version>
- <powerjob.common.version>4.2.1</powerjob.common.version>
+ <powerjob.common.version>4.3.0</powerjob.common.version>
<mvn.shade.plugin.version>3.2.4</mvn.shade.plugin.version>
</properties>

View File

@ -5,12 +5,12 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
- <version>3.0.0</version>
+ <version>4.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-common</artifactId>
- <version>4.2.1</version>
+ <version>4.3.0</version>
<packaging>jar</packaging>
<properties>
@ -19,7 +19,6 @@
<commons.io.version>2.11.0</commons.io.version>
<guava.version>31.1-jre</guava.version>
<okhttp.version>3.14.9</okhttp.version>
- <akka.version>2.6.12</akka.version>
<kryo.version>5.3.0</kryo.version>
<jackson.version>2.14.0-rc1</jackson.version>
<junit.version>5.9.0</junit.version>
@ -54,18 +53,6 @@
<version>${okhttp.version}</version>
</dependency>
- <!-- akka remote -->
- <dependency>
-   <groupId>com.typesafe.akka</groupId>
-   <artifactId>akka-remote_2.13</artifactId>
-   <version>${akka.version}</version>
- </dependency>
- <dependency>
-   <groupId>com.typesafe.akka</groupId>
-   <artifactId>akka-slf4j_2.13</artifactId>
-   <version>${akka.version}</version>
- </dependency>
<!-- commons-io -->
<dependency>
<groupId>commons-io</groupId>

View File

@ -8,6 +8,11 @@ package tech.powerjob.common;
*/
public class OmsConstant {
+ /**
+  * package name
+  */
+ public static final String PACKAGE = "tech.powerjob";
public static final int SERVER_DEFAULT_AKKA_PORT = 10086;
public static final int SERVER_DEFAULT_HTTP_PORT = 10010;
@ -17,6 +22,10 @@ public class OmsConstant {
public static final String NONE = "N/A";
public static final String COMMA = ",";
+ public static final String AND = "&";
+ public static final String EQUAL = "=";
public static final String LINE_SEPARATOR = "\r\n";
public static final String HTTP_HEADER_CONTENT_TYPE = "Content-Type";
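The new AND / EQUAL constants are plain separators. A minimal, hedged sketch of how such constants could be used to assemble an HTTP query string; the buildQuery helper below is illustrative only and not part of this commit:

// Sketch: build "k1=v1&k2=v2" from a parameter map using OmsConstant.EQUAL / OmsConstant.AND.
import tech.powerjob.common.OmsConstant;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

class QueryStringSketch {
    static String buildQuery(Map<String, Object> params) {
        return params.entrySet().stream()
                .map(e -> e.getKey() + OmsConstant.EQUAL + e.getValue())
                .collect(Collectors.joining(OmsConstant.AND));
    }

    public static void main(String[] args) {
        Map<String, Object> params = new LinkedHashMap<>();
        params.put("appId", 1L);
        params.put("protocol", "HTTP");
        System.out.println(buildQuery(params)); // prints: appId=1&protocol=HTTP
    }
}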

View File

@ -23,6 +23,17 @@ public class PowerJobDKey {
*/
public static final String IGNORED_NETWORK_INTERFACE_REGEX = "powerjob.network.interface.ignored";
+ /**
+  * Enables compression during data transfer, such as gzip under the HTTP protocol. Default value is 'false'.
+  * Note that enabling compression reduces network usage but increases CPU consumption.
+  */
+ public static final String TRANSPORTER_USE_COMPRESSING = "powerjob.transporter.compression.enabled";
+ /**
+  * Keep-alive connection timeout (in seconds); a value <= 0 disables keep-alive. Default value is 75.
+  */
+ public static final String TRANSPORTER_KEEP_ALIVE_TIMEOUT = "powerjob.transporter.keepalive.timeout";
public static final String WORKER_STATUS_CHECK_PERIOD = "powerjob.worker.status-check.normal.period";
/**
@ -30,6 +41,8 @@
 * <a href="https://stackoverflow.com/questions/16504140/thread-stop-deprecated">It's VERY dangerous</a>
 */
public static final String WORKER_ALLOWED_FORCE_STOP_THREAD = "powerjob.worker.allowed-force-stop-thread";
+ public static final String WORKER_WORK_SPACE = "powerjob.worker.workspace";
/**
 * ms
 */
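The two new transporter keys are ordinary JVM system-property names, typically supplied on the command line, for example -Dpowerjob.transporter.compression.enabled=true. A hedged sketch of reading them with the documented defaults; the exact consumption point inside the transporter is not shown in this diff, and the import path is assumed:

// Sketch only: reads the new keys as system properties (defaults taken from the javadoc above).
import tech.powerjob.common.PowerJobDKey; // import path assumed from the surrounding powerjob-common classes

class TransporterPropsSketch {
    public static void main(String[] args) {
        boolean compress = Boolean.parseBoolean(
                System.getProperty(PowerJobDKey.TRANSPORTER_USE_COMPRESSING, "false"));
        int keepAliveSeconds = Integer.parseInt(
                System.getProperty(PowerJobDKey.TRANSPORTER_KEEP_ALIVE_TIMEOUT, "75"));
        System.out.println("compression=" + compress + ", keepAlive=" + keepAliveSeconds + "s");
    }
}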

View File

@ -9,12 +9,4 @@ import java.io.Serializable;
 * @since 2020/4/16
 */
public interface PowerSerializable extends Serializable {
- /**
-  * request path for http or other protocol, like '/worker/stopInstance'
-  * @return null for non-http request object or no-null path for http request needed object
-  */
- default String path() {
-     return null;
- }
}

View File

@ -1,18 +0,0 @@
package tech.powerjob.common;
/**
* HttpProtocolConstant
*
* @author tjq
* @since 2021/2/8
*/
public class ProtocolConstant {
public static final String SERVER_PATH_HEARTBEAT = "/server/heartbeat";
public static final String SERVER_PATH_STATUS_REPORT = "/server/statusReport";
public static final String SERVER_PATH_LOG_REPORT = "/server/logReport";
public static final String WORKER_PATH_DISPATCH_JOB = "/worker/runJob";
public static final String WORKER_PATH_STOP_INSTANCE = "/worker/stopInstance";
public static final String WORKER_PATH_QUERY_INSTANCE_INFO = "/worker/queryInstanceInfo";
}

View File

@ -12,25 +12,93 @@ public class RemoteConstant {
/* ************************ AKKA WORKER ************************ */
public static final int DEFAULT_WORKER_PORT = 27777;
- public static final String WORKER_ACTOR_SYSTEM_NAME = "oms";
- public static final String TASK_TRACKER_ACTOR_NAME = "task_tracker";
- public static final String PROCESSOR_TRACKER_ACTOR_NAME = "processor_tracker";
- public static final String WORKER_ACTOR_NAME = "worker";
- public static final String TROUBLESHOOTING_ACTOR_NAME = "troubleshooting";
- public static final String WORKER_AKKA_CONFIG_NAME = "oms-worker.akka.conf";
- /* ************************ AKKA SERVER ************************ */
- public static final String SERVER_ACTOR_SYSTEM_NAME = "oms-server";
- public static final String SERVER_ACTOR_NAME = "server_actor";
- public static final String SERVER_FRIEND_ACTOR_NAME = "friend_actor";
- public static final String SERVER_AKKA_CONFIG_NAME = "oms-server.akka.conf";
/* ************************ OTHERS ************************ */
public static final String EMPTY_ADDRESS = "N/A";
public static final long DEFAULT_TIMEOUT_MS = 5000;
+ /* ************************ SERVER-self_side (s4s == server for server side) ************************ */
+ public static final String S4S_PATH = "friend";
+ /**
+  * heartbeat handling between servers in the cluster
+  */
+ public static final String S4S_HANDLER_PING = "ping";
+ /**
+  * handles execution requests coming from other servers
+  */
+ public static final String S4S_HANDLER_PROCESS = "process";
+ /* ************************ SERVER-worker_side (s4w == server for worker side) ************************ */
+ public static final String S4W_PATH = "server";
+ /**
+  * server handles online log reporting
+  */
+ public static final String S4W_HANDLER_REPORT_LOG = "reportLog";
+ /**
+  * server handles worker heartbeats
+  */
+ public static final String S4W_HANDLER_WORKER_HEARTBEAT = "workerHeartbeat";
+ /**
+  * server handles job instance status reported by the TaskTracker
+  */
+ public static final String S4W_HANDLER_REPORT_INSTANCE_STATUS = "reportInstanceStatus";
+ /**
+  * server queries the executable cluster for a job
+  */
+ public static final String S4W_HANDLER_QUERY_JOB_CLUSTER = "queryJobCluster";
+ /**
+  * server handles a worker's request for a container deployment command
+  */
+ public static final String S4W_HANDLER_WORKER_NEED_DEPLOY_CONTAINER = "queryContainer";
+ /* ************************ Worker-TaskTracker ************************ */
+ public static final String WTT_PATH = "taskTracker";
+ /**
+  * server's job execution command
+  */
+ public static final String WTT_HANDLER_RUN_JOB = "runJob";
+ /**
+  * server's command to stop a job instance
+  */
+ public static final String WTT_HANDLER_STOP_INSTANCE = "stopInstance";
+ /**
+  * server queries the job instance status
+  */
+ public static final String WTT_HANDLER_QUERY_INSTANCE_STATUS = "queryInstanceStatus";
+ /**
+  * ProcessorTracker reports task status, including execution results
+  */
+ public static final String WTT_HANDLER_REPORT_TASK_STATUS = "reportTaskStatus";
+ /**
+  * ProcessorTracker reports its own status
+  */
+ public static final String WTT_HANDLER_REPORT_PROCESSOR_TRACKER_STATUS = "reportProcessorTrackerStatus";
+ /**
+  * Map task
+  */
+ public static final String WTT_HANDLER_MAP_TASK = "mapTask";
+ /* ************************ Worker-ProcessorTracker ************************ */
+ public static final String WPT_PATH = "processorTracker";
+ public static final String WPT_HANDLER_START_TASK = "startTask";
+ public static final String WPT_HANDLER_STOP_INSTANCE = "stopInstance";
+ /* ************************ Worker-NORMAL ************************ */
+ public static final String WORKER_PATH = "worker";
+ public static final String WORKER_HANDLER_DEPLOY_CONTAINER = "deployContainer";
+ public static final String WORKER_HANDLER_DESTROY_CONTAINER = "destroyContainer";
}
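The path/handler pairs above take over from the removed ProtocolConstant URLs: a root path plus a handler name yields the same kind of route the old constants spelled out (e.g. "/server/workerHeartbeat"). A minimal sketch of how the pieces line up; the route() helper and the import path are illustrative assumptions, not the framework's actual routing code:

// Illustrative only: shows how (rootPath, handlerPath) pairs map to the old URL-style paths.
import tech.powerjob.common.RemoteConstant; // import path assumed from the surrounding powerjob-common classes

class RoutePathSketch {
    static String route(String rootPath, String handlerPath) {
        return "/" + rootPath + "/" + handlerPath;
    }

    public static void main(String[] args) {
        System.out.println(route(RemoteConstant.S4W_PATH, RemoteConstant.S4W_HANDLER_WORKER_HEARTBEAT)); // /server/workerHeartbeat
        System.out.println(route(RemoteConstant.WTT_PATH, RemoteConstant.WTT_HANDLER_STOP_INSTANCE));    // /taskTracker/stopInstance
    }
}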

View File

@ -0,0 +1,59 @@
package tech.powerjob.common.request;
import lombok.Setter;
import lombok.experimental.Accessors;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.common.enums.Protocol;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
/**
* Server discovery request
*
* @author tjq
* @since 2023/1/21
*/
@Setter
@Accessors(chain = true)
public class ServerDiscoveryRequest implements Serializable {
private Long appId;
private String protocol;
private String currentServer;
private String clientVersion;
public Map<String, Object> toMap() {
Map<String, Object> ret = new HashMap<>();
ret.put("appId", appId);
ret.put("protocol", protocol);
if (StringUtils.isNotEmpty(currentServer)) {
ret.put("currentServer", currentServer);
}
if (StringUtils.isNotEmpty(clientVersion)) {
ret.put("clientVersion", clientVersion);
}
return ret;
}
public Long getAppId() {
return appId;
}
public String getProtocol() {
return Optional.ofNullable(protocol).orElse(Protocol.AKKA.name());
}
public String getCurrentServer() {
return currentServer;
}
public String getClientVersion() {
return clientVersion;
}
}
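A short usage sketch of the discovery request above; the chained setters come from Lombok's @Setter with @Accessors(chain = true), and toMap() produces exactly the query parameters shown in the class (the worker-side HTTP call that consumes the map is outside this hunk):

import tech.powerjob.common.request.ServerDiscoveryRequest;

class DiscoverySketch {
    public static void main(String[] args) {
        ServerDiscoveryRequest req = new ServerDiscoveryRequest()
                .setAppId(1L)
                .setProtocol("HTTP")
                .setCurrentServer("192.168.1.1:10086")
                .setClientVersion("4.3.0");
        // expected entries (iteration order may vary): appId=1, protocol=HTTP,
        // currentServer=192.168.1.1:10086, clientVersion=4.3.0
        System.out.println(req.toMap());
        // note: if protocol is never set, getProtocol() falls back to Protocol.AKKA.name()
    }
}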

View File

@ -1,10 +1,9 @@
package tech.powerjob.common.request;
- import tech.powerjob.common.PowerSerializable;
- import tech.powerjob.common.ProtocolConstant;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
+ import tech.powerjob.common.PowerSerializable;
/**
 * The server queries an instance's running status; detailed runtime data must be returned
@ -18,8 +17,4 @@ import lombok.NoArgsConstructor;
public class ServerQueryInstanceStatusReq implements PowerSerializable {
private Long instanceId;
- @Override
- public String path() {
-     return ProtocolConstant.WORKER_PATH_QUERY_INSTANCE_INFO;
- }
}

View File

@ -1,8 +1,7 @@
package tech.powerjob.common.request;
- import tech.powerjob.common.PowerSerializable;
- import tech.powerjob.common.ProtocolConstant;
import lombok.Data;
+ import tech.powerjob.common.PowerSerializable;
import java.util.List;
@ -98,9 +97,4 @@ public class ServerScheduleJobReq implements PowerSerializable {
 * log configuration
 */
private String logConfig;
- @Override
- public String path() {
-     return ProtocolConstant.WORKER_PATH_DISPATCH_JOB;
- }
}

View File

@ -1,10 +1,9 @@
package tech.powerjob.common.request;
- import tech.powerjob.common.PowerSerializable;
- import tech.powerjob.common.ProtocolConstant;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
+ import tech.powerjob.common.PowerSerializable;
/**
@ -18,9 +17,4 @@ import lombok.NoArgsConstructor;
@AllArgsConstructor
public class ServerStopInstanceReq implements PowerSerializable {
private Long instanceId;
- @Override
- public String path() {
-     return ProtocolConstant.WORKER_PATH_STOP_INSTANCE;
- }
}

View File

@ -3,10 +3,11 @@ package tech.powerjob.common.serialize;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
- import com.fasterxml.jackson.databind.ObjectMapper;
+ import com.fasterxml.jackson.databind.MapperFeature;
+ import com.fasterxml.jackson.databind.json.JsonMapper;
import lombok.extern.slf4j.Slf4j;
- import tech.powerjob.common.exception.PowerJobException;
import org.apache.commons.lang3.exception.ExceptionUtils;
+ import tech.powerjob.common.exception.PowerJobException;
import java.io.IOException;
@ -19,13 +20,11 @@
@Slf4j
public class JsonUtils {
- private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
- static {
-     OBJECT_MAPPER.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
-     //
-     OBJECT_MAPPER.configure(JsonParser.Feature.IGNORE_UNDEFINED, true);
- }
+ private static final JsonMapper JSON_MAPPER = JsonMapper.builder()
+         .configure(MapperFeature.PROPAGATE_TRANSIENT_MARKER, true)
+         .configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true)
+         .configure(JsonParser.Feature.IGNORE_UNDEFINED, true)
+         .build();
private JsonUtils(){
@ -33,15 +32,16 @@
public static String toJSONString(Object obj) {
    try {
-       return OBJECT_MAPPER.writeValueAsString(obj);
-   }catch (Exception ignore) {
+       return JSON_MAPPER.writeValueAsString(obj);
+   }catch (Exception e) {
+       log.error("[PowerJob] toJSONString failed", e);
    }
    return null;
}
public static String toJSONStringUnsafe(Object obj) {
    try {
-       return OBJECT_MAPPER.writeValueAsString(obj);
+       return JSON_MAPPER.writeValueAsString(obj);
    }catch (Exception e) {
        throw new PowerJobException(e);
    }
@ -49,31 +49,32 @@
public static byte[] toBytes(Object obj) {
    try {
-       return OBJECT_MAPPER.writeValueAsBytes(obj);
-   }catch (Exception ignore) {
+       return JSON_MAPPER.writeValueAsBytes(obj);
+   }catch (Exception e) {
+       log.error("[PowerJob] serialize failed", e);
    }
    return null;
}
public static <T> T parseObject(String json, Class<T> clz) throws JsonProcessingException {
-   return OBJECT_MAPPER.readValue(json, clz);
+   return JSON_MAPPER.readValue(json, clz);
}
public static <T> T parseObject(byte[] b, Class<T> clz) throws IOException {
-   return OBJECT_MAPPER.readValue(b, clz);
+   return JSON_MAPPER.readValue(b, clz);
}
public static <T> T parseObject(byte[] b, TypeReference<T> typeReference) throws IOException {
-   return OBJECT_MAPPER.readValue(b, typeReference);
+   return JSON_MAPPER.readValue(b, typeReference);
}
public static <T> T parseObject(String json, TypeReference<T> typeReference) throws IOException {
-   return OBJECT_MAPPER.readValue(json, typeReference);
+   return JSON_MAPPER.readValue(json, typeReference);
}
public static <T> T parseObjectIgnoreException(String json, Class<T> clz) {
    try {
-       return OBJECT_MAPPER.readValue(json, clz);
+       return JSON_MAPPER.readValue(json, clz);
    }catch (Exception e) {
        log.error("unable to parse json string to object,current string:{}",json,e);
        return null;
@ -83,7 +84,7 @@ public class JsonUtils {
public static <T> T parseObjectUnsafe(String json, Class<T> clz) {
    try {
-       return OBJECT_MAPPER.readValue(json, clz);
+       return JSON_MAPPER.readValue(json, clz);
    }catch (Exception e) {
        ExceptionUtils.rethrow(e);
    }
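The switch to JsonMapper.builder() mainly adds MapperFeature.PROPAGATE_TRANSIENT_MARKER, so fields declared transient are now skipped during serialization. A small hedged sketch of that effect; the Payload class is hypothetical and only illustrates the feature:

import tech.powerjob.common.serialize.JsonUtils;

class TransientMarkerSketch {
    public static class Payload {
        public String name = "powerjob";
        // with PROPAGATE_TRANSIENT_MARKER enabled, Jackson treats this field as ignorable
        public transient String secret = "not-serialized";
    }

    public static void main(String[] args) {
        // expected output: {"name":"powerjob"}
        System.out.println(JsonUtils.toJSONString(new Payload()));
    }
}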

View File

@ -0,0 +1,33 @@
package tech.powerjob.common.utils;
import java.util.Collection;
import java.util.Map;
/**
* CollectionUtils
*
* @author tjq
* @since 2023/1/20
*/
public class CollectionUtils {
/**
* Return {@code true} if the supplied Collection is {@code null} or empty.
* Otherwise, return {@code false}.
* @param collection the Collection to check
* @return whether the given Collection is empty
*/
public static boolean isEmpty(Collection<?> collection) {
return (collection == null || collection.isEmpty());
}
/**
* Return {@code true} if the supplied Map is {@code null} or empty.
* Otherwise, return {@code false}.
* @param map the Map to check
* @return whether the given Map is empty
*/
public static boolean isEmpty(Map<?, ?> map) {
return (map == null || map.isEmpty());
}
}
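A trivial usage sketch of the helper above; the motivation (avoiding a Spring dependency for these checks in non-Spring modules) is an assumption, not stated in the commit:

import tech.powerjob.common.utils.CollectionUtils;
import java.util.Collections;

class CollectionUtilsSketch {
    public static void main(String[] args) {
        System.out.println(CollectionUtils.isEmpty(Collections.emptyList()));          // true
        System.out.println(CollectionUtils.isEmpty(Collections.singletonMap("k", "v"))); // false
    }
}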

View File

@ -42,14 +42,20 @@ public class JavaUtils {
if (connection instanceof JarURLConnection) {
    return getImplementationVersion(((JarURLConnection) connection).getJarFile());
}
- try (JarFile jarFile = new JarFile(new File(codeSourceLocation.toURI()))) {
+ final File file = new File(codeSourceLocation.toURI());
+ // version lookup fails when running from an IDE (classes directory instead of a jar)
+ if (!file.exists() || file.isDirectory()) {
+     return "UNKNOWN";
+ }
+ try (JarFile jarFile = new JarFile(file)) {
    return getImplementationVersion(jarFile);
}
}
catch (Throwable t) {
    log.warn("[JavaUtils] determinePackageVersion for clz[{}] failed, msg: {}", clz.getSimpleName(), t.toString());
+   // on Windows, a permission error here would otherwise keep failing and retrying, so return a fallback instead
+   return "UNKNOWN";
}
- return null;
}
private static String getImplementationVersion(JarFile jarFile) throws IOException {
return jarFile.getManifest().getMainAttributes().getValue(Attributes.Name.IMPLEMENTATION_VERSION);

View File

@ -5,12 +5,12 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
- <version>3.0.0</version>
+ <version>4.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-official-processors</artifactId>
- <version>1.2.2</version>
+ <version>1.3.0</version>
<packaging>jar</packaging>
<properties>
@ -20,10 +20,11 @@
<!-- Parts that are not packaged; scope may only be test or provided -->
<junit.version>5.9.1</junit.version>
<logback.version>1.2.9</logback.version>
- <powerjob.worker.version>4.2.1</powerjob.worker.version>
+ <powerjob.worker.version>4.3.0</powerjob.worker.version>
<spring.jdbc.version>5.2.9.RELEASE</spring.jdbc.version>
<h2.db.version>2.1.214</h2.db.version>
<mysql.version>8.0.28</mysql.version>
+ <spring.version>5.3.23</spring.version>
<!-- Shade everything to avoid dependency conflicts -->
<fastjson.version>1.2.83</fastjson.version>
@ -75,6 +76,13 @@
<scope>provided</scope>
</dependency>
+ <!-- Spring dependency (not a hard dependency) -->
+ <dependency>
+   <groupId>org.springframework</groupId>
+   <artifactId>spring-context</artifactId>
+   <version>${spring.version}</version>
+   <scope>provided</scope>
+ </dependency>
<dependency>
<groupId>org.springframework</groupId>
View File

@ -1,5 +1,6 @@
package tech.powerjob.official.processors.impl.script;
+ import tech.powerjob.worker.common.utils.PowerFileUtils;
import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.log.OmsLogger;
@ -34,7 +35,7 @@ public abstract class AbstractScriptProcessor extends CommonBasicProcessor {
protected static final String SH_SHELL = "/bin/sh";
protected static final String CMD_SHELL = "cmd.exe";
- private static final String WORKER_DIR = System.getProperty("user.home") + "/powerjob/worker/official_script_processor/";
+ private static final String WORKER_DIR = PowerFileUtils.workspace() + "/official_script_processor/";
@Override
protected ProcessResult process0(TaskContext context) throws Exception {

View File

@ -3,13 +3,13 @@ package tech.powerjob.official.processors.impl.sql;
import tech.powerjob.worker.core.processor.TaskContext;
import com.google.common.collect.Maps;
import lombok.extern.slf4j.Slf4j;
- import org.springframework.util.Assert;
import org.apache.commons.lang3.StringUtils;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Map;
+ import java.util.Objects;
/**
 * Simple Spring SQL processor; currently it can only be loaded as a Spring bean
@ -75,8 +75,8 @@ public class SpringDatasourceSqlProcessor extends AbstractSqlProcessor {
 * @param dataSource the data source
 */
public void registerDataSource(String dataSourceName, DataSource dataSource) {
- Assert.notNull(dataSourceName, "DataSource name must not be null");
- Assert.notNull(dataSource, "DataSource must not be null");
+ Objects.requireNonNull(dataSourceName, "DataSource name must not be null");
+ Objects.requireNonNull(dataSource, "DataSource must not be null");
dataSourceMap.put(dataSourceName, dataSource);
log.info("register data source({})' successfully.", dataSourceName);
}

powerjob-remote/pom.xml (new file, 47 lines)
View File

@ -0,0 +1,47 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<packaging>pom</packaging>
<modules>
<module>powerjob-remote-framework</module>
<module>powerjob-remote-benchmark</module>
<module>powerjob-remote-impl-http</module>
<module>powerjob-remote-impl-akka</module>
</modules>
<artifactId>powerjob-remote</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<junit.version>5.9.0</junit.version>
<logback.version>1.2.9</logback.version>
</properties>
<dependencies>
<!-- Junit tests -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<!-- log for test stage -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,106 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-remote-benchmark</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven-compiler-plugin.version>3.10.1</maven-compiler-plugin.version>
<maven-jar-plugin.version>3.2.2</maven-jar-plugin.version>
<logback.version>1.2.9</logback.version>
<springboot.version>2.7.4</springboot.version>
<powerjob-remote-impl-http.version>4.3.0</powerjob-remote-impl-http.version>
<powerjob-remote-impl-akka.version>4.3.0</powerjob-remote-impl-akka.version>
<gatling.version>3.9.0</gatling.version>
<gatling-maven-plugin.version>4.2.9</gatling-maven-plugin.version>
</properties>
<dependencies>
<!-- Performance-testing dependencies -->
<dependency>
<groupId>io.gatling.highcharts</groupId>
<artifactId>gatling-charts-highcharts</artifactId>
<version>${gatling.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<version>${springboot.version}</version>
</dependency>
<dependency>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob-remote-impl-http</artifactId>
<version>${powerjob-remote-impl-http.version}</version>
</dependency>
<dependency>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob-remote-impl-akka</artifactId>
<version>${powerjob-remote-impl-akka.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>${springboot.version}</version>
<configuration>
<mainClass>tech.powerjob.remote.benchmark.BenchmarkApplication</mainClass>
</configuration>
<executions>
<execution>
<goals>
<goal>repackage</goal><!-- packages all dependencies into the generated jar -->
</goals>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
</plugin>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
<version>${maven-jar-plugin.version}</version>
</plugin>
<plugin>
<groupId>io.gatling</groupId>
<artifactId>gatling-maven-plugin</artifactId>
<version>${gatling-maven-plugin.version}</version>
<configuration>
<!-- Enterprise Cloud (https://cloud.gatling.io/) configuration reference: https://gatling.io/docs/gatling/reference/current/extensions/maven_plugin/#working-with-gatling-enterprise-cloud -->
<!-- Enterprise Self-Hosted configuration reference: https://gatling.io/docs/gatling/reference/current/extensions/maven_plugin/#working-with-gatling-enterprise-self-hosted -->
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,19 @@
package tech.powerjob.remote.benchmark;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* Test project
* used for load testing the remote protocols
*
* @author tjq
* @since 2023/1/7
*/
@SpringBootApplication
public class BenchmarkApplication {
public static void main(String[] args) {
SpringApplication.run(BenchmarkApplication.class, args);
}
}

View File

@ -0,0 +1,68 @@
package tech.powerjob.remote.benchmark;
import com.google.common.collect.Lists;
import lombok.Getter;
import org.springframework.stereotype.Service;
import tech.powerjob.common.enums.Protocol;
import tech.powerjob.remote.framework.BenchmarkActor;
import tech.powerjob.remote.framework.base.Address;
import tech.powerjob.remote.framework.base.ServerType;
import tech.powerjob.remote.framework.engine.EngineConfig;
import tech.powerjob.remote.framework.engine.impl.PowerJobRemoteEngine;
import tech.powerjob.remote.framework.transporter.Transporter;
import javax.annotation.PostConstruct;
/**
* EngineService
*
* @author tjq
* @since 2023/1/7
*/
@Service
public class EngineService {
public static final String HOST = "127.0.0.1";
public static final int SERVER_AKKA_PORT = 10001;
public static final int SERVER_HTTP_PORT = 10002;
public static final int CLIENT_AKKA_PORT = 20001;
public static final int CLIENT_HTTP_PORT = 20002;
@Getter
private Transporter akkaTransporter;
@Getter
private Transporter httpTransporter;
@PostConstruct
public void init() {
// http server
new PowerJobRemoteEngine().start(new EngineConfig()
.setServerType(ServerType.SERVER)
.setActorList(Lists.newArrayList(new BenchmarkActor()))
.setType(Protocol.HTTP.name())
.setBindAddress(new Address().setHost(HOST).setPort(SERVER_HTTP_PORT)));
// akka server
new PowerJobRemoteEngine().start(new EngineConfig()
.setServerType(ServerType.SERVER)
.setActorList(Lists.newArrayList(new BenchmarkActor()))
.setType(Protocol.AKKA.name())
.setBindAddress(new Address().setHost(HOST).setPort(SERVER_AKKA_PORT)));
// http client
httpTransporter = new PowerJobRemoteEngine().start(new EngineConfig()
.setServerType(ServerType.WORKER)
.setActorList(Lists.newArrayList(new BenchmarkActor()))
.setType(Protocol.HTTP.name())
.setBindAddress(new Address().setHost(HOST).setPort(CLIENT_HTTP_PORT))).getTransporter();
// akka client
akkaTransporter = new PowerJobRemoteEngine().start(new EngineConfig()
.setServerType(ServerType.WORKER)
.setActorList(Lists.newArrayList(new BenchmarkActor()))
.setType(Protocol.AKKA.name())
.setBindAddress(new Address().setHost(HOST).setPort(CLIENT_AKKA_PORT))).getTransporter();
}
}

View File

@ -0,0 +1,82 @@
package tech.powerjob.remote.benchmark;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import tech.powerjob.common.enums.Protocol;
import tech.powerjob.remote.framework.BenchmarkActor;
import tech.powerjob.remote.framework.base.Address;
import tech.powerjob.remote.framework.base.HandlerLocation;
import tech.powerjob.remote.framework.base.URL;
import javax.annotation.Resource;
import java.util.concurrent.CompletionStage;
import static tech.powerjob.remote.benchmark.EngineService.*;
/**
* Load-test entry point
*
* @author tjq
* @since 2023/1/7
*/
@Slf4j
@RestController
@RequestMapping("/pressure")
public class PressureTestController {
private static final HandlerLocation HL = new HandlerLocation().setRootPath("benchmark").setMethodPath("standard");
@Resource
private EngineService engineService;
@GetMapping("/tell")
public void httpTell(String protocol, Integer blockMs, Integer responseSize, String content) {
Address address = new Address().setHost(HOST);
URL url = new URL().setLocation(HL).setAddress(address);
final BenchmarkActor.BenchmarkRequest request = new BenchmarkActor.BenchmarkRequest().setContent(content).setBlockingMills(blockMs).setResponseSize(responseSize);
try {
if (Protocol.HTTP.name().equalsIgnoreCase(protocol)) {
address.setPort(SERVER_HTTP_PORT);
engineService.getHttpTransporter().tell(url, request);
} else {
address.setPort(SERVER_AKKA_PORT);
engineService.getAkkaTransporter().tell(url, request);
}
} catch (Exception e) {
log.error("[HttpTell] process failed!", e);
ExceptionUtils.rethrow(e);
}
}
@GetMapping("/ask")
public void httpAsk(String protocol, Integer blockMs, Integer responseSize, String content, Boolean debug) {
Address address = new Address().setHost(HOST);
URL url = new URL().setLocation(HL).setAddress(address);
final BenchmarkActor.BenchmarkRequest request = new BenchmarkActor.BenchmarkRequest().setContent(content).setBlockingMills(blockMs).setResponseSize(responseSize);
try {
CompletionStage<BenchmarkActor.BenchmarkResponse> responseOpt = null;
if (Protocol.HTTP.name().equalsIgnoreCase(protocol)) {
address.setPort(SERVER_HTTP_PORT);
responseOpt = engineService.getHttpTransporter().ask(url, request, BenchmarkActor.BenchmarkResponse.class);
} else {
address.setPort(SERVER_AKKA_PORT);
responseOpt = engineService.getAkkaTransporter().ask(url, request, BenchmarkActor.BenchmarkResponse.class);
}
final BenchmarkActor.BenchmarkResponse response = responseOpt.toCompletableFuture().get();
if (BooleanUtils.isTrue(debug)) {
log.info("[httpAsk] response: {}", response);
}
} catch (Exception e) {
log.error("[httpAsk] process failed", e);
ExceptionUtils.rethrow(e);
}
}
}

View File

@ -0,0 +1,20 @@
import io.gatling.app.Gatling;
import io.gatling.core.config.GatlingPropertiesBuilder;
/**
* <a href="https://gatling.io/">Gatling load-test launcher</a>
*
* @author tjq
* @since 2023/1/8
*/
public class Engine {
public static void main(String[] args) {
GatlingPropertiesBuilder props = new GatlingPropertiesBuilder()
.resourcesDirectory(IDEPathHelper.mavenResourcesDirectory.toString())
.resultsDirectory(IDEPathHelper.resultsDirectory.toString())
.binariesDirectory(IDEPathHelper.mavenBinariesDirectory.toString());
Gatling.fromMap(props.build());
}
}

View File

@ -0,0 +1,33 @@
import java.net.URISyntaxException;
import java.nio.file.Path;
import java.nio.file.Paths;
import static java.util.Objects.requireNonNull;
/**
* @author <a href="https://github.com/gatling/gatling-maven-plugin-demo-java">gatling-maven-plugin-demo-java</a>
*/
public class IDEPathHelper {
static final Path mavenSourcesDirectory;
static final Path mavenResourcesDirectory;
static final Path mavenBinariesDirectory;
static final Path resultsDirectory;
static final Path recorderConfigFile;
static {
try {
Path projectRootDir = Paths.get(requireNonNull(IDEPathHelper.class.getResource("gatling.conf"), "Couldn't locate gatling.conf").toURI()).getParent().getParent().getParent();
Path mavenTargetDirectory = projectRootDir.resolve("target");
Path mavenSrcTestDirectory = projectRootDir.resolve("src").resolve("test");
mavenSourcesDirectory = mavenSrcTestDirectory.resolve("java");
mavenResourcesDirectory = mavenSrcTestDirectory.resolve("resources");
mavenBinariesDirectory = mavenTargetDirectory.resolve("test-classes");
resultsDirectory = mavenTargetDirectory.resolve("gatling");
recorderConfigFile = mavenResourcesDirectory.resolve("recorder.conf");
} catch (URISyntaxException e) {
throw new ExceptionInInitializerError(e);
}
}
}

View File

@ -0,0 +1,14 @@
package tech.powerjob.remote.benchmark;
/**
* Constant
* Constants to adjust when load testing
*
* @author tjq
* @since 2023/1/8
*/
public class Constant {
public static final String SERVER_HOST = "127.0.0.1";
}

View File

@ -0,0 +1,62 @@
package tech.powerjob.remote.benchmark;
import static io.gatling.javaapi.core.CoreDsl.*;
import static io.gatling.javaapi.http.HttpDsl.*;
import io.gatling.javaapi.core.*;
import io.gatling.javaapi.http.*;
/**
* Load test driven through HTTP endpoints
*
* @author tjq
* @since 2023/1/8
*/
public class HttpSimulation extends Simulation {
String baseUrl = String.format("http://%s:8080", Constant.SERVER_HOST);
HttpProtocolBuilder httpProtocol = http // 4
.baseUrl(baseUrl) // 5
.acceptHeader("application/json") // 6
.doNotTrackHeader("1")
.acceptLanguageHeader("en-US,en;q=0.5")
.acceptEncodingHeader("gzip, deflate")
.userAgentHeader("Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0");
ScenarioBuilder warmup = scenario("WarmupSimulation")
.exec(http("PowerJob-Warmup-HTTP")
.get("/pressure/ask?protocol=HTTP&debug=false&responseSize=1024"))
.exec(http("PowerJob-Warmup-AKKA")
.get("/pressure/ask?protocol=AKKA&debug=false&responseSize=1024"))
;
ScenarioBuilder httpAsk = scenario("HttpSimulation") // 7
.exec(http("PowerJob-Remote-Http") // request name, shown in the load-test report
.get("/pressure/ask?protocol=HTTP&debug=false&responseSize=1024")) // 9
;
ScenarioBuilder akkaAsk = scenario("AkkaSimulation") // 7
.exec(http("PowerJob-Remote-AKKA") // request name, shown in the load-test report
.get("/pressure/ask?protocol=AKKA&debug=false&responseSize=1024")) // 9
;
/*
atOnceUsers(10): inject the given number of users (10) at once
nothingFor(4 seconds): do nothing for the given period (4 seconds)
constantUsersPerSec(10) during(20 seconds): inject users at a constant rate (10 per second) for the given duration (20 seconds)
rampUsersPerSec(10) to (20) during(20 seconds): linearly ramp the injection rate from the first value (10/s) to the second (20/s) over the given duration (20 seconds)
heavisideUsers(100) over(10 seconds): over the given duration (10 seconds), ramp up concurrent users following a smoothed step (Heaviside) function until the total reaches the given count (100); in short, the per-second user count increases gradually
*/
{
setUp( // 11
warmup.injectOpen(constantUsersPerSec(50).during(10))
.andThen(
httpAsk.injectOpen(incrementUsersPerSec(100).times(10).eachLevelLasting(10))
)
.andThen(
akkaAsk.injectOpen(incrementUsersPerSec(100).times(10).eachLevelLasting(10))
)
).protocols(httpProtocol); // 13
}
}

View File

@ -0,0 +1,127 @@
#########################
# Gatling Configuration #
#########################
# This file contains all the settings configurable for Gatling with their default values
gatling {
core {
#outputDirectoryBaseName = "" # The prefix for each simulation result folder (then suffixed by the report generation timestamp)
#runDescription = "" # The description for this simulation run, displayed in each report
#encoding = "utf-8" # Encoding to use throughout Gatling for file and string manipulation
#simulationClass = "" # The FQCN of the simulation to run (when used in conjunction with noReports, the simulation for which assertions will be validated)
#elFileBodiesCacheMaxCapacity = 200 # Cache size for request body EL templates, set to 0 to disable
#rawFileBodiesCacheMaxCapacity = 200 # Cache size for request body Raw templates, set to 0 to disable
#rawFileBodiesInMemoryMaxSize = 1000 # Below this limit, raw file bodies will be cached in memory
#pebbleFileBodiesCacheMaxCapacity = 200 # Cache size for request body Peeble templates, set to 0 to disable
#feederAdaptiveLoadModeThreshold = 100 # File size threshold (in MB). Below load eagerly in memory, above use batch mode with default buffer size
#shutdownTimeout = 10000 # Milliseconds to wait for the actor system to shutdown
extract {
regex {
#cacheMaxCapacity = 200 # Cache size for the compiled regexes, set to 0 to disable caching
}
xpath {
#cacheMaxCapacity = 200 # Cache size for the compiled XPath queries, set to 0 to disable caching
}
jsonPath {
#cacheMaxCapacity = 200 # Cache size for the compiled jsonPath queries, set to 0 to disable caching
}
css {
#cacheMaxCapacity = 200 # Cache size for the compiled CSS selectors queries, set to 0 to disable caching
}
}
directory {
#simulations = user-files/simulations # Directory where simulation classes are located (for bundle packaging only)
#resources = user-files/resources # Directory where resources, such as feeder files and request bodies are located (for bundle packaging only)
#reportsOnly = "" # If set, name of report folder to look for in order to generate its report
#binaries = "" # If set, name of the folder where compiles classes are located: Defaults to GATLING_HOME/target.
#results = results # Name of the folder where all reports folder are located
}
}
socket {
#connectTimeout = 10000 # Timeout in millis for establishing a TCP socket
#tcpNoDelay = true
#soKeepAlive = false # if TCP keepalive configured at OS level should be used
#soReuseAddress = false
}
netty {
#useNativeTransport = true # if Netty native transport should be used instead of Java NIO
#allocator = "pooled" # switch to unpooled for unpooled ByteBufAllocator
#maxThreadLocalCharBufferSize = 200000 # Netty's default is 16k
}
ssl {
#useOpenSsl = true # if OpenSSL should be used instead of JSSE (only the latter can be debugged with -Djava.net.debug=ssl)
#useOpenSslFinalizers = false # if OpenSSL contexts should be freed with Finalizer or if using RefCounted is fine
#handshakeTimeout = 10000 # TLS handshake timeout in millis
#useInsecureTrustManager = true # Use an insecure TrustManager that trusts all server certificates
#enabledProtocols = [] # Array of enabled protocols for HTTPS, if empty use Netty's defaults
#enabledCipherSuites = [] # Array of enabled cipher suites for HTTPS, if empty enable all available ciphers
#sessionCacheSize = 0 # SSLSession cache size, set to 0 to use JDK's default
#sessionTimeout = 0 # SSLSession timeout in seconds, set to 0 to use JDK's default (24h)
#enableSni = true # When set to true, enable Server Name indication (SNI)
keyStore {
#type = "" # Type of SSLContext's KeyManagers store
#file = "" # Location of SSLContext's KeyManagers store
#password = "" # Password for SSLContext's KeyManagers store
#algorithm = "" # Algorithm used SSLContext's KeyManagers store
}
trustStore {
#type = "" # Type of SSLContext's TrustManagers store
#file = "" # Location of SSLContext's TrustManagers store
#password = "" # Password for SSLContext's TrustManagers store
#algorithm = "" # Algorithm used by SSLContext's TrustManagers store
}
}
charting {
#noReports = false # When set to true, don't generate HTML reports
#maxPlotPerSeries = 1000 # Number of points per graph in Gatling reports
#useGroupDurationMetric = false # Switch group timings from cumulated response time to group duration.
indicators {
#lowerBound = 800 # Lower bound for the requests' response time to track in the reports and the console summary
#higherBound = 1200 # Higher bound for the requests' response time to track in the reports and the console summary
#percentile1 = 50 # Value for the 1st percentile to track in the reports, the console summary and Graphite
#percentile2 = 75 # Value for the 2nd percentile to track in the reports, the console summary and Graphite
#percentile3 = 95 # Value for the 3rd percentile to track in the reports, the console summary and Graphite
#percentile4 = 99 # Value for the 4th percentile to track in the reports, the console summary and Graphite
}
}
http {
#fetchedCssCacheMaxCapacity = 200 # Cache size for CSS parsed content, set to 0 to disable
#fetchedHtmlCacheMaxCapacity = 200 # Cache size for HTML parsed content, set to 0 to disable
#perUserCacheMaxCapacity = 200 # Per virtual user cache size, set to 0 to disable
#warmUpUrl = "https://gatling.io" # The URL to use to warm-up the HTTP stack (blank means disabled)
#enableGA = true # Very light Google Analytics (Gatling and Java version), please support
#pooledConnectionIdleTimeout = 60000 # Timeout in millis for a connection to stay idle in the pool
#requestTimeout = 60000 # Timeout in millis for performing an HTTP request
#enableHostnameVerification = false # When set to true, enable hostname verification: SSLEngine.setHttpsEndpointIdentificationAlgorithm("HTTPS")
dns {
#queryTimeout = 5000 # Timeout in millis of each DNS query in millis
#maxQueriesPerResolve = 6 # Maximum allowed number of DNS queries for a given name resolution
}
}
jms {
#replyTimeoutScanPeriod = 1000 # scan period for timedout reply messages
}
data {
#writers = [console, file] # The list of DataWriters to which Gatling write simulation data (currently supported : console, file, graphite)
console {
#light = false # When set to true, displays a light version without detailed request stats
#writePeriod = 5 # Write interval, in seconds
}
file {
#bufferSize = 8192 # FileDataWriter's internal data buffer size, in bytes
}
leak {
#noActivityTimeout = 30 # Period, in seconds, for which Gatling may have no activity before considering a leak may be happening
}
graphite {
#light = false # only send the all* stats
#host = "localhost" # The host where the Carbon server is located
#port = 2003 # The port to which the Carbon server listens to (2003 is default for plaintext, 2004 is default for pickle)
#protocol = "tcp" # The protocol used to send data to Carbon (currently supported : "tcp", "udp")
#rootPathPrefix = "gatling" # The common prefix of all metrics sent to Graphite
#bufferSize = 8192 # Internal data buffer size, in bytes
#writePeriod = 1 # Write period, in seconds
}
}
}

View File

@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<version>4.3.0</version>
<artifactId>powerjob-remote-framework</artifactId>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<powerjob-common.version>4.3.0</powerjob-common.version>
<reflections.version>0.10.2</reflections.version>
</properties>
<dependencies>
<dependency>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob-common</artifactId>
<version>${powerjob-common.version}</version>
</dependency>
<dependency>
<groupId>org.reflections</groupId>
<artifactId>reflections</artifactId>
<version>${reflections.version}</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,94 @@
package tech.powerjob.remote.framework;
import lombok.Data;
import lombok.experimental.Accessors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.RandomStringUtils;
import tech.powerjob.common.PowerSerializable;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.remote.framework.actor.Actor;
import tech.powerjob.remote.framework.actor.Handler;
import java.util.Optional;
/**
 * Benchmark actor
*
* @author tjq
* @since 2023/1/1
*/
@Slf4j
@Actor(path = "benchmark")
public class BenchmarkActor {
@Handler(path = "standard")
public BenchmarkResponse standardRequest(BenchmarkRequest request) {
long startTs = System.currentTimeMillis();
log.info("[BenchmarkActor] [standardRequest] receive request: {}", request);
BenchmarkResponse response = new BenchmarkResponse()
.setSuccess(true)
.setContent(request.getContent())
.setProcessThread(Thread.currentThread().getName())
.setServerReceiveTs(System.currentTimeMillis());
if (request.getResponseSize() != null && request.getResponseSize() > 0) {
response.setExtra(RandomStringUtils.randomPrint(request.getResponseSize()));
}
executeSleep(request);
response.setServerCost(System.currentTimeMillis() - startTs);
return response;
}
@Handler(path = "emptyReturn")
public void emptyReturn(BenchmarkRequest request) {
log.info("[BenchmarkActor] [emptyReturn] receive request: {}", request);
executeSleep(request);
}
@Handler(path = "stringReturn")
public String stringReturn(BenchmarkRequest request) {
log.info("[BenchmarkActor] [stringReturn] receive request: {}", request);
executeSleep(request);
return RandomStringUtils.randomPrint(Optional.ofNullable(request.getResponseSize()).orElse(100));
}
private static void executeSleep(BenchmarkRequest request) {
if (request.getBlockingMills() != null && request.getBlockingMills() > 0) {
CommonUtils.easySleep(request.getBlockingMills());
}
}
@Data
@Accessors(chain = true)
public static class BenchmarkRequest implements PowerSerializable {
/**
     * request content
*/
private String content;
/**
     * expected response size, nullable
*/
private Integer responseSize;
/**
     * blocking time, to simulate IO cost
*/
private Integer blockingMills;
}
@Data
@Accessors(chain = true)
public static class BenchmarkResponse implements PowerSerializable {
private boolean success;
/**
     * echoes the original content back
*/
private String content;
private String processThread;
private long serverReceiveTs;
private long serverCost;
private String extra;
}
}

View File

@ -0,0 +1,21 @@
package tech.powerjob.remote.framework.actor;
import java.lang.annotation.*;
/**
 * Actor: a container of request handlers
*
* @author tjq
* @since 2022/12/31
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD})
public @interface Actor {
/**
* root path
* @return root path
*/
String path();
}

View File

@ -0,0 +1,27 @@
package tech.powerjob.remote.framework.actor;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.Accessors;
import java.util.List;
/**
* ActorInfo
*
* @author tjq
* @since 2022/12/31
*/
@Getter
@Setter
@Accessors(chain = true)
public class ActorInfo {
private Object actor;
private Actor anno;
private List<HandlerInfo> handlerInfos;
}

View File

@ -0,0 +1,27 @@
package tech.powerjob.remote.framework.actor;
import java.lang.annotation.*;
/**
* Handler
*
* @author tjq
* @since 2022/12/31
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.METHOD})
public @interface Handler {
/**
* handler path
* @return handler path
*/
String path();
/**
     * processing type
     * @return blocking or non-blocking
*/
ProcessType processType() default ProcessType.BLOCKING;
}
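
A minimal usage sketch (a hypothetical DemoActor, not part of this change) showing how a handler can opt into non-blocking dispatch via processType; the argument reuses BenchmarkActor.BenchmarkRequest because handler arguments are expected to be PowerSerializable:

// illustrative only, imports from this module omitted
@Actor(path = "demo")
public class DemoActor {
    @Handler(path = "ping", processType = ProcessType.NO_BLOCKING)
    public String ping(BenchmarkActor.BenchmarkRequest request) {
        // runs on a non-blocking dispatcher/route, so it must not block
        return "pong:" + request.getContent();
    }
}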

View File

@ -0,0 +1,34 @@
package tech.powerjob.remote.framework.actor;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
import tech.powerjob.remote.framework.base.HandlerLocation;
import java.io.Serializable;
import java.lang.reflect.Method;
/**
* HandlerInfo
*
* @author tjq
* @since 2022/12/31
*/
@Getter
@Setter
@ToString
@Accessors(chain = true)
public class HandlerInfo {
private HandlerLocation location;
/**
     * the method backing this handler
*/
private Method method;
/**
     * metadata carried by the @Handler annotation
*/
private Handler anno;
}

View File

@ -0,0 +1,20 @@
package tech.powerjob.remote.framework.actor;
/**
 * handler processing type
*
* @author tjq
* @since 2023/1/1
*/
public enum ProcessType {
/**
     * blocking
*/
BLOCKING,
/**
     * non-blocking
*/
NO_BLOCKING
}

View File

@ -0,0 +1,37 @@
package tech.powerjob.remote.framework.base;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.Accessors;
import java.io.Serializable;
/**
 * Address (host + port)
*
* @author tjq
* @since 2022/12/31
*/
@Getter
@Setter
@Accessors(chain = true)
public class Address implements Serializable {
private String host;
private int port;
public String toFullAddress() {
return String.format("%s:%d", host, port);
}
public static Address fromIpv4(String ipv4) {
String[] split = ipv4.split(":");
return new Address()
.setHost(split[0])
.setPort(Integer.parseInt(split[1]));
}
@Override
public String toString() {
return toFullAddress();
}
}

View File

@ -0,0 +1,33 @@
package tech.powerjob.remote.framework.base;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
import java.io.Serializable;
/**
* handler location
*
* @author tjq
* @since 2022/12/31
*/
@Getter
@Setter
@ToString
@Accessors(chain = true)
public class HandlerLocation implements Serializable {
/**
     * root path
*/
private String rootPath;
/**
     * method path
*/
private String methodPath;
public String toPath() {
return String.format("/%s/%s", rootPath, methodPath);
}
}
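
For illustration (hypothetical values), toPath() simply joins the two segments into the routing key used by the transport implementations:

// illustrative only
HandlerLocation loc = new HandlerLocation().setRootPath("benchmark").setMethodPath("standard");
String path = loc.toPath(); // "/benchmark/standard"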

View File

@ -0,0 +1,16 @@
package tech.powerjob.remote.framework.base;
import java.io.IOException;
/**
* RemotingException
*
* @author tjq
* @since 2022/12/31
*/
public class RemotingException extends RuntimeException {
public RemotingException(String message) {
super(message);
}
}

View File

@ -0,0 +1,12 @@
package tech.powerjob.remote.framework.base;
/**
 * server type
*
* @author tjq
* @since 2022/12/31
*/
public enum ServerType {
SERVER,
WORKER
}

View File

@ -0,0 +1,32 @@
package tech.powerjob.remote.framework.base;
import lombok.Data;
import lombok.experimental.Accessors;
import java.io.Serializable;
/**
* URL
*
* @author tjq
* @since 2022/12/31
*/
@Data
@Accessors(chain = true)
public class URL implements Serializable {
/**
     * type of the target cluster; kept to support stacks like AKKA that need an actor-system name in addition to the IP
*/
private ServerType serverType;
/**
* remote address
*/
private Address address;
/**
* location
*/
private HandlerLocation location;
}

View File

@ -0,0 +1,43 @@
package tech.powerjob.remote.framework.cs;
import tech.powerjob.remote.framework.actor.ActorInfo;
import tech.powerjob.remote.framework.transporter.Transporter;
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
/**
* client & server initializer
*
* @author MuBao
* @since 2022/12/31
*/
public interface CSInitializer {
/**
     * type name, e.g. akka, netty4, httpJson
     * @return name
*/
String type();
/**
* initialize the framework
* @param config config
*/
void init(CSInitializerConfig config);
/**
     * build a Transporter based on the underlying network framework
* @return Transporter
*/
Transporter buildTransporter();
/**
* bind Actor, publish handler's service
* @param actorInfos actor infos
*/
void bindHandlers(List<ActorInfo> actorInfos);
void close() throws IOException;
}

View File

@ -0,0 +1,25 @@
package tech.powerjob.remote.framework.cs;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.Accessors;
import tech.powerjob.remote.framework.base.Address;
import tech.powerjob.remote.framework.base.ServerType;
import java.io.Serializable;
/**
* CSInitializerConfig
*
* @author tjq
* @since 2022/12/31
*/
@Getter
@Setter
@Accessors(chain = true)
public class CSInitializerConfig implements Serializable {
private Address bindAddress;
private ServerType serverType;
}

View File

@ -0,0 +1,37 @@
package tech.powerjob.remote.framework.engine;
import lombok.Data;
import lombok.experimental.Accessors;
import tech.powerjob.remote.framework.base.Address;
import tech.powerjob.remote.framework.base.ServerType;
import java.io.Serializable;
import java.util.List;
/**
* EngineConfig
*
* @author tjq
* @since 2022/12/31
*/
@Data
@Accessors(chain = true)
public class EngineConfig implements Serializable {
/**
     * server type
*/
private ServerType serverType;
/**
     * type of the engine to start
*/
private String type;
/**
     * local address to bind
*/
private Address bindAddress;
/**
     * actor instances; the caller instantiates them itself so that arbitrary beans can be injected
*/
private List<Object> actorList;
}

View File

@ -0,0 +1,18 @@
package tech.powerjob.remote.framework.engine;
import lombok.Getter;
import lombok.Setter;
import tech.powerjob.remote.framework.transporter.Transporter;
/**
 * engine output
*
* @author tjq
* @since 2022/12/31
*/
@Getter
@Setter
public class EngineOutput {
private Transporter transporter;
}

View File

@ -0,0 +1,16 @@
package tech.powerjob.remote.framework.engine;
import java.io.IOException;
/**
* RemoteEngine
*
* @author tjq
* @since 2022/12/31
*/
public interface RemoteEngine {
EngineOutput start(EngineConfig engineConfig);
void close() throws IOException;
}

View File

@ -0,0 +1,89 @@
package tech.powerjob.remote.framework.engine.impl;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.exception.ExceptionUtils;
import tech.powerjob.remote.framework.actor.Actor;
import tech.powerjob.remote.framework.actor.ActorInfo;
import tech.powerjob.remote.framework.actor.Handler;
import tech.powerjob.remote.framework.actor.HandlerInfo;
import tech.powerjob.remote.framework.base.HandlerLocation;
import java.lang.reflect.Method;
import java.util.List;
/**
 * load all Actors
*
* @author tjq
* @since 2022/12/31
*/
@Slf4j
class ActorFactory {
static List<ActorInfo> load(List<Object> actorList) {
List<ActorInfo> actorInfos = Lists.newArrayList();
actorList.forEach(actor -> {
final Class<?> clz = actor.getClass();
try {
final Actor anno = clz.getAnnotation(Actor.class);
ActorInfo actorInfo = new ActorInfo().setActor(actor).setAnno(anno);
actorInfo.setHandlerInfos(loadHandlerInfos4Actor(actorInfo));
actorInfos.add(actorInfo);
} catch (Throwable t) {
log.error("[ActorFactory] process Actor[{}] failed!", clz);
ExceptionUtils.rethrow(t);
}
});
return actorInfos;
}
private static List<HandlerInfo> loadHandlerInfos4Actor(ActorInfo actorInfo) {
List<HandlerInfo> ret = Lists.newArrayList();
Actor anno = actorInfo.getAnno();
String rootPath = anno.path();
Object actor = actorInfo.getActor();
findHandlerMethod(rootPath, actor.getClass(), ret);
return ret;
}
private static void findHandlerMethod(String rootPath, Class<?> clz, List<HandlerInfo> result) {
Method[] declaredMethods = clz.getDeclaredMethods();
for (Method handlerMethod: declaredMethods) {
Handler handlerMethodAnnotation = handlerMethod.getAnnotation(Handler.class);
if (handlerMethodAnnotation == null) {
continue;
}
HandlerLocation handlerLocation = new HandlerLocation()
.setRootPath(suitPath(rootPath))
.setMethodPath(suitPath(handlerMethodAnnotation.path()));
HandlerInfo handlerInfo = new HandlerInfo()
.setAnno(handlerMethodAnnotation)
.setMethod(handlerMethod)
.setLocation(handlerLocation);
result.add(handlerInfo);
}
        // recursively process the superclass as well
final Class<?> superclass = clz.getSuperclass();
if (superclass != null) {
findHandlerMethod(rootPath, superclass, result);
}
}
static String suitPath(String path) {
if (path.startsWith("/")) {
return path.replaceFirst("/", "");
}
return path;
}
}

View File

@ -0,0 +1,44 @@
package tech.powerjob.remote.framework.engine.impl;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.reflections.Reflections;
import tech.powerjob.common.OmsConstant;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.remote.framework.cs.CSInitializer;
import java.util.Set;
/**
* build CSInitializer
*
* @author tjq
* @since 2022/12/31
*/
@Slf4j
class CSInitializerFactory {
static CSInitializer build(String targetType) {
Reflections reflections = new Reflections(OmsConstant.PACKAGE);
Set<Class<? extends CSInitializer>> cSInitializerClzSet = reflections.getSubTypesOf(CSInitializer.class);
log.info("[CSInitializerFactory] scan subTypeOf CSInitializer: {}", cSInitializerClzSet);
for (Class<? extends CSInitializer> clz : cSInitializerClzSet) {
try {
CSInitializer csInitializer = clz.getDeclaredConstructor().newInstance();
String type = csInitializer.type();
log.info("[CSInitializerFactory] new instance for CSInitializer[{}] successfully, type={}, object: {}", clz, type, csInitializer);
if (targetType.equalsIgnoreCase(type)) {
return csInitializer;
}
} catch (Exception e) {
log.error("[CSInitializerFactory] new instance for CSInitializer[{}] failed, maybe you should provide a non-parameter constructor", clz);
ExceptionUtils.rethrow(e);
}
}
        throw new PowerJobException(String.format("can't load CSInitializer[%s], ensure your package name starts with 'tech.powerjob' and import the dependencies!", targetType));
}
}

View File

@ -0,0 +1,66 @@
package tech.powerjob.remote.framework.engine.impl;
import com.google.common.base.Stopwatch;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.remote.framework.actor.ActorInfo;
import tech.powerjob.remote.framework.cs.CSInitializer;
import tech.powerjob.remote.framework.cs.CSInitializerConfig;
import tech.powerjob.remote.framework.engine.EngineConfig;
import tech.powerjob.remote.framework.engine.EngineOutput;
import tech.powerjob.remote.framework.engine.RemoteEngine;
import tech.powerjob.remote.framework.transporter.Transporter;
import java.io.IOException;
import java.util.List;
/**
 * Initializes the whole PowerJob network layer
*
* @author tjq
* @since 2022/12/31
*/
@Slf4j
public class PowerJobRemoteEngine implements RemoteEngine {
private CSInitializer csInitializer;
@Override
public EngineOutput start(EngineConfig engineConfig) {
final String engineType = engineConfig.getType();
EngineOutput engineOutput = new EngineOutput();
log.info("[PowerJobRemoteEngine] [{}] start remote engine with config: {}", engineType, engineConfig);
List<ActorInfo> actorInfos = ActorFactory.load(engineConfig.getActorList());
csInitializer = CSInitializerFactory.build(engineType);
String type = csInitializer.type();
Stopwatch sw = Stopwatch.createStarted();
log.info("[PowerJobRemoteEngine] [{}] try to startup CSInitializer[type={}]", engineType, type);
csInitializer.init(new CSInitializerConfig()
.setBindAddress(engineConfig.getBindAddress())
.setServerType(engineConfig.getServerType())
);
        // build the transporter
Transporter transporter = csInitializer.buildTransporter();
engineOutput.setTransporter(transporter);
log.info("[PowerJobRemoteEngine] [{}] start to bind Handler", engineType);
actorInfos.forEach(actor -> actor.getHandlerInfos().forEach(handlerInfo -> log.info("[PowerJobRemoteEngine] [{}] PATH={}, handler={}", engineType, handlerInfo.getLocation().toPath(), handlerInfo.getMethod())));
        // bind handlers
csInitializer.bindHandlers(actorInfos);
log.info("[PowerJobRemoteEngine] [{}] startup successfully, cost: {}", engineType, sw);
return engineOutput;
}
@Override
public void close() throws IOException {
csInitializer.close();
}
}

View File

@ -0,0 +1,7 @@
/**
 * PowerJob network framework layer
*
* @author tjq
* @since 2022/12/31
*/
package tech.powerjob.remote.framework;

View File

@ -0,0 +1,16 @@
package tech.powerjob.remote.framework.transporter;
/**
 * communication protocol
*
* @author tjq
* @since 2022/12/31
*/
public interface Protocol {
/**
     * protocol name
     * @return protocol name
*/
String name();
}

View File

@ -0,0 +1,40 @@
package tech.powerjob.remote.framework.transporter;
import tech.powerjob.common.PowerSerializable;
import tech.powerjob.remote.framework.base.RemotingException;
import tech.powerjob.remote.framework.base.URL;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.ExecutorService;
/**
 * Transporter: encapsulates the logic of interacting with the remote server
*
* @author tjq
* @since 2022/12/31
*/
public interface Transporter {
/**
* Protocol
* @return return protocol
*/
Protocol getProtocol();
/**
     * send message
* @param url url
* @param request request
*/
void tell(URL url, PowerSerializable request);
/**
* ask by request
* @param url url
* @param request request
* @param clz response type
* @return CompletionStage
* @throws RemotingException remote exception
*/
<T> CompletionStage<T> ask(URL url, PowerSerializable request, Class<T> clz) throws RemotingException;
}
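
A caller-side sketch (the transporter and request variables are assumed to come from an already started engine, as in the tests later in this change):

// illustrative only: fire-and-forget vs. request-response
URL url = new URL()
        .setAddress(new Address().setHost("127.0.0.1").setPort(7890))
        .setLocation(new HandlerLocation().setRootPath("benchmark").setMethodPath("standard"));
transporter.tell(url, request); // no response expected
CompletionStage<BenchmarkActor.BenchmarkResponse> future =
        transporter.ask(url, request, BenchmarkActor.BenchmarkResponse.class);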

View File

@ -0,0 +1,35 @@
package tech.powerjob.remote.framework.utils;
import org.apache.commons.lang3.ArrayUtils;
import tech.powerjob.common.PowerSerializable;
import java.util.Optional;
/**
* RemoteUtils
*
* @author tjq
* @since 2023/1/1
*/
public class RemoteUtils {
public static Optional<Class<?>> findPowerSerialize(Class<?>[] parameterTypes) {
if (ArrayUtils.isEmpty(parameterTypes)) {
return Optional.empty();
}
for (Class<?> clz : parameterTypes) {
final Class<?>[] interfaces = clz.getInterfaces();
if (ArrayUtils.isEmpty(interfaces)) {
continue;
}
if (PowerSerializable.class.isAssignableFrom(clz)) {
return Optional.of(clz);
}
}
return Optional.empty();
}
}

View File

@ -0,0 +1,24 @@
package tech.powerjob.remote.framework.base;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/**
* test address
*
* @author tjq
* @since 2023/1/20
*/
@Slf4j
class AddressTest {
@Test
void testAddress() {
String ip = "192.168.1.1:10085";
final Address address = Address.fromIpv4(ip);
log.info("[AddressTest] parse address: {}", address);
assert ip.equals(address.toFullAddress());
}
}

View File

@ -0,0 +1,28 @@
package tech.powerjob.remote.framework.engine;
import com.google.common.collect.Sets;
import org.junit.jupiter.api.Test;
import tech.powerjob.remote.framework.base.Address;
import tech.powerjob.remote.framework.engine.impl.PowerJobRemoteEngine;
import static org.junit.jupiter.api.Assertions.*;
/**
* RemoteEngineTest
*
* @author tjq
* @since 2022/12/31
*/
class RemoteEngineTest {
@Test
void start() {
RemoteEngine remoteEngine = new PowerJobRemoteEngine();
EngineConfig engineConfig = new EngineConfig();
engineConfig.setType("TEST");
engineConfig.setBindAddress(new Address().setHost("127.0.0.1").setPort(10086));
remoteEngine.start(engineConfig);
}
}

View File

@ -0,0 +1,35 @@
package tech.powerjob.remote.framework.engine.impl;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import tech.powerjob.remote.framework.actor.ActorInfo;
import tech.powerjob.remote.framework.test.TestActor;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
* HandlerFactoryTest
*
* @author tjq
* @since 2022/12/31
*/
@Slf4j
class ActorFactoryTest {
@Test
void load() {
ActorFactory.load(Lists.newArrayList(new TestActor()));
}
@Test
void testSuitPath() {
final String testPath1 = ActorFactory.suitPath("/test");
final String testPath2 = ActorFactory.suitPath("test");
log.info("[ActorFactoryTest] testPath1: {}, testPath2: {}", testPath1, testPath2);
assert testPath1.equals(testPath2);
}
}

View File

@ -0,0 +1,29 @@
package tech.powerjob.remote.framework.engine.impl;
import com.google.common.collect.Sets;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import tech.powerjob.common.exception.PowerJobException;
import static org.junit.jupiter.api.Assertions.*;
/**
* CSInitializerFactoryTest
*
* @author tjq
* @since 2022/12/31
*/
class CSInitializerFactoryTest {
@Test
void testBuildNormal() {
CSInitializerFactory.build("TEST");
}
@Test
void testNotFind() {
Assertions.assertThrows(PowerJobException.class, () -> {
CSInitializerFactory.build("omicron");
});
}
}

View File

@ -0,0 +1,42 @@
package tech.powerjob.remote.framework.test;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.remote.framework.actor.Actor;
import tech.powerjob.remote.framework.actor.Handler;
import java.util.Map;
/**
* TestActor
*
* @author tjq
* @since 2022/12/31
*/
@Slf4j
@Actor(path = "/test")
public class TestActor {
public static void simpleStaticMethod() {
}
public void simpleMethod() {
}
@Handler(path = "/method1")
public String handlerMethod1() {
log.info("[TestActor] handlerMethod1");
return "1";
}
@Handler(path = "/method2")
public String handlerMethod2(String name) {
log.info("[TestActor] handlerMethod2 req: {}", name);
return name;
}
@Handler(path = "/returnEmpty")
public void handlerEmpty(Map<String, Object> req) {
log.info("[TestActor] handlerEmpty req: {}", req);
}
}

View File

@ -0,0 +1,46 @@
package tech.powerjob.remote.framework.test;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.remote.framework.actor.ActorInfo;
import tech.powerjob.remote.framework.actor.HandlerInfo;
import tech.powerjob.remote.framework.cs.CSInitializer;
import tech.powerjob.remote.framework.cs.CSInitializerConfig;
import tech.powerjob.remote.framework.transporter.Transporter;
import java.io.IOException;
import java.util.List;
/**
* TestCSInitializer
*
* @author tjq
* @since 2022/12/31
*/
@Slf4j
public class TestCSInitializer implements CSInitializer {
@Override
public String type() {
return "TEST";
}
@Override
public void init(CSInitializerConfig config) {
log.info("TestCSInitializer#init");
}
@Override
public Transporter buildTransporter() {
log.info("TestCSInitializer#buildTransporter");
return null;
}
@Override
public void bindHandlers(List<ActorInfo> actorInfos) {
log.info("TestCSInitializer#actorInfos");
}
@Override
public void close() throws IOException {
log.info("TestCSInitializer#close");
}
}

View File

@ -0,0 +1,35 @@
package tech.powerjob.remote.framework.utils;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import tech.powerjob.common.model.AlarmConfig;
import tech.powerjob.common.request.ServerScheduleJobReq;
import java.util.Optional;
/**
* RemoteUtilsTest
*
* @author tjq
* @since 2023/1/1
*/
@Slf4j
class RemoteUtilsTest {
@Test
void findPowerSerialize() {
Class<?>[] contains = {AlarmConfig.class, ServerScheduleJobReq.class};
Class<?>[] notContains = {AlarmConfig.class};
final Optional<Class<?>> notContainsResult = RemoteUtils.findPowerSerialize(notContains);
log.info("[RemoteUtilsTest] notContainsResult: {}", notContainsResult);
final Optional<Class<?>> containsResult = RemoteUtils.findPowerSerialize(contains);
log.info("[RemoteUtilsTest] containsResult: {}", containsResult);
assert !notContainsResult.isPresent();
assert containsResult.isPresent();
}
}

View File

@ -0,0 +1,45 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-remote-impl-akka</artifactId>
<version>4.3.0</version>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<powerjob-remote-framework.version>4.3.0</powerjob-remote-framework.version>
<akka.version>2.6.20</akka.version>
</properties>
<dependencies>
<dependency>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob-remote-framework</artifactId>
<version>${powerjob-remote-framework.version}</version>
</dependency>
<!-- akka remote -->
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_2.13</artifactId>
<version>${akka.version}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-slf4j_2.13</artifactId>
<version>${akka.version}</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,94 @@
package tech.powerjob.remote.akka;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.DeadLetter;
import akka.actor.Props;
import akka.routing.RoundRobinPool;
import com.google.common.collect.Maps;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.remote.framework.actor.ActorInfo;
import tech.powerjob.remote.framework.base.Address;
import tech.powerjob.remote.framework.cs.CSInitializer;
import tech.powerjob.remote.framework.cs.CSInitializerConfig;
import tech.powerjob.remote.framework.transporter.Transporter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
* AkkaCSInitializer
*
* @author tjq
* @since 2022/12/31
*/
@Slf4j
public class AkkaCSInitializer implements CSInitializer {
private ActorSystem actorSystem;
private CSInitializerConfig config;
@Override
public String type() {
return tech.powerjob.common.enums.Protocol.AKKA.name();
}
@Override
public void init(CSInitializerConfig config) {
this.config = config;
Address bindAddress = config.getBindAddress();
log.info("[PowerJob-AKKA] bindAddress: {}", bindAddress);
        // initialize the ActorSystem; on macOS, probing port occupation with new ServerSocket does not work (probably because AKKA is written in Scala), so the only option is to retry on exception
Map<String, Object> overrideConfig = Maps.newHashMap();
overrideConfig.put("akka.remote.artery.canonical.hostname", bindAddress.getHost());
overrideConfig.put("akka.remote.artery.canonical.port", bindAddress.getPort());
Config akkaBasicConfig = ConfigFactory.load(AkkaConstant.AKKA_CONFIG);
Config akkaFinalConfig = ConfigFactory.parseMap(overrideConfig).withFallback(akkaBasicConfig);
log.info("[PowerJob-AKKA] try to start AKKA System.");
        // bind the actorSystemName of the current server type at startup
String actorSystemName = AkkaConstant.fetchActorSystemName(config.getServerType());
this.actorSystem = ActorSystem.create(actorSystemName, akkaFinalConfig);
        // handle abnormal situations produced by the system (e.g. dead letters)
ActorRef troubleshootingActor = actorSystem.actorOf(Props.create(AkkaTroubleshootingActor.class), "troubleshooting");
actorSystem.eventStream().subscribe(troubleshootingActor, DeadLetter.class);
log.info("[PowerJob-AKKA] initialize actorSystem[{}] successfully!", actorSystem.name());
}
@Override
public Transporter buildTransporter() {
return new AkkaTransporter(actorSystem);
}
@Override
public void bindHandlers(List<ActorInfo> actorInfos) {
int cores = Runtime.getRuntime().availableProcessors();
actorInfos.forEach(actorInfo -> {
String rootPath = actorInfo.getAnno().path();
AkkaMappingService.ActorConfig actorConfig = AkkaMappingService.parseActorName(rootPath);
log.info("[PowerJob-AKKA] start to process actor[path={},config={}]", rootPath, JsonUtils.toJSONString(actorConfig));
actorSystem.actorOf(AkkaProxyActor.props(actorInfo)
.withDispatcher("akka.".concat(actorConfig.getDispatcherName()))
.withRouter(new RoundRobinPool(cores)), actorConfig.getActorName());
});
}
@Override
public void close() throws IOException {
actorSystem.terminate();
}
}

View File

@ -0,0 +1,29 @@
package tech.powerjob.remote.akka;
import tech.powerjob.remote.framework.base.ServerType;
/**
* AkkaConstant
*
* @author tjq
* @since 2022/12/31
*/
public class AkkaConstant {
public static final String AKKA_CONFIG = "powerjob.akka.conf";
public static final String WORKER_ACTOR_SYSTEM_NAME = "oms";
public static final String SERVER_ACTOR_SYSTEM_NAME = "oms-server";
/**
     * fetch the actorSystem name
     * @param serverType current server type: powerjob-server is SERVER, powerjob-worker is WORKER
* @return actorSystemName
*/
public static String fetchActorSystemName(ServerType serverType) {
return serverType == ServerType.SERVER ? SERVER_ACTOR_SYSTEM_NAME : WORKER_ACTOR_SYSTEM_NAME;
}
}

View File

@ -0,0 +1,61 @@
package tech.powerjob.remote.akka;
import com.google.common.collect.Maps;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.Accessors;
import tech.powerjob.common.RemoteConstant;
import java.util.Map;
/**
 * Builds the Actor mapping
*
* @author tjq
* @since 2023/1/7
*/
public class AkkaMappingService {
/**
* Actor's RootPath -> Akka Actor Name
*/
private static final Map<String, ActorConfig> RP_2_ACTOR_CFG = Maps.newHashMap();
static {
addMappingRule(RemoteConstant.S4W_PATH, "server_actor", "w-r-c-d");
addMappingRule(RemoteConstant.S4S_PATH, "friend_actor", "friend-request-actor-dispatcher");
addMappingRule(RemoteConstant.WTT_PATH, "task_tracker", "task-tracker-dispatcher");
addMappingRule(RemoteConstant.WPT_PATH, "processor_tracker", "processor-tracker-dispatcher");
}
private static final String DEFAULT_DISPATCH_NAME = "common-dispatcher";
/**
     * resolve the Akka actor name from the actor rootPath; when no override exists, the path itself is used
* @param actorRootPath actorRootPath
* @return actorName
*/
public static ActorConfig parseActorName(String actorRootPath) {
return RP_2_ACTOR_CFG.getOrDefault(actorRootPath,
new ActorConfig()
.setActorName(actorRootPath)
.setDispatcherName(DEFAULT_DISPATCH_NAME)
);
}
@Getter
@Setter
@Accessors(chain = true)
public static class ActorConfig {
private String actorName;
private String dispatcherName;
}
private static void addMappingRule(String newActorPath, String oldActorName, String dispatchName) {
ActorConfig actorConfig = new ActorConfig()
.setActorName(oldActorName)
.setDispatcherName(dispatchName == null ? DEFAULT_DISPATCH_NAME : dispatchName);
RP_2_ACTOR_CFG.put(newActorPath, actorConfig);
}
}

View File

@ -0,0 +1,16 @@
package tech.powerjob.remote.akka;
import tech.powerjob.remote.framework.transporter.Protocol;
/**
* AkkaProtocol
*
* @author tjq
* @since 2022/12/31
*/
public class AkkaProtocol implements Protocol {
@Override
public String name() {
return tech.powerjob.common.enums.Protocol.AKKA.name();
}
}

View File

@ -0,0 +1,70 @@
package tech.powerjob.remote.akka;
import akka.actor.AbstractActor;
import akka.actor.Props;
import akka.japi.pf.ReceiveBuilder;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.remote.framework.actor.ActorInfo;
import tech.powerjob.remote.framework.actor.HandlerInfo;
import tech.powerjob.remote.framework.base.HandlerLocation;
import tech.powerjob.remote.framework.utils.RemoteUtils;
import java.lang.reflect.Method;
import java.util.Optional;
/**
 * proxy actor
*
* @author tjq
* @since 2023/1/6
*/
@Slf4j
public class AkkaProxyActor extends AbstractActor {
private final Receive receive;
private final ActorInfo actorInfo;
public static Props props(ActorInfo actorInfo) {
return Props.create(AkkaProxyActor.class, () -> new AkkaProxyActor(actorInfo));
}
public AkkaProxyActor(ActorInfo actorInfo) {
this.actorInfo = actorInfo;
final ReceiveBuilder receiveBuilder = receiveBuilder();
actorInfo.getHandlerInfos().forEach(handlerInfo -> {
final HandlerLocation location = handlerInfo.getLocation();
final Method handlerMethod = handlerInfo.getMethod();
final Optional<Class<?>> powerSerializeClz = RemoteUtils.findPowerSerialize(handlerMethod.getParameterTypes());
if (!powerSerializeClz.isPresent()) {
throw new PowerJobException("build proxy for handler failed due to handler args is not PowerSerialize: " + location);
}
final Class<?> bindClz = powerSerializeClz.get();
receiveBuilder.match(bindClz, req -> onReceiveProcessorReportTaskStatusReq(req, handlerInfo));
});
this.receive = receiveBuilder.build();
}
@Override
public Receive createReceive() {
return receive;
}
private <T> void onReceiveProcessorReportTaskStatusReq(T req, HandlerInfo handlerInfo) {
try {
final Object ret = handlerInfo.getMethod().invoke(actorInfo.getActor(), req);
if (ret == null) {
return;
}
if (ret instanceof Optional) {
if (!((Optional<?>) ret).isPresent()) {
return;
}
}
getSender().tell(ret, getSelf());
} catch (Exception e) {
log.error("[PowerJob-AKKA] process failed!", e);
}
}
}

View File

@ -0,0 +1,69 @@
package tech.powerjob.remote.akka;
import akka.actor.ActorSelection;
import akka.actor.ActorSystem;
import akka.pattern.Patterns;
import tech.powerjob.common.PowerSerializable;
import tech.powerjob.common.RemoteConstant;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.remote.framework.base.HandlerLocation;
import tech.powerjob.remote.framework.base.RemotingException;
import tech.powerjob.remote.framework.base.URL;
import tech.powerjob.remote.framework.transporter.Protocol;
import tech.powerjob.remote.framework.transporter.Transporter;
import java.time.Duration;
import java.util.concurrent.CompletionStage;
/**
* AkkaTransporter
*
* @author tjq
* @since 2022/12/31
*/
public class AkkaTransporter implements Transporter {
private final ActorSystem actorSystem;
/**
* akka://<actor system>@<hostname>:<port>/<actor path>
*/
private static final String AKKA_NODE_PATH = "akka://%s@%s/user/%s";
public AkkaTransporter(ActorSystem actorSystem) {
this.actorSystem = actorSystem;
}
@Override
public Protocol getProtocol() {
return new AkkaProtocol();
}
@Override
public void tell(URL url, PowerSerializable request) {
ActorSelection actorSelection = fetchActorSelection(url);
actorSelection.tell(request, null);
}
@Override
@SuppressWarnings("unchecked")
public <T> CompletionStage<T> ask(URL url, PowerSerializable request, Class<T> clz) throws RemotingException {
ActorSelection actorSelection = fetchActorSelection(url);
return (CompletionStage<T>) Patterns.ask(actorSelection, request, Duration.ofMillis(RemoteConstant.DEFAULT_TIMEOUT_MS));
}
private ActorSelection fetchActorSelection(URL url) {
HandlerLocation location = url.getLocation();
String targetActorSystemName = AkkaConstant.fetchActorSystemName(url.getServerType());
String targetActorName = AkkaMappingService.parseActorName(location.getRootPath()).getActorName();
CommonUtils.requireNonNull(targetActorName, "can't find actor by URL: " + location);
String address = url.getAddress().toFullAddress();
return actorSystem.actorSelection(String.format(AKKA_NODE_PATH, targetActorSystemName, address, targetActorName));
}
}

View File

@ -1,17 +1,17 @@
package tech.powerjob.worker.actors; package tech.powerjob.remote.akka;
import akka.actor.AbstractActor; import akka.actor.AbstractActor;
import akka.actor.DeadLetter; import akka.actor.DeadLetter;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
/** /**
 * Actor that handles system exceptions * TroubleshootingActor
* *
* @author 朱八 * @author tjq
* @since 2020/7/16 * @since 2022/12/31
*/ */
@Slf4j @Slf4j
public class TroubleshootingActor extends AbstractActor { public class AkkaTroubleshootingActor extends AbstractActor {
@Override @Override
public Receive createReceive() { public Receive createReceive() {
return receiveBuilder() return receiveBuilder()
@ -20,6 +20,6 @@ public class TroubleshootingActor extends AbstractActor {
} }
public void onReceiveDeadLetter(DeadLetter dl) { public void onReceiveDeadLetter(DeadLetter dl) {
log.warn("[TroubleshootingActor] receive DeadLetter: {}", dl); log.warn("[PowerJob-AKKA] receive DeadLetter: {}", dl);
} }
} }

View File

@ -1,6 +1,7 @@
package tech.powerjob.common.serialize; package tech.powerjob.remote.akka;
import akka.serialization.JSerializer; import akka.serialization.JSerializer;
import tech.powerjob.common.serialize.SerializerUtils;
/** /**
* Using custom serializers for akka-remote * Using custom serializers for akka-remote
@ -31,3 +32,4 @@ public class PowerAkkaSerializer extends JSerializer {
return false; return false;
} }
} }

View File

@ -0,0 +1,8 @@
/**
 * Since AKKA is moving to a commercial licensing model, PowerJob plans to drop akka support, so this module is no longer maintained!
 * If you run into any issue with it, please switch to another communication protocol; HTTP is recommended.
*
* @author PowerJob发言人
* @since 2022/12/31
*/
package tech.powerjob.remote.akka;

View File

@ -0,0 +1,133 @@
akka {
loggers = ["akka.event.slf4j.Slf4jLogger"]
loglevel = "WARNING"
actor {
    # cluster is better (recommended by the official documentation), but I prefer remote
provider = remote
allow-java-serialization = off
serializers {
power-serializer = "tech.powerjob.remote.akka.PowerAkkaSerializer"
}
serialization-bindings {
"tech.powerjob.common.PowerSerializable" = power-serializer
}
}
remote {
artery {
transport = tcp # See Selecting a transport below
      # overwritten by code
canonical.hostname = "127.0.0.1"
canonical.port = 25520
}
}
# dispatcher
task-tracker-dispatcher {
# Dispatcher is the name of the event-based dispatcher
type = Dispatcher
# What kind of ExecutionService to use
executor = "fork-join-executor"
# Configuration for the fork join pool
fork-join-executor {
# Min number of threads to cap factor-based parallelism number to
parallelism-min = 2
# Parallelism (threads) ... ceil(available processors * factor)
parallelism-factor = 4.0
# Max number of threads to cap factor-based parallelism number to
parallelism-max = 64
}
# Throughput defines the maximum number of messages to be
# processed per actor before the thread jumps to the next actor.
# Set to 1 for as fair as possible.
throughput = 10
}
processor-tracker-dispatcher {
type = Dispatcher
executor = "fork-join-executor"
fork-join-executor {
parallelism-min = 2
parallelism-factor = 2.0
parallelism-max = 64
}
throughput = 10
}
worker-common-dispatcher {
type = Dispatcher
executor = "fork-join-executor"
fork-join-executor {
parallelism-min = 2
parallelism-factor = 2.0
parallelism-max = 8
}
throughput = 10
}
##################### server config #####################
# worker-request-core-dispatcher
w-r-c-d {
# Dispatcher is the name of the event-based dispatcher
type = Dispatcher
# What kind of ExecutionService to use
executor = "fork-join-executor"
# Configuration for the fork join pool
fork-join-executor {
# Min number of threads to cap factor-based parallelism number to
parallelism-min = 2
# Parallelism (threads) ... ceil(available processors * factor)
parallelism-factor = 4.0
# Max number of threads to cap factor-based parallelism number to
parallelism-max = 128
}
# Throughput defines the maximum number of messages to be
# processed per actor before the thread jumps to the next actor.
# Set to 1 for as fair as possible.
throughput = 10
}
friend-request-actor-dispatcher {
# Dispatcher is the name of the event-based dispatcher
type = Dispatcher
# What kind of ExecutionService to use
executor = "fork-join-executor"
# Configuration for the fork join pool
fork-join-executor {
# Min number of threads to cap factor-based parallelism number to
parallelism-min = 2
# Parallelism (threads) ... ceil(available processors * factor)
parallelism-factor = 4.0
# Max number of threads to cap factor-based parallelism number to
parallelism-max = 128
}
# Throughput defines the maximum number of messages to be
# processed per actor before the thread jumps to the next actor.
# Set to 1 for as fair as possible.
throughput = 5
}
##################### default config #####################
common-dispatcher {
# Dispatcher is the name of the event-based dispatcher
type = Dispatcher
# What kind of ExecutionService to use
executor = "fork-join-executor"
# Configuration for the fork join pool
fork-join-executor {
# Min number of threads to cap factor-based parallelism number to
parallelism-min = 2
# Parallelism (threads) ... ceil(available processors * factor)
parallelism-factor = 4.0
# Max number of threads to cap factor-based parallelism number to
parallelism-max = 64
}
# Throughput defines the maximum number of messages to be
# processed per actor before the thread jumps to the next actor.
# Set to 1 for as fair as possible.
throughput = 10
}
}

View File

@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>powerjob-remote</artifactId>
<groupId>tech.powerjob</groupId>
<version>4.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-remote-impl-http</artifactId>
<version>4.3.0</version>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<vertx.version>4.3.7</vertx.version>
<powerjob-remote-framework.version>4.3.0</powerjob-remote-framework.version>
</properties>
<dependencies>
<dependency>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob-remote-framework</artifactId>
<version>${powerjob-remote-framework.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/io.vertx/vertx-core -->
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-core</artifactId>
<version>${vertx.version}</version>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-web</artifactId>
<version>${vertx.version}</version>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,17 @@
package tech.powerjob.remote.http;
import tech.powerjob.remote.framework.transporter.Protocol;
/**
* HttpProtocol
*
* @author tjq
* @since 2022/12/31
*/
public class HttpProtocol implements Protocol {
@Override
public String name() {
return tech.powerjob.common.enums.Protocol.HTTP.name();
}
}

View File

@ -0,0 +1,145 @@
package tech.powerjob.remote.http;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.vertx.core.Handler;
import io.vertx.core.Vertx;
import io.vertx.core.http.HttpClient;
import io.vertx.core.http.HttpServer;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.RequestBody;
import io.vertx.ext.web.Route;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.RoutingContext;
import io.vertx.ext.web.handler.BodyHandler;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.remote.framework.actor.ActorInfo;
import tech.powerjob.remote.framework.actor.HandlerInfo;
import tech.powerjob.remote.framework.actor.ProcessType;
import tech.powerjob.remote.framework.cs.CSInitializer;
import tech.powerjob.remote.framework.cs.CSInitializerConfig;
import tech.powerjob.remote.framework.transporter.Transporter;
import tech.powerjob.remote.framework.utils.RemoteUtils;
import tech.powerjob.remote.http.vertx.VertxInitializer;
import tech.powerjob.remote.http.vertx.VertxTransporter;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
/**
* HttpCSInitializer
 * After a full night of deliberation, vertx was chosen as the HTTP layer instead of using netty directly, for the following reasons:
 * - netty is easy to build on, but performance tuning takes time and hands-on experience; vertx is the "first-party" framework on top of netty, so its wrapping of netty should be thoroughly polished and performance is not a concern
 * - the only drawback of vertx is that, as a relatively high-level framework, it may cause serious dependency conflicts, especially for users whose own applications already run on vertx
 * - that problem can be solved by switching protocols; a netty-based implementation with a custom protocol is expected later
*
* @author tjq
* @since 2022/12/31
*/
@Slf4j
public class HttpVertxCSInitializer implements CSInitializer {
private Vertx vertx;
private HttpServer httpServer;
private HttpClient httpClient;
private CSInitializerConfig config;
@Override
public String type() {
return tech.powerjob.common.enums.Protocol.HTTP.name();
}
@Override
public void init(CSInitializerConfig config) {
this.config = config;
vertx = VertxInitializer.buildVertx();
httpServer = VertxInitializer.buildHttpServer(vertx);
httpClient = VertxInitializer.buildHttpClient(vertx);
}
@Override
public Transporter buildTransporter() {
return new VertxTransporter(httpClient);
}
@Override
@SneakyThrows
public void bindHandlers(List<ActorInfo> actorInfos) {
Router router = Router.router(vertx);
        // handle request / response bodies
router.route().handler(BodyHandler.create());
actorInfos.forEach(actorInfo -> {
Optional.ofNullable(actorInfo.getHandlerInfos()).orElse(Collections.emptyList()).forEach(handlerInfo -> {
String handlerHttpPath = handlerInfo.getLocation().toPath();
ProcessType processType = handlerInfo.getAnno().processType();
Handler<RoutingContext> routingContextHandler = buildRequestHandler(actorInfo, handlerInfo);
Route route = router.post(handlerHttpPath);
if (processType == ProcessType.BLOCKING) {
route.blockingHandler(routingContextHandler, false);
} else {
route.handler(routingContextHandler);
}
});
});
        // start the vertx http server
final int port = config.getBindAddress().getPort();
final String host = config.getBindAddress().getHost();
httpServer.requestHandler(router)
.exceptionHandler(e -> log.error("[PowerJob] unknown exception in Actor communication!", e))
.listen(port, host)
.toCompletionStage()
.toCompletableFuture()
.get(1, TimeUnit.MINUTES);
log.info("[PowerJobRemoteEngine] startup vertx HttpServer successfully!");
}
private Handler<RoutingContext> buildRequestHandler(ActorInfo actorInfo, HandlerInfo handlerInfo) {
Method method = handlerInfo.getMethod();
Optional<Class<?>> powerSerializeClz = RemoteUtils.findPowerSerialize(method.getParameterTypes());
        // internal framework, strict mode: fail fast when binding fails
if (!powerSerializeClz.isPresent()) {
throw new PowerJobException("can't find any 'PowerSerialize' object in handler args: " + handlerInfo.getLocation());
}
return ctx -> {
final RequestBody body = ctx.body();
final Object convertResult = body.asPojo(powerSerializeClz.get());
try {
Object response = method.invoke(actorInfo.getActor(), convertResult);
if (response != null) {
if (response instanceof String) {
ctx.end((String) response);
} else {
ctx.json(JsonObject.mapFrom(response));
}
return;
}
ctx.end();
} catch (Throwable t) {
                // note: this is the framework's actual runtime log output, using the standard PowerJob format
log.error("[PowerJob] invoke Handler[{}] failed!", handlerInfo.getLocation(), t);
ctx.fail(HttpResponseStatus.INTERNAL_SERVER_ERROR.code(), t);
}
};
}
@Override
public void close() throws IOException {
httpClient.close();
httpServer.close();
vertx.close();
}
}

View File

@ -0,0 +1,87 @@
package tech.powerjob.remote.http.vertx;
import io.vertx.core.Vertx;
import io.vertx.core.VertxOptions;
import io.vertx.core.http.HttpClient;
import io.vertx.core.http.HttpClientOptions;
import io.vertx.core.http.HttpServer;
import io.vertx.core.http.HttpServerOptions;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.common.OmsConstant;
import tech.powerjob.common.PowerJobDKey;
/**
* VertxInitializer
 * PowerJob only uses vertx as a toolkit
*
* @author tjq
* @since 2023/1/1
*/
@Slf4j
public class VertxInitializer {
/**
     * keep-alive is enabled by default, with a 75s timeout
*/
private static final int DEFAULT_KEEP_ALIVE_TIMEOUT = 75;
private static final int CONNECTION_TIMEOUT_MS = 3000;
private static final int SERVER_IDLE_TIMEOUT_S = 300;
public static Vertx buildVertx() {
final int cpuCores = Runtime.getRuntime().availableProcessors();
VertxOptions options = new VertxOptions()
.setWorkerPoolSize(Math.max(16, 2 * cpuCores))
.setInternalBlockingPoolSize(Math.max(32, 4 * cpuCores));
log.info("[PowerJob-Vertx] use vertx options: {}", options);
return Vertx.vertx(options);
}
public static HttpServer buildHttpServer(Vertx vertx) {
HttpServerOptions httpServerOptions = new HttpServerOptions()
.setIdleTimeout(SERVER_IDLE_TIMEOUT_S);
tryEnableCompression(httpServerOptions);
log.info("[PowerJob-Vertx] use HttpServerOptions: {}", httpServerOptions.toJson());
return vertx.createHttpServer(httpServerOptions);
}
private static void tryEnableCompression(HttpServerOptions httpServerOptions) {
        // non-core component: the class is referenced without a direct import, so a load failure here can be safely ignored
try {
httpServerOptions
.addCompressor(io.netty.handler.codec.compression.StandardCompressionOptions.gzip())
.setCompressionSupported(true);
log.warn("[PowerJob-Vertx] enable server side compression successfully!");
} catch (Exception e) {
log.warn("[PowerJob-Vertx] enable server side compression failed!", e);
}
}
public static HttpClient buildHttpClient(Vertx vertx) {
HttpClientOptions httpClientOptions = new HttpClientOptions()
.setMetricsName(OmsConstant.PACKAGE)
.setConnectTimeout(CONNECTION_TIMEOUT_MS)
.setMaxPoolSize(Math.max(8, Runtime.getRuntime().availableProcessors()) * 2);
        // keep-alive
String keepaliveTimeout = System.getProperty(PowerJobDKey.TRANSPORTER_KEEP_ALIVE_TIMEOUT, String.valueOf(DEFAULT_KEEP_ALIVE_TIMEOUT));
int keepaliveTimeoutInt = Integer.parseInt(keepaliveTimeout);
if (keepaliveTimeoutInt > 0) {
httpClientOptions.setKeepAlive(true).setKeepAliveTimeout(keepaliveTimeoutInt);
} else {
httpClientOptions.setKeepAlive(false);
}
        // decide whether to use compression
String enableCompressing = System.getProperty(PowerJobDKey.TRANSPORTER_USE_COMPRESSING);
if (StringUtils.isNotEmpty(enableCompressing)) {
httpClientOptions.setTryUseCompression(StringUtils.equalsIgnoreCase(enableCompressing, Boolean.TRUE.toString()));
}
log.info("[PowerJob-Vertx] use HttpClientOptions: {}", httpClientOptions.toJson());
return vertx.createHttpClient(httpClientOptions);
}
}
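
A hedged illustration of tuning the client keep-alive (the concrete property string behind PowerJobDKey.TRANSPORTER_KEEP_ALIVE_TIMEOUT lives in powerjob-common and is not shown in this diff):

// illustrative only: override the keep-alive timeout (seconds) before the engine starts
System.setProperty(PowerJobDKey.TRANSPORTER_KEEP_ALIVE_TIMEOUT, "120");
// a value <= 0 falls back to keepAlive(false), as implemented in buildHttpClient above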

View File

@ -0,0 +1,85 @@
package tech.powerjob.remote.http.vertx;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.vertx.core.Future;
import io.vertx.core.http.*;
import io.vertx.core.json.JsonObject;
import tech.powerjob.common.PowerSerializable;
import tech.powerjob.remote.framework.base.RemotingException;
import tech.powerjob.remote.framework.base.URL;
import tech.powerjob.remote.framework.transporter.Protocol;
import tech.powerjob.remote.framework.transporter.Transporter;
import tech.powerjob.remote.http.HttpProtocol;
import java.util.concurrent.CompletionStage;
/**
* VertxTransporter
*
* @author tjq
* @since 2023/1/1
*/
public class VertxTransporter implements Transporter {
private final HttpClient httpClient;
private static final Protocol PROTOCOL = new HttpProtocol();
public VertxTransporter(HttpClient httpClient) {
this.httpClient = httpClient;
}
@Override
public Protocol getProtocol() {
return PROTOCOL;
}
@Override
public void tell(URL url, PowerSerializable request) {
post(url, request, null);
}
@Override
public <T> CompletionStage<T> ask(URL url, PowerSerializable request, Class<T> clz) throws RemotingException {
return post(url, request, clz);
}
@SuppressWarnings("unchecked")
private <T> CompletionStage<T> post(URL url, PowerSerializable request, Class<T> clz) {
final String host = url.getAddress().getHost();
final int port = url.getAddress().getPort();
final String path = url.getLocation().toPath();
RequestOptions requestOptions = new RequestOptions()
.setMethod(HttpMethod.POST)
.setHost(host)
.setPort(port)
.setURI(path);
        // acquire an HTTP connection to the remote server
Future<HttpClientRequest> httpClientRequestFuture = httpClient.request(requestOptions);
        // compose: send the request and obtain the response
Future<HttpClientResponse> responseFuture = httpClientRequestFuture.compose(httpClientRequest -> httpClientRequest.send(JsonObject.mapFrom(request).toBuffer()));
return responseFuture.compose(httpClientResponse -> {
// throw exception
final int statusCode = httpClientResponse.statusCode();
if (statusCode != HttpResponseStatus.OK.code()) {
                // this exception is propagated when CompletableFuture.get() is called
throw new RemotingException(String.format("request [host:%s,port:%s,url:%s] failed, status: %d, msg: %s",
host, port, path, statusCode, httpClientResponse.statusMessage()
));
}
return httpClientResponse.body().compose(x -> {
if (clz == null) {
return Future.succeededFuture(null);
}
if (clz.equals(String.class)) {
return Future.succeededFuture((T) x.toString());
}
return Future.succeededFuture(x.toJsonObject().mapTo(clz));
});
}).toCompletionStage();
}
}

View File

@ -0,0 +1,77 @@
package tech.powerjob.remote.http;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import tech.powerjob.common.enums.Protocol;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.remote.framework.BenchmarkActor;
import tech.powerjob.remote.framework.base.Address;
import tech.powerjob.remote.framework.base.HandlerLocation;
import tech.powerjob.remote.framework.base.URL;
import tech.powerjob.remote.framework.engine.EngineConfig;
import tech.powerjob.remote.framework.engine.EngineOutput;
import tech.powerjob.remote.framework.engine.RemoteEngine;
import tech.powerjob.remote.framework.engine.impl.PowerJobRemoteEngine;
import tech.powerjob.remote.framework.transporter.Transporter;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.TimeUnit;
/**
* HttpVertxCSInitializerTest
*
* @author tjq
* @since 2023/1/2
*/
@Slf4j
class HttpVertxCSInitializerTest {
@Test
void testHttpVertxCSInitializerTest() throws Exception {
final Address address = new Address().setPort(7890).setHost("127.0.0.1");
EngineConfig engineConfig = new EngineConfig()
.setType(Protocol.HTTP.name())
.setBindAddress(address)
.setActorList(Lists.newArrayList(new BenchmarkActor()));
RemoteEngine engine = new PowerJobRemoteEngine();
EngineOutput engineOutput = engine.start(engineConfig);
log.info("[HttpVertxCSInitializerTest] engine start up successfully!");
Transporter transporter = engineOutput.getTransporter();
BenchmarkActor.BenchmarkRequest request = new BenchmarkActor.BenchmarkRequest()
.setContent("request from test")
.setBlockingMills(100)
.setResponseSize(10240);
log.info("[HttpVertxCSInitializerTest] test empty request!");
URL emptyURL = new URL()
.setAddress(address)
.setLocation(new HandlerLocation().setMethodPath("emptyReturn").setRootPath("benchmark"));
transporter.tell(emptyURL, request);
log.info("[HttpVertxCSInitializerTest] test string request!");
URL stringURL = new URL()
.setAddress(address)
.setLocation(new HandlerLocation().setMethodPath("stringReturn").setRootPath("benchmark"));
final String strResponse = transporter.ask(stringURL, request, String.class).toCompletableFuture().get();
log.info("[HttpVertxCSInitializerTest] strResponse: {}", strResponse);
log.info("[HttpVertxCSInitializerTest] test normal request!");
URL url = new URL()
.setAddress(address)
.setLocation(new HandlerLocation().setMethodPath("standard").setRootPath("benchmark"));
final CompletionStage<BenchmarkActor.BenchmarkResponse> benchmarkResponseCompletionStage = transporter.ask(url, request, BenchmarkActor.BenchmarkResponse.class);
final BenchmarkActor.BenchmarkResponse response = benchmarkResponseCompletionStage.toCompletableFuture().get(10, TimeUnit.SECONDS);
log.info("[HttpVertxCSInitializerTest] response: {}", response);
CommonUtils.easySleep(10000);
}
}

View File

@ -5,12 +5,12 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>3.0.0</version> <version>4.0.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-server</artifactId> <artifactId>powerjob-server</artifactId>
<version>4.2.1</version> <version>4.3.0</version>
<packaging>pom</packaging> <packaging>pom</packaging>
<modules> <modules>
@ -28,7 +28,7 @@
<properties> <properties>
<swagger.version>2.9.2</swagger.version> <swagger.version>2.9.2</swagger.version>
<springboot.version>2.7.4</springboot.version> <springboot.version>2.7.4</springboot.version>
<powerjob.common.version>4.2.1</powerjob.common.version>
<!-- MySQL version that corresponds to spring-boot-dependencies version. --> <!-- MySQL version that corresponds to spring-boot-dependencies version. -->
<mysql.version>8.0.30</mysql.version> <mysql.version>8.0.30</mysql.version>
<ojdbc.version>19.7.0.0</ojdbc.version> <ojdbc.version>19.7.0.0</ojdbc.version>
@ -43,13 +43,16 @@
<commons.net.version>3.8.0</commons.net.version> <commons.net.version>3.8.0</commons.net.version>
<fastjson.version>1.2.83</fastjson.version> <fastjson.version>1.2.83</fastjson.version>
<dingding.version>1.0.1</dingding.version> <dingding.version>1.0.1</dingding.version>
<vertx-web.version>4.0.2</vertx-web.version>
<!-- skip this module when deploying. --> <!-- skip this module when deploying. -->
<maven.deploy.skip>true</maven.deploy.skip> <maven.deploy.skip>true</maven.deploy.skip>
<groovy.version>3.0.10</groovy.version> <groovy.version>3.0.10</groovy.version>
<cron-utils.version>9.1.6</cron-utils.version> <cron-utils.version>9.1.6</cron-utils.version>
<powerjob-common.version>4.3.0</powerjob-common.version>
<powerjob-remote-impl-http.version>4.3.0</powerjob-remote-impl-http.version>
<powerjob-remote-impl-akka.version>4.3.0</powerjob-remote-impl-akka.version>
</properties> </properties>
<dependencyManagement> <dependencyManagement>
@ -99,11 +102,21 @@
<dependencies> <dependencies>
<!-- oms-common --> <!-- 网络层 -->
<dependency> <dependency>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<artifactId>powerjob-common</artifactId> <artifactId>powerjob-common</artifactId>
<version>${powerjob.common.version}</version> <version>${powerjob-common.version}</version>
</dependency>
<dependency>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob-remote-impl-http</artifactId>
<version>${powerjob-remote-impl-http.version}</version>
</dependency>
<dependency>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob-remote-impl-akka</artifactId>
<version>${powerjob-remote-impl-akka.version}</version>
</dependency> </dependency>
<!-- mysql --> <!-- mysql -->
@ -245,17 +258,6 @@
</exclusions> </exclusions>
</dependency> </dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-web</artifactId>
<version>${vertx-web.version}</version>
</dependency>
<dependency>
<groupId>io.vertx</groupId>
<artifactId>vertx-web-client</artifactId>
<version>${vertx-web.version}</version>
</dependency>
<dependency> <dependency>
<groupId>com.cronutils</groupId> <groupId>com.cronutils</groupId>
<artifactId>cron-utils</artifactId> <artifactId>cron-utils</artifactId>
@ -267,6 +269,12 @@
<groupId>io.springfox</groupId> <groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId> <artifactId>springfox-swagger2</artifactId>
<version>${swagger.version}</version> <version>${swagger.version}</version>
<exclusions>
<exclusion>
<artifactId>guava</artifactId>
<groupId>com.google.guava</groupId>
</exclusion>
</exclusions>
</dependency> </dependency>
<!-- swagger2 ui --> <!-- swagger2 ui -->
<dependency> <dependency>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob-server</artifactId> <artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.2.1</version> <version>4.3.0</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -15,5 +15,7 @@ public class ServerInfo {
private String ip; private String ip;
private long bornTime;
private String version = "UNKNOWN"; private String version = "UNKNOWN";
} }

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob-server</artifactId> <artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.2.1</version> <version>4.3.0</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -3,14 +3,18 @@ package tech.powerjob.server.core;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import tech.powerjob.common.RemoteConstant; import tech.powerjob.common.RemoteConstant;
import tech.powerjob.common.SystemInstanceResult; import tech.powerjob.common.SystemInstanceResult;
import tech.powerjob.common.enums.*; import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.InstanceStatus;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.request.ServerScheduleJobReq; import tech.powerjob.common.request.ServerScheduleJobReq;
import tech.powerjob.remote.framework.base.URL;
import tech.powerjob.server.common.Holder; import tech.powerjob.server.common.Holder;
import tech.powerjob.server.common.module.WorkerInfo; import tech.powerjob.server.common.module.WorkerInfo;
import tech.powerjob.server.core.instance.InstanceManager; import tech.powerjob.server.core.instance.InstanceManager;
@ -19,7 +23,8 @@ import tech.powerjob.server.core.lock.UseCacheLock;
import tech.powerjob.server.persistence.remote.model.InstanceInfoDO; import tech.powerjob.server.persistence.remote.model.InstanceInfoDO;
import tech.powerjob.server.persistence.remote.model.JobInfoDO; import tech.powerjob.server.persistence.remote.model.JobInfoDO;
import tech.powerjob.server.persistence.remote.repository.InstanceInfoRepository; import tech.powerjob.server.persistence.remote.repository.InstanceInfoRepository;
import tech.powerjob.server.remote.transport.TransportService; import tech.powerjob.server.remote.transporter.TransportService;
import tech.powerjob.server.remote.transporter.impl.ServerURLFactory;
import tech.powerjob.server.remote.worker.WorkerClusterQueryService; import tech.powerjob.server.remote.worker.WorkerClusterQueryService;
import java.util.ArrayList; import java.util.ArrayList;
@ -165,7 +170,8 @@ public class DispatchService {
WorkerInfo taskTracker = suitableWorkers.get(0); WorkerInfo taskTracker = suitableWorkers.get(0);
String taskTrackerAddress = taskTracker.getAddress(); String taskTrackerAddress = taskTracker.getAddress();
transportService.tell(Protocol.of(taskTracker.getProtocol()), taskTrackerAddress, req); URL workerUrl = ServerURLFactory.dispatchJob2Worker(taskTrackerAddress);
transportService.tell(taskTracker.getProtocol(), workerUrl, req);
log.info("[Dispatcher-{}|{}] send schedule request to TaskTracker[protocol:{},address:{}] successfully: {}.", jobId, instanceId, taskTracker.getProtocol(), taskTrackerAddress, req); log.info("[Dispatcher-{}|{}] send schedule request to TaskTracker[protocol:{},address:{}] successfully: {}.", jobId, instanceId, taskTracker.getProtocol(), taskTrackerAddress, req);
// update status // update status
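This hunk captures the 4.3.0 transport migration: instead of a Protocol enum plus a raw address string, the server now builds a typed URL via ServerURLFactory and passes it to TransportService together with the protocol advertised by the worker. A minimal before/after sketch of just this call site, reusing the names from the hunk (the surrounding dispatch logic is omitted):

    // before 4.3.0: protocol enum + raw "host:port" string
    // transportService.tell(Protocol.of(taskTracker.getProtocol()), taskTrackerAddress, req);

    // since 4.3.0: build a server-to-worker URL for the dispatch handler, then fire-and-forget
    URL workerUrl = ServerURLFactory.dispatchJob2Worker(taskTrackerAddress);
    transportService.tell(taskTracker.getProtocol(), workerUrl, req);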

View File

@ -1,25 +1,5 @@
package tech.powerjob.server.core.container; package tech.powerjob.server.core.container;
import tech.powerjob.common.OmsConstant;
import tech.powerjob.common.enums.Protocol;
import tech.powerjob.common.model.DeployedContainerInfo;
import tech.powerjob.common.model.GitRepoInfo;
import tech.powerjob.common.request.ServerDeployContainerRequest;
import tech.powerjob.common.request.ServerDestroyContainerRequest;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.common.utils.NetUtils;
import tech.powerjob.common.utils.SegmentLock;
import tech.powerjob.server.common.constants.ContainerSourceType;
import tech.powerjob.server.common.constants.SwitchableStatus;
import tech.powerjob.server.common.utils.OmsFileUtils;
import tech.powerjob.server.extension.LockService;
import tech.powerjob.server.persistence.remote.model.ContainerInfoDO;
import tech.powerjob.server.persistence.remote.repository.ContainerInfoRepository;
import tech.powerjob.server.persistence.mongodb.GridFsManager;
import tech.powerjob.server.remote.transport.TransportService;
import tech.powerjob.server.remote.worker.WorkerClusterQueryService;
import tech.powerjob.server.common.module.WorkerInfo;
import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Multimap; import com.google.common.collect.Multimap;
@ -28,6 +8,7 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.FileFilterUtils; import org.apache.commons.io.filefilter.FileFilterUtils;
import org.apache.commons.io.filefilter.IOFileFilter; import org.apache.commons.io.filefilter.IOFileFilter;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.commons.lang3.time.DateFormatUtils; import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.maven.shared.invoker.DefaultInvocationRequest; import org.apache.maven.shared.invoker.DefaultInvocationRequest;
@ -43,8 +24,29 @@ import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider;
import org.springframework.core.env.Environment; import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.web.multipart.MultipartFile; import org.springframework.web.multipart.MultipartFile;
import tech.powerjob.common.OmsConstant;
import tech.powerjob.common.model.DeployedContainerInfo;
import tech.powerjob.common.model.GitRepoInfo;
import tech.powerjob.common.request.ServerDeployContainerRequest;
import tech.powerjob.common.request.ServerDestroyContainerRequest;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.common.utils.NetUtils;
import tech.powerjob.common.utils.SegmentLock;
import tech.powerjob.remote.framework.base.URL;
import tech.powerjob.server.common.constants.ContainerSourceType;
import tech.powerjob.server.common.constants.SwitchableStatus;
import tech.powerjob.server.common.module.WorkerInfo;
import tech.powerjob.server.common.utils.OmsFileUtils;
import tech.powerjob.server.extension.LockService;
import tech.powerjob.server.persistence.mongodb.GridFsManager;
import tech.powerjob.server.persistence.remote.model.ContainerInfoDO;
import tech.powerjob.server.persistence.remote.repository.ContainerInfoRepository;
import tech.powerjob.server.remote.server.redirector.DesignateServer;
import tech.powerjob.server.remote.transporter.impl.ServerURLFactory;
import tech.powerjob.server.remote.transporter.TransportService;
import tech.powerjob.server.remote.worker.WorkerClusterQueryService;
import javax.annotation.Resource; import javax.annotation.Resource;
import javax.websocket.RemoteEndpoint; import javax.websocket.RemoteEndpoint;
@ -128,7 +130,8 @@ public class ContainerService {
ServerDestroyContainerRequest destroyRequest = new ServerDestroyContainerRequest(container.getId()); ServerDestroyContainerRequest destroyRequest = new ServerDestroyContainerRequest(container.getId());
workerClusterQueryService.getAllAliveWorkers(container.getAppId()).forEach(workerInfo -> { workerClusterQueryService.getAllAliveWorkers(container.getAppId()).forEach(workerInfo -> {
transportService.tell(Protocol.AKKA, workerInfo.getAddress(), destroyRequest); final URL url = ServerURLFactory.destroyContainer2Worker(workerInfo.getAddress());
transportService.tell(workerInfo.getProtocol(), url, destroyRequest);
}); });
log.info("[ContainerService] delete container: {}.", container); log.info("[ContainerService] delete container: {}.", container);
@ -245,13 +248,11 @@ public class ContainerService {
container.setGmtModified(now); container.setGmtModified(now);
container.setLastDeployTime(now); container.setLastDeployTime(now);
containerInfoRepository.saveAndFlush(container); containerInfoRepository.saveAndFlush(container);
remote.sendText(String.format("SYSTEM: update current container version=%s successfully!", container.getVersion()));
// start the deployment; it has to be done in batches // start the deployment; it has to be done in batches
Set<String> workerAddressList = workerClusterQueryService.getAllAliveWorkers(container.getAppId()) final List<WorkerInfo> allAliveWorkers = workerClusterQueryService.getAllAliveWorkers(container.getAppId());
.stream() if (allAliveWorkers.isEmpty()) {
.map(WorkerInfo::getAddress)
.collect(Collectors.toSet());
if (workerAddressList.isEmpty()) {
remote.sendText("SYSTEM: there is no worker available now, deploy failed!"); remote.sendText("SYSTEM: there is no worker available now, deploy failed!");
return; return;
} }
@ -262,10 +263,12 @@ public class ContainerService {
long sleepTime = calculateSleepTime(jarFile.length()); long sleepTime = calculateSleepTime(jarFile.length());
AtomicInteger count = new AtomicInteger(); AtomicInteger count = new AtomicInteger();
workerAddressList.forEach(akkaAddress -> { allAliveWorkers.forEach(workerInfo -> {
transportService.tell(Protocol.AKKA, akkaAddress, req);
remote.sendText("SYSTEM: send deploy request to " + akkaAddress); final URL url = ServerURLFactory.deployContainer2Worker(workerInfo.getAddress());
transportService.tell(workerInfo.getProtocol(), url, req);
remote.sendText("SYSTEM: send deploy request to " + url.getAddress());
if (count.incrementAndGet() % DEPLOY_BATCH_NUM == 0) { if (count.incrementAndGet() % DEPLOY_BATCH_NUM == 0) {
CommonUtils.executeIgnoreException(() -> Thread.sleep(sleepTime)); CommonUtils.executeIgnoreException(() -> Thread.sleep(sleepTime));
@ -285,6 +288,7 @@ public class ContainerService {
* @param containerId container ID * @param containerId container ID
* @return a concatenated, human-readable string * @return a concatenated, human-readable string
*/ */
@DesignateServer
public String fetchDeployedInfo(Long appId, Long containerId) { public String fetchDeployedInfo(Long appId, Long containerId) {
List<DeployedContainerInfo> infoList = workerClusterQueryService.getDeployedContainerInfos(appId, containerId); List<DeployedContainerInfo> infoList = workerClusterQueryService.getDeployedContainerInfos(appId, containerId);
@ -417,7 +421,10 @@ public class ContainerService {
FileUtils.copyFile(jarWithDependency, localFile); FileUtils.copyFile(jarWithDependency, localFile);
return localFile; return localFile;
}finally { } catch (Throwable t) {
log.error("[ContainerService] prepareJarFile failed for container: {}", container, t);
remote.sendText("SYSTEM: [ERROR] prepare jar file failed: " + ExceptionUtils.getStackTrace(t));
} finally {
// delete workspace data // delete workspace data
FileUtils.forceDelete(workerDir); FileUtils.forceDelete(workerDir);
} }
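For readability, here is the batched fan-out from the deploy hunk above reassembled as plain code: the sleep interval scales with the jar size, and the loop pauses after every DEPLOY_BATCH_NUM workers so they fetch the container jar in waves rather than all at once. A condensed sketch using the same names as the diff, not the full method:

    long sleepTime = calculateSleepTime(jarFile.length());
    AtomicInteger count = new AtomicInteger();
    allAliveWorkers.forEach(workerInfo -> {
        // each worker is addressed by its own advertised protocol and a deploy URL
        final URL url = ServerURLFactory.deployContainer2Worker(workerInfo.getAddress());
        transportService.tell(workerInfo.getProtocol(), url, req);
        remote.sendText("SYSTEM: send deploy request to " + url.getAddress());
        // throttle: rest after each full batch so jar downloads are spread out
        if (count.incrementAndGet() % DEPLOY_BATCH_NUM == 0) {
            CommonUtils.executeIgnoreException(() -> Thread.sleep(sleepTime));
        }
    });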

View File

@ -10,6 +10,8 @@ import tech.powerjob.common.request.*;
import tech.powerjob.common.response.AskResponse; import tech.powerjob.common.response.AskResponse;
import tech.powerjob.common.serialize.JsonUtils; import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.common.utils.NetUtils; import tech.powerjob.common.utils.NetUtils;
import tech.powerjob.remote.framework.actor.Handler;
import tech.powerjob.remote.framework.actor.ProcessType;
import tech.powerjob.server.common.constants.SwitchableStatus; import tech.powerjob.server.common.constants.SwitchableStatus;
import tech.powerjob.server.common.module.WorkerInfo; import tech.powerjob.server.common.module.WorkerInfo;
import tech.powerjob.server.common.utils.SpringUtils; import tech.powerjob.server.common.utils.SpringUtils;
@ -28,6 +30,8 @@ import java.util.Optional;
import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.RejectedExecutionException;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static tech.powerjob.common.RemoteConstant.*;
/** /**
* wrapper monitor for IWorkerRequestHandler * wrapper monitor for IWorkerRequestHandler
* *
@ -49,12 +53,13 @@ public abstract class AbWorkerRequestHandler implements IWorkerRequestHandler {
protected abstract void processWorkerHeartbeat0(WorkerHeartbeat heartbeat, WorkerHeartbeatEvent event); protected abstract void processWorkerHeartbeat0(WorkerHeartbeat heartbeat, WorkerHeartbeatEvent event);
protected abstract Optional<AskResponse> processTaskTrackerReportInstanceStatus0(TaskTrackerReportInstanceStatusReq req, TtReportInstanceStatusEvent event) throws Exception; protected abstract AskResponse processTaskTrackerReportInstanceStatus0(TaskTrackerReportInstanceStatusReq req, TtReportInstanceStatusEvent event) throws Exception;
protected abstract void processWorkerLogReport0(WorkerLogReportReq req, WorkerLogReportEvent event); protected abstract void processWorkerLogReport0(WorkerLogReportReq req, WorkerLogReportEvent event);
@Override @Override
@Handler(path = S4W_HANDLER_WORKER_HEARTBEAT, processType = ProcessType.NO_BLOCKING)
public void processWorkerHeartbeat(WorkerHeartbeat heartbeat) { public void processWorkerHeartbeat(WorkerHeartbeat heartbeat) {
long startMs = System.currentTimeMillis(); long startMs = System.currentTimeMillis();
WorkerHeartbeatEvent event = new WorkerHeartbeatEvent() WorkerHeartbeatEvent event = new WorkerHeartbeatEvent()
@ -71,7 +76,8 @@ public abstract class AbWorkerRequestHandler implements IWorkerRequestHandler {
} }
@Override @Override
public Optional<AskResponse> processTaskTrackerReportInstanceStatus(TaskTrackerReportInstanceStatusReq req) { @Handler(path = S4W_HANDLER_REPORT_INSTANCE_STATUS, processType = ProcessType.BLOCKING)
public AskResponse processTaskTrackerReportInstanceStatus(TaskTrackerReportInstanceStatusReq req) {
long startMs = System.currentTimeMillis(); long startMs = System.currentTimeMillis();
TtReportInstanceStatusEvent event = new TtReportInstanceStatusEvent() TtReportInstanceStatusEvent event = new TtReportInstanceStatusEvent()
.setAppId(req.getAppId()) .setAppId(req.getAppId())
@ -86,7 +92,7 @@ public abstract class AbWorkerRequestHandler implements IWorkerRequestHandler {
} catch (Exception e) { } catch (Exception e) {
event.setServerProcessStatus(TtReportInstanceStatusEvent.Status.FAILED); event.setServerProcessStatus(TtReportInstanceStatusEvent.Status.FAILED);
log.error("[WorkerRequestHandler] processTaskTrackerReportInstanceStatus failed for request: {}", req, e); log.error("[WorkerRequestHandler] processTaskTrackerReportInstanceStatus failed for request: {}", req, e);
return Optional.of(AskResponse.failed(ExceptionUtils.getMessage(e))); return AskResponse.failed(ExceptionUtils.getMessage(e));
} finally { } finally {
event.setServerProcessCost(System.currentTimeMillis() - startMs); event.setServerProcessCost(System.currentTimeMillis() - startMs);
monitorService.monitor(event); monitorService.monitor(event);
@ -94,6 +100,7 @@ public abstract class AbWorkerRequestHandler implements IWorkerRequestHandler {
} }
@Override @Override
@Handler(path = S4W_HANDLER_REPORT_LOG, processType = ProcessType.NO_BLOCKING)
public void processWorkerLogReport(WorkerLogReportReq req) { public void processWorkerLogReport(WorkerLogReportReq req) {
WorkerLogReportEvent event = new WorkerLogReportEvent() WorkerLogReportEvent event = new WorkerLogReportEvent()
@ -113,6 +120,7 @@ public abstract class AbWorkerRequestHandler implements IWorkerRequestHandler {
} }
@Override @Override
@Handler(path = S4W_HANDLER_QUERY_JOB_CLUSTER, processType = ProcessType.BLOCKING)
public AskResponse processWorkerQueryExecutorCluster(WorkerQueryExecutorClusterReq req) { public AskResponse processWorkerQueryExecutorCluster(WorkerQueryExecutorClusterReq req) {
AskResponse askResponse; AskResponse askResponse;
@ -137,6 +145,7 @@ public abstract class AbWorkerRequestHandler implements IWorkerRequestHandler {
} }
@Override @Override
@Handler(path = S4W_HANDLER_WORKER_NEED_DEPLOY_CONTAINER, processType = ProcessType.BLOCKING)
public AskResponse processWorkerNeedDeployContainer(WorkerNeedDeployContainerRequest req) { public AskResponse processWorkerNeedDeployContainer(WorkerNeedDeployContainerRequest req) {
String port = environment.getProperty("local.server.port"); String port = environment.getProperty("local.server.port");
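These @Handler annotations come from the new powerjob-remote framework: a class marked @Actor is mounted at a root path (the concrete WorkerRequestHandlerImpl below uses RemoteConstant.S4W_PATH), and each method is exposed under a sub-path with a ProcessType of BLOCKING (the caller waits for an AskResponse) or NO_BLOCKING (fire-and-forget). A minimal sketch of the declaration style using the constants visible in this diff; the class and method names here are illustrative only:

    import tech.powerjob.common.request.TaskTrackerReportInstanceStatusReq;
    import tech.powerjob.common.request.WorkerHeartbeat;
    import tech.powerjob.common.response.AskResponse;
    import tech.powerjob.remote.framework.actor.Actor;
    import tech.powerjob.remote.framework.actor.Handler;
    import tech.powerjob.remote.framework.actor.ProcessType;

    import static tech.powerjob.common.RemoteConstant.*;

    @Actor(path = S4W_PATH)
    public class SampleWorkerRequestActor {

        // fire-and-forget: heartbeats never produce a reply
        @Handler(path = S4W_HANDLER_WORKER_HEARTBEAT, processType = ProcessType.NO_BLOCKING)
        public void onHeartbeat(WorkerHeartbeat heartbeat) {
            // record or index the heartbeat here
        }

        // blocking: the caller waits for an AskResponse
        @Handler(path = S4W_HANDLER_REPORT_INSTANCE_STATUS, processType = ProcessType.BLOCKING)
        public AskResponse onStatusReport(TaskTrackerReportInstanceStatusReq req) {
            return AskResponse.succeed(null);
        }
    }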

View File

@ -3,8 +3,6 @@ package tech.powerjob.server.core.handler;
import tech.powerjob.common.request.*; import tech.powerjob.common.request.*;
import tech.powerjob.common.response.AskResponse; import tech.powerjob.common.response.AskResponse;
import java.util.Optional;
/** /**
* Defines the protocol handled between server and worker * Defines the protocol handled between server and worker
* *
@ -24,7 +22,7 @@ public interface IWorkerRequestHandler {
* @param req status report request * @param req status report request
* @return response * @return response
*/ */
Optional<AskResponse> processTaskTrackerReportInstanceStatus(TaskTrackerReportInstanceStatusReq req); AskResponse processTaskTrackerReportInstanceStatus(TaskTrackerReportInstanceStatusReq req);
/** /**
* Handle a worker's query for the executor cluster * Handle a worker's query for the executor cluster
@ -34,7 +32,7 @@ public interface IWorkerRequestHandler {
AskResponse processWorkerQueryExecutorCluster(WorkerQueryExecutorClusterReq req); AskResponse processWorkerQueryExecutorCluster(WorkerQueryExecutorClusterReq req);
/** /**
* Handle worker log report requests * Handle worker log report requests (processed asynchronously by an internal thread pool, non-blocking)
* @param req request * @param req request
*/ */
void processWorkerLogReport(WorkerLogReportReq req); void processWorkerLogReport(WorkerLogReportReq req);

View File

@ -1,27 +0,0 @@
package tech.powerjob.server.core.handler;
import org.springframework.stereotype.Component;
/**
* WorkerRequestHandlerHolder
*
* @author tjq
* @since 2022/9/11
*/
@Component
public class WorkerRequestHandlerHolder {
private static IWorkerRequestHandler workerRequestHandler;
public WorkerRequestHandlerHolder(IWorkerRequestHandler injectedWorkerRequestHandler) {
workerRequestHandler = injectedWorkerRequestHandler;
}
public static IWorkerRequestHandler fetchWorkerRequestHandler() {
if (workerRequestHandler == null){
throw new IllegalStateException("WorkerRequestHandlerHolder not initialized!");
}
return workerRequestHandler;
}
}

View File

@ -4,11 +4,13 @@ import lombok.extern.slf4j.Slf4j;
import org.springframework.core.env.Environment; import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils; import org.springframework.util.CollectionUtils;
import tech.powerjob.common.RemoteConstant;
import tech.powerjob.common.enums.InstanceStatus; import tech.powerjob.common.enums.InstanceStatus;
import tech.powerjob.common.request.TaskTrackerReportInstanceStatusReq; import tech.powerjob.common.request.TaskTrackerReportInstanceStatusReq;
import tech.powerjob.common.request.WorkerHeartbeat; import tech.powerjob.common.request.WorkerHeartbeat;
import tech.powerjob.common.request.WorkerLogReportReq; import tech.powerjob.common.request.WorkerLogReportReq;
import tech.powerjob.common.response.AskResponse; import tech.powerjob.common.response.AskResponse;
import tech.powerjob.remote.framework.actor.Actor;
import tech.powerjob.server.core.instance.InstanceLogService; import tech.powerjob.server.core.instance.InstanceLogService;
import tech.powerjob.server.core.instance.InstanceManager; import tech.powerjob.server.core.instance.InstanceManager;
import tech.powerjob.server.core.workflow.WorkflowInstanceManager; import tech.powerjob.server.core.workflow.WorkflowInstanceManager;
@ -20,8 +22,6 @@ import tech.powerjob.server.persistence.remote.repository.ContainerInfoRepositor
import tech.powerjob.server.remote.worker.WorkerClusterManagerService; import tech.powerjob.server.remote.worker.WorkerClusterManagerService;
import tech.powerjob.server.remote.worker.WorkerClusterQueryService; import tech.powerjob.server.remote.worker.WorkerClusterQueryService;
import java.util.Optional;
/** /**
* receive and process worker's request * receive and process worker's request
* *
@ -30,6 +30,7 @@ import java.util.Optional;
*/ */
@Slf4j @Slf4j
@Component @Component
@Actor(path = RemoteConstant.S4W_PATH)
public class WorkerRequestHandlerImpl extends AbWorkerRequestHandler { public class WorkerRequestHandlerImpl extends AbWorkerRequestHandler {
private final InstanceManager instanceManager; private final InstanceManager instanceManager;
@ -52,7 +53,7 @@ public class WorkerRequestHandlerImpl extends AbWorkerRequestHandler {
} }
@Override @Override
protected Optional<AskResponse> processTaskTrackerReportInstanceStatus0(TaskTrackerReportInstanceStatusReq req, TtReportInstanceStatusEvent event) throws Exception { protected AskResponse processTaskTrackerReportInstanceStatus0(TaskTrackerReportInstanceStatusReq req, TtReportInstanceStatusEvent event) throws Exception {
// 2021/02/05 for instances inside a workflow, first try to update the workflow context, then the instance status; no exception can occur here // 2021/02/05 for instances inside a workflow, first try to update the workflow context, then the instance status; no exception can occur here
if (req.getWfInstanceId() != null && !CollectionUtils.isEmpty(req.getAppendedWfContext())) { if (req.getWfInstanceId() != null && !CollectionUtils.isEmpty(req.getAppendedWfContext())) {
// update workflow context // update workflow context
@ -63,9 +64,10 @@ public class WorkerRequestHandlerImpl extends AbWorkerRequestHandler {
// terminal states (success/failure) require a reply // terminal states (success/failure) require a reply
if (InstanceStatus.FINISHED_STATUS.contains(req.getInstanceStatus())) { if (InstanceStatus.FINISHED_STATUS.contains(req.getInstanceStatus())) {
return Optional.of(AskResponse.succeed(null)); return AskResponse.succeed(null);
} }
return Optional.empty();
return null;
} }
@Override @Override
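Compared with the old Optional-based contract, the implementation now returns the AskResponse directly for terminal states and null otherwise; reading null as "no reply needed" is an inference from this change rather than something the diff states. A trimmed sketch of the new method body as it appears above:

    // terminal states (success/failure) need an acknowledgement back to the TaskTracker
    if (InstanceStatus.FINISHED_STATUS.contains(req.getInstanceStatus())) {
        return AskResponse.succeed(null);
    }
    // non-terminal progress reports: nothing to send back (assumption: null means no reply)
    return null;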

View File

@ -1,28 +0,0 @@
package tech.powerjob.server.core.handler.impl;
import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression;
import org.springframework.stereotype.Component;
import tech.powerjob.common.RemoteConstant;
import tech.powerjob.server.remote.transport.starter.AkkaStarter;
import tech.powerjob.server.remote.transport.starter.VertXStarter;
import javax.annotation.PostConstruct;
/**
* Initializer
*
* @author tjq
* @since 2022/9/11
*/
@Component
@ConditionalOnExpression("'${execution.env}'!='test'")
public class Initializer {
@PostConstruct
public void initHandler() {
// init akka
AkkaStarter.actorSystem.actorOf(WorkerRequestAkkaHandler.defaultProps(), RemoteConstant.SERVER_ACTOR_NAME);
// init vert.x
VertXStarter.vertx.deployVerticle(new WorkerRequestHttpHandler());
}
}

View File

@ -1,100 +0,0 @@
package tech.powerjob.server.core.handler.impl;
import akka.actor.AbstractActor;
import akka.actor.Props;
import akka.routing.DefaultResizer;
import akka.routing.RoundRobinPool;
import tech.powerjob.common.request.*;
import tech.powerjob.common.response.AskResponse;
import lombok.extern.slf4j.Slf4j;
import java.util.Optional;
import static tech.powerjob.server.core.handler.WorkerRequestHandlerHolder.fetchWorkerRequestHandler;
/**
* Handle worker requests
*
* @author tjq
* @since 2020/3/30
*/
@Slf4j
public class WorkerRequestAkkaHandler extends AbstractActor {
public static Props defaultProps(){
return Props.create(WorkerRequestAkkaHandler.class)
.withDispatcher("akka.w-r-c-d")
.withRouter(
new RoundRobinPool(Runtime.getRuntime().availableProcessors() * 4)
.withResizer(new DefaultResizer(
Runtime.getRuntime().availableProcessors() * 4,
Runtime.getRuntime().availableProcessors() * 10,
1,
0.2d,
0.3d,
0.1d,
10
))
);
}
@Override
public Receive createReceive() {
return receiveBuilder()
.match(WorkerHeartbeat.class, hb -> fetchWorkerRequestHandler().processWorkerHeartbeat(hb))
.match(TaskTrackerReportInstanceStatusReq.class, this::onReceiveTaskTrackerReportInstanceStatusReq)
.match(WorkerLogReportReq.class, req -> fetchWorkerRequestHandler().processWorkerLogReport(req))
.match(WorkerNeedDeployContainerRequest.class, this::onReceiveWorkerNeedDeployContainerRequest)
.match(WorkerQueryExecutorClusterReq.class, this::onReceiveWorkerQueryExecutorClusterReq)
.matchAny(obj -> log.warn("[WorkerRequestAkkaHandler] receive unknown request: {}.", obj))
.build();
}
@Override
public void preStart() throws Exception {
super.preStart();
log.debug("[WorkerRequestAkkaHandler]init WorkerRequestActor");
}
@Override
public void postStop() throws Exception {
super.postStop();
log.debug("[WorkerRequestAkkaHandler]stop WorkerRequestActor");
}
/**
* Handle instance status
* @param req status report request from the task instance
*/
private void onReceiveTaskTrackerReportInstanceStatusReq(TaskTrackerReportInstanceStatusReq req) {
try {
Optional<AskResponse> askResponseOpt = fetchWorkerRequestHandler().processTaskTrackerReportInstanceStatus(req);
if (askResponseOpt.isPresent()) {
getSender().tell(AskResponse.succeed(null), getSelf());
}
}catch (Exception e) {
log.error("[WorkerRequestAkkaHandler] update instance status failed for request: {}.", req, e);
}
}
/**
* Handle a worker's container deployment request
* @param req container deployment request
*/
private void onReceiveWorkerNeedDeployContainerRequest(WorkerNeedDeployContainerRequest req) {
getSender().tell(fetchWorkerRequestHandler().processWorkerNeedDeployContainer(req), getSelf());
}
/**
* Handle a worker's request to fetch all processor nodes for the current job
* @param req jobId + appId
*/
private void onReceiveWorkerQueryExecutorClusterReq(WorkerQueryExecutorClusterReq req) {
getSender().tell(fetchWorkerRequestHandler().processWorkerQueryExecutorCluster(req), getSelf());
}
}

View File

@ -1,81 +0,0 @@
package tech.powerjob.server.core.handler.impl;
import tech.powerjob.common.OmsConstant;
import tech.powerjob.common.ProtocolConstant;
import tech.powerjob.common.request.TaskTrackerReportInstanceStatusReq;
import tech.powerjob.common.request.WorkerHeartbeat;
import tech.powerjob.common.request.WorkerLogReportReq;
import tech.powerjob.common.response.AskResponse;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.server.common.PowerJobServerConfigKey;
import tech.powerjob.server.common.utils.PropertyUtils;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.http.HttpServer;
import io.vertx.core.http.HttpServerOptions;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.RoutingContext;
import io.vertx.ext.web.handler.BodyHandler;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.exception.ExceptionUtils;
import java.util.Properties;
import static tech.powerjob.server.core.handler.WorkerRequestHandlerHolder.fetchWorkerRequestHandler;
/**
* WorkerRequestHandler
*
* @author tjq
* @since 2021/2/8
*/
@Slf4j
public class WorkerRequestHttpHandler extends AbstractVerticle {
@Override
public void start() throws Exception {
Properties properties = PropertyUtils.getProperties();
int port = Integer.parseInt(properties.getProperty(PowerJobServerConfigKey.HTTP_PORT, String.valueOf(OmsConstant.SERVER_DEFAULT_HTTP_PORT)));
HttpServerOptions options = new HttpServerOptions();
HttpServer server = vertx.createHttpServer(options);
Router router = Router.router(vertx);
router.route().handler(BodyHandler.create());
router.post(ProtocolConstant.SERVER_PATH_HEARTBEAT)
.handler(ctx -> {
WorkerHeartbeat heartbeat = ctx.getBodyAsJson().mapTo(WorkerHeartbeat.class);
fetchWorkerRequestHandler().processWorkerHeartbeat(heartbeat);
success(ctx);
});
router.post(ProtocolConstant.SERVER_PATH_STATUS_REPORT)
.blockingHandler(ctx -> {
TaskTrackerReportInstanceStatusReq req = ctx.getBodyAsJson().mapTo(TaskTrackerReportInstanceStatusReq.class);
try {
fetchWorkerRequestHandler().processTaskTrackerReportInstanceStatus(req);
out(ctx, AskResponse.succeed(null));
} catch (Exception e) {
log.error("[WorkerRequestHttpHandler] update instance status failed for request: {}.", req, e);
out(ctx, AskResponse.failed(ExceptionUtils.getMessage(e)));
}
});
router.post(ProtocolConstant.SERVER_PATH_LOG_REPORT)
.blockingHandler(ctx -> {
WorkerLogReportReq req = ctx.getBodyAsJson().mapTo(WorkerLogReportReq.class);
fetchWorkerRequestHandler().processWorkerLogReport(req);
success(ctx);
});
server.requestHandler(router).listen(port);
}
private static void out(RoutingContext ctx, Object msg) {
ctx.response()
.putHeader(OmsConstant.HTTP_HEADER_CONTENT_TYPE, OmsConstant.JSON_MEDIA_TYPE)
.end(JsonObject.mapFrom(msg).encode());
}
private static void success(RoutingContext ctx) {
out(ctx, ResultDTO.success(null));
}
}

View File

@ -2,15 +2,15 @@ package tech.powerjob.server.core.instance;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.common.enums.InstanceStatus; import tech.powerjob.common.enums.InstanceStatus;
import tech.powerjob.common.enums.Protocol;
import tech.powerjob.common.enums.TimeExpressionType; import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.model.LifeCycle; import tech.powerjob.common.model.LifeCycle;
import tech.powerjob.common.request.ServerStopInstanceReq; import tech.powerjob.common.request.ServerStopInstanceReq;
import tech.powerjob.common.request.TaskTrackerReportInstanceStatusReq; import tech.powerjob.common.request.TaskTrackerReportInstanceStatusReq;
import tech.powerjob.remote.framework.base.URL;
import tech.powerjob.server.common.module.WorkerInfo; import tech.powerjob.server.common.module.WorkerInfo;
import tech.powerjob.server.common.timewheel.holder.HashedWheelTimerHolder; import tech.powerjob.server.common.timewheel.holder.HashedWheelTimerHolder;
import tech.powerjob.server.common.utils.SpringUtils; import tech.powerjob.server.common.utils.SpringUtils;
@ -22,10 +22,10 @@ import tech.powerjob.server.persistence.remote.model.InstanceInfoDO;
import tech.powerjob.server.persistence.remote.model.JobInfoDO; import tech.powerjob.server.persistence.remote.model.JobInfoDO;
import tech.powerjob.server.persistence.remote.model.UserInfoDO; import tech.powerjob.server.persistence.remote.model.UserInfoDO;
import tech.powerjob.server.persistence.remote.repository.InstanceInfoRepository; import tech.powerjob.server.persistence.remote.repository.InstanceInfoRepository;
import tech.powerjob.server.remote.transport.TransportService; import tech.powerjob.server.remote.transporter.impl.ServerURLFactory;
import tech.powerjob.server.remote.transporter.TransportService;
import tech.powerjob.server.remote.worker.WorkerClusterQueryService; import tech.powerjob.server.remote.worker.WorkerClusterQueryService;
import javax.annotation.Resource;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
@ -176,7 +176,8 @@ public class InstanceManager {
if (workerInfoOpt.isPresent()) { if (workerInfoOpt.isPresent()) {
ServerStopInstanceReq stopInstanceReq = new ServerStopInstanceReq(instanceId); ServerStopInstanceReq stopInstanceReq = new ServerStopInstanceReq(instanceId);
WorkerInfo workerInfo = workerInfoOpt.get(); WorkerInfo workerInfo = workerInfoOpt.get();
transportService.tell(Protocol.of(workerInfo.getProtocol()), workerInfo.getAddress(), stopInstanceReq); final URL url = ServerURLFactory.stopInstance2Worker(workerInfo.getAddress());
transportService.tell(workerInfo.getProtocol(), url, stopInstanceReq);
} }
} }

View File

@ -5,15 +5,16 @@ import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.BeanUtils; import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import tech.powerjob.common.PowerQuery; import tech.powerjob.common.PowerQuery;
import tech.powerjob.common.RemoteConstant;
import tech.powerjob.common.SystemInstanceResult; import tech.powerjob.common.SystemInstanceResult;
import tech.powerjob.common.enums.InstanceStatus; import tech.powerjob.common.enums.InstanceStatus;
import tech.powerjob.common.enums.Protocol;
import tech.powerjob.common.exception.PowerJobException; import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.model.InstanceDetail; import tech.powerjob.common.model.InstanceDetail;
import tech.powerjob.common.request.ServerQueryInstanceStatusReq; import tech.powerjob.common.request.ServerQueryInstanceStatusReq;
import tech.powerjob.common.request.ServerStopInstanceReq; import tech.powerjob.common.request.ServerStopInstanceReq;
import tech.powerjob.common.response.AskResponse; import tech.powerjob.common.response.AskResponse;
import tech.powerjob.common.response.InstanceInfoDTO; import tech.powerjob.common.response.InstanceInfoDTO;
import tech.powerjob.remote.framework.base.URL;
import tech.powerjob.server.common.constants.InstanceType; import tech.powerjob.server.common.constants.InstanceType;
import tech.powerjob.server.common.module.WorkerInfo; import tech.powerjob.server.common.module.WorkerInfo;
import tech.powerjob.server.common.timewheel.TimerFuture; import tech.powerjob.server.common.timewheel.TimerFuture;
@ -26,12 +27,14 @@ import tech.powerjob.server.persistence.remote.model.JobInfoDO;
import tech.powerjob.server.persistence.remote.repository.InstanceInfoRepository; import tech.powerjob.server.persistence.remote.repository.InstanceInfoRepository;
import tech.powerjob.server.persistence.remote.repository.JobInfoRepository; import tech.powerjob.server.persistence.remote.repository.JobInfoRepository;
import tech.powerjob.server.remote.server.redirector.DesignateServer; import tech.powerjob.server.remote.server.redirector.DesignateServer;
import tech.powerjob.server.remote.transport.TransportService; import tech.powerjob.server.remote.transporter.impl.ServerURLFactory;
import tech.powerjob.server.remote.transporter.TransportService;
import tech.powerjob.server.remote.worker.WorkerClusterQueryService; import tech.powerjob.server.remote.worker.WorkerClusterQueryService;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static tech.powerjob.common.enums.InstanceStatus.RUNNING; import static tech.powerjob.common.enums.InstanceStatus.RUNNING;
@ -136,7 +139,7 @@ public class InstanceService {
if (workerInfoOpt.isPresent()) { if (workerInfoOpt.isPresent()) {
ServerStopInstanceReq req = new ServerStopInstanceReq(instanceId); ServerStopInstanceReq req = new ServerStopInstanceReq(instanceId);
WorkerInfo workerInfo = workerInfoOpt.get(); WorkerInfo workerInfo = workerInfoOpt.get();
transportService.tell(Protocol.of(workerInfo.getProtocol()), workerInfo.getAddress(), req); transportService.tell(workerInfo.getProtocol(), ServerURLFactory.stopInstance2Worker(workerInfo.getAddress()), req);
log.info("[Instance-{}] update instanceInfo and send 'stopInstance' request succeed.", instanceId); log.info("[Instance-{}] update instanceInfo and send 'stopInstance' request succeed.", instanceId);
} else { } else {
log.warn("[Instance-{}] update instanceInfo successfully but can't find TaskTracker to stop instance", instanceId); log.warn("[Instance-{}] update instanceInfo successfully but can't find TaskTracker to stop instance", instanceId);
@ -280,7 +283,10 @@ public class InstanceService {
WorkerInfo workerInfo = workerInfoOpt.get(); WorkerInfo workerInfo = workerInfoOpt.get();
ServerQueryInstanceStatusReq req = new ServerQueryInstanceStatusReq(instanceId); ServerQueryInstanceStatusReq req = new ServerQueryInstanceStatusReq(instanceId);
try { try {
AskResponse askResponse = transportService.ask(Protocol.of(workerInfo.getProtocol()), workerInfo.getAddress(), req); final URL url = ServerURLFactory.queryInstance2Worker(workerInfo.getAddress());
AskResponse askResponse = transportService.ask(workerInfo.getProtocol(), url, req, AskResponse.class)
.toCompletableFuture()
.get(RemoteConstant.DEFAULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
if (askResponse.isSuccess()) { if (askResponse.isSuccess()) {
InstanceDetail instanceDetail = askResponse.getData(InstanceDetail.class); InstanceDetail instanceDetail = askResponse.getData(InstanceDetail.class);
instanceDetail.setRunningTimes(instanceInfoDO.getRunningTimes()); instanceDetail.setRunningTimes(instanceInfoDO.getRunningTimes());
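This hunk shows the new request/response path: ask(...) returns a CompletionStage, so the synchronous call site converts it to a CompletableFuture and blocks with the shared RemoteConstant.DEFAULT_TIMEOUT_MS. A stripped-down sketch of that pattern as used above, with the surrounding try/catch omitted and names taken from the diff:

    final URL url = ServerURLFactory.queryInstance2Worker(workerInfo.getAddress());
    AskResponse askResponse = transportService
            .ask(workerInfo.getProtocol(), url, req, AskResponse.class) // asynchronous send
            .toCompletableFuture()
            .get(RemoteConstant.DEFAULT_TIMEOUT_MS, TimeUnit.MILLISECONDS); // bounded wait
    if (askResponse.isSuccess()) {
        InstanceDetail instanceDetail = askResponse.getData(InstanceDetail.class);
        instanceDetail.setRunningTimes(instanceInfoDO.getRunningTimes());
    }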

View File

@ -22,9 +22,8 @@ import tech.powerjob.server.persistence.remote.model.WorkflowInfoDO;
import tech.powerjob.server.persistence.remote.model.WorkflowInstanceInfoDO; import tech.powerjob.server.persistence.remote.model.WorkflowInstanceInfoDO;
import tech.powerjob.server.persistence.remote.model.brief.BriefInstanceInfo; import tech.powerjob.server.persistence.remote.model.brief.BriefInstanceInfo;
import tech.powerjob.server.persistence.remote.repository.*; import tech.powerjob.server.persistence.remote.repository.*;
import tech.powerjob.server.remote.transport.starter.AkkaStarter; import tech.powerjob.server.remote.transporter.TransportService;
import javax.annotation.Resource;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@ -49,6 +48,8 @@ public class InstanceStatusCheckService {
public static final long CHECK_INTERVAL = 10000; public static final long CHECK_INTERVAL = 10000;
private final TransportService transportService;
private final DispatchService dispatchService; private final DispatchService dispatchService;
private final InstanceManager instanceManager; private final InstanceManager instanceManager;
@ -61,7 +62,6 @@ public class InstanceStatusCheckService {
private final InstanceInfoRepository instanceInfoRepository; private final InstanceInfoRepository instanceInfoRepository;
private final WorkflowInfoRepository workflowInfoRepository; private final WorkflowInfoRepository workflowInfoRepository;
private final WorkflowInstanceInfoRepository workflowInstanceInfoRepository; private final WorkflowInstanceInfoRepository workflowInstanceInfoRepository;
@ -69,7 +69,7 @@ public class InstanceStatusCheckService {
public void checkWorkflowInstance() { public void checkWorkflowInstance() {
Stopwatch stopwatch = Stopwatch.createStarted(); Stopwatch stopwatch = Stopwatch.createStarted();
// query the DB for the AppGroups this server is responsible for // query the DB for the AppGroups this server is responsible for
List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(AkkaStarter.getActorSystemAddress()); List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(transportService.defaultProtocol().getAddress());
if (CollectionUtils.isEmpty(allAppIds)) { if (CollectionUtils.isEmpty(allAppIds)) {
log.info("[InstanceStatusChecker] current server has no app's job to check"); log.info("[InstanceStatusChecker] current server has no app's job to check");
return; return;
@ -89,7 +89,7 @@ public class InstanceStatusCheckService {
public void checkWaitingDispatchInstance() { public void checkWaitingDispatchInstance() {
Stopwatch stopwatch = Stopwatch.createStarted(); Stopwatch stopwatch = Stopwatch.createStarted();
// query the DB for the AppGroups this server is responsible for // query the DB for the AppGroups this server is responsible for
List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(AkkaStarter.getActorSystemAddress()); List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(transportService.defaultProtocol().getAddress());
if (CollectionUtils.isEmpty(allAppIds)) { if (CollectionUtils.isEmpty(allAppIds)) {
log.info("[InstanceStatusChecker] current server has no app's job to check"); log.info("[InstanceStatusChecker] current server has no app's job to check");
return; return;
@ -110,7 +110,7 @@ public class InstanceStatusCheckService {
public void checkWaitingWorkerReceiveInstance() { public void checkWaitingWorkerReceiveInstance() {
Stopwatch stopwatch = Stopwatch.createStarted(); Stopwatch stopwatch = Stopwatch.createStarted();
// query the DB for the AppGroups this server is responsible for // query the DB for the AppGroups this server is responsible for
List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(AkkaStarter.getActorSystemAddress()); List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(transportService.defaultProtocol().getAddress());
if (CollectionUtils.isEmpty(allAppIds)) { if (CollectionUtils.isEmpty(allAppIds)) {
log.info("[InstanceStatusChecker] current server has no app's job to check"); log.info("[InstanceStatusChecker] current server has no app's job to check");
return; return;
@ -131,7 +131,7 @@ public class InstanceStatusCheckService {
public void checkRunningInstance() { public void checkRunningInstance() {
Stopwatch stopwatch = Stopwatch.createStarted(); Stopwatch stopwatch = Stopwatch.createStarted();
// query the DB for the AppGroups this server is responsible for // query the DB for the AppGroups this server is responsible for
List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(AkkaStarter.getActorSystemAddress()); List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(transportService.defaultProtocol().getAddress());
if (CollectionUtils.isEmpty(allAppIds)) { if (CollectionUtils.isEmpty(allAppIds)) {
log.info("[InstanceStatusChecker] current server has no app's job to check"); log.info("[InstanceStatusChecker] current server has no app's job to check");
return; return;
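Wherever the server used to identify itself by its Akka ActorSystem address, it now asks the transport layer for the address of its default protocol; the same substitution repeats in PowerScheduleService below. A one-line sketch of the replacement, names as in the hunks:

    // before 4.3.0
    // List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(AkkaStarter.getActorSystemAddress());

    // since 4.3.0: the bound address of whichever protocol the server treats as its default
    List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(transportService.defaultProtocol().getAddress());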

View File

@ -23,10 +23,9 @@ import tech.powerjob.server.persistence.remote.repository.AppInfoRepository;
import tech.powerjob.server.persistence.remote.repository.InstanceInfoRepository; import tech.powerjob.server.persistence.remote.repository.InstanceInfoRepository;
import tech.powerjob.server.persistence.remote.repository.JobInfoRepository; import tech.powerjob.server.persistence.remote.repository.JobInfoRepository;
import tech.powerjob.server.persistence.remote.repository.WorkflowInfoRepository; import tech.powerjob.server.persistence.remote.repository.WorkflowInfoRepository;
import tech.powerjob.server.remote.transport.starter.AkkaStarter; import tech.powerjob.server.remote.transporter.TransportService;
import tech.powerjob.server.remote.worker.WorkerClusterManagerService; import tech.powerjob.server.remote.worker.WorkerClusterManagerService;
import javax.annotation.Resource;
import java.util.*; import java.util.*;
/** /**
@ -47,6 +46,7 @@ public class PowerScheduleService {
*/ */
private static final int MAX_APP_NUM = 10; private static final int MAX_APP_NUM = 10;
private final TransportService transportService;
private final DispatchService dispatchService; private final DispatchService dispatchService;
private final InstanceService instanceService; private final InstanceService instanceService;
@ -72,7 +72,7 @@ public class PowerScheduleService {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
// schedule jobs with CRON time expressions // schedule jobs with CRON time expressions
try { try {
final List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(AkkaStarter.getActorSystemAddress()); final List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(transportService.defaultProtocol().getAddress());
if (CollectionUtils.isEmpty(allAppIds)) { if (CollectionUtils.isEmpty(allAppIds)) {
log.info("[CronJobSchedule] current server has no app's job to schedule."); log.info("[CronJobSchedule] current server has no app's job to schedule.");
return; return;
@ -92,7 +92,7 @@ public class PowerScheduleService {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
// schedule workflows with CRON time expressions // schedule workflows with CRON time expressions
try { try {
final List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(AkkaStarter.getActorSystemAddress()); final List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(transportService.defaultProtocol().getAddress());
if (CollectionUtils.isEmpty(allAppIds)) { if (CollectionUtils.isEmpty(allAppIds)) {
log.info("[CronWorkflowSchedule] current server has no app's workflow to schedule."); log.info("[CronWorkflowSchedule] current server has no app's workflow to schedule.");
return; return;
@ -113,7 +113,7 @@ public class PowerScheduleService {
long start = System.currentTimeMillis(); long start = System.currentTimeMillis();
// schedule jobs with FIX_RATE/FIX_DELAY time expressions // schedule jobs with FIX_RATE/FIX_DELAY time expressions
try { try {
final List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(AkkaStarter.getActorSystemAddress()); final List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(transportService.defaultProtocol().getAddress());
if (CollectionUtils.isEmpty(allAppIds)) { if (CollectionUtils.isEmpty(allAppIds)) {
log.info("[FrequentJobSchedule] current server has no app's job to schedule."); log.info("[FrequentJobSchedule] current server has no app's job to schedule.");
return; return;
@ -132,7 +132,7 @@ public class PowerScheduleService {
public void cleanData() { public void cleanData() {
try { try {
final List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(AkkaStarter.getActorSystemAddress()); final List<Long> allAppIds = appInfoRepository.listAppIdByCurrentServer(transportService.defaultProtocol().getAddress());
if (allAppIds.isEmpty()) { if (allAppIds.isEmpty()) {
return; return;
} }

View File

@ -44,18 +44,22 @@ public class CacheService {
jobId2JobNameCache = CacheBuilder.newBuilder() jobId2JobNameCache = CacheBuilder.newBuilder()
.expireAfterWrite(Duration.ofMinutes(1)) .expireAfterWrite(Duration.ofMinutes(1))
.maximumSize(512) .maximumSize(512)
.softValues()
.build(); .build();
workflowId2WorkflowNameCache = CacheBuilder.newBuilder() workflowId2WorkflowNameCache = CacheBuilder.newBuilder()
.expireAfterWrite(Duration.ofMinutes(1)) .expireAfterWrite(Duration.ofMinutes(1))
.maximumSize(512) .maximumSize(512)
.softValues()
.build(); .build();
instanceId2AppId = CacheBuilder.newBuilder() instanceId2AppId = CacheBuilder.newBuilder()
.maximumSize(1024) .maximumSize(1024)
.softValues()
.build(); .build();
jobId2AppId = CacheBuilder.newBuilder() jobId2AppId = CacheBuilder.newBuilder()
.maximumSize(1024) .maximumSize(1024)
.softValues()
.build(); .build();
} }
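The only change in CacheService is adding softValues() to each Guava cache, letting the GC reclaim cached values under memory pressure on top of the existing size and expiry bounds. A small sketch of one such cache; the key and value types are illustrative:

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import java.time.Duration;

    Cache<Long, String> jobId2JobNameCache = CacheBuilder.newBuilder()
            .expireAfterWrite(Duration.ofMinutes(1)) // entries go stale after one minute
            .maximumSize(512)                        // hard cap on the number of entries
            .softValues()                            // values become softly reachable and may be collected
            .build();

Note that softValues() also makes Guava compare cached values by identity, which only matters if values are compared for equality rather than simply read back.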

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob-server</artifactId> <artifactId>powerjob-server</artifactId>
<groupId>tech.powerjob</groupId> <groupId>tech.powerjob</groupId>
<version>4.2.1</version> <version>4.3.0</version>
<relativePath>../pom.xml</relativePath> <relativePath>../pom.xml</relativePath>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

Some files were not shown because too many files have changed in this diff.