Merge branch 'v3.2.1' into jenkins_auto_build

朱八 2020-07-26 22:32:41 +08:00
commit 8f75f956e8
21 changed files with 287 additions and 75 deletions

View File

@@ -40,8 +40,9 @@
<module>powerjob-server</module>
<module>powerjob-common</module>
<module>powerjob-client</module>
<module>powerjob-worker-samples</module>
<module>powerjob-worker-agent</module>
<module>powerjob-worker-spring-boot-starter</module>
<module>powerjob-worker-samples</module>
</modules>
<properties>

View File

@@ -24,7 +24,6 @@
<mvn.invoker.version>3.0.1</mvn.invoker.version>
<commons.net.version>3.6</commons.net.version>
<fastjson.version>1.2.68</fastjson.version>
<druid.starter.version>1.1.23</druid.starter.version>
<!-- skip this module during deployment -->
<maven.deploy.skip>true</maven.deploy.skip>
@@ -79,18 +78,6 @@
<artifactId>spring-boot-starter-data-jpa</artifactId>
<version>${springboot.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.alibaba/druid-spring-boot-starter -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>${druid.starter.version}</version>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-autoconfigure</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-mongodb</artifactId>

View File

@@ -184,7 +184,6 @@ public class HashedWheelTimer implements Timer {
log.warn("[HashedWheelTimer] timerFuture.totalTicks < currentTick, please fix the bug");
}
timerFuture.status = HashedWheelTimerFuture.RUNNING;
try {
// submit for execution
runTask(timerFuture);
@@ -202,6 +201,7 @@ public class HashedWheelTimer implements Timer {
}
private void runTask(HashedWheelTimerFuture timerFuture) {
timerFuture.status = HashedWheelTimerFuture.RUNNING;
if (taskProcessPool == null) {
timerFuture.timerTask.run();
}else {

View File

@@ -1,8 +1,9 @@
package com.github.kfcfans.powerjob.server.persistence.config;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
@@ -20,28 +21,26 @@ public class MultiDatasourceConfig {
private static final String H2_DRIVER_CLASS_NAME = "org.h2.Driver";
private static final String H2_JDBC_URL = "jdbc:h2:file:~/powerjob-server/h2/powerjob_server_db";
private static final int H2_INITIAL_SIZE = 4;
private static final int H2_MIN_SIZE = 4;
private static final int H2_MAX_ACTIVE_SIZE = 10;
private static final String H2_DATASOURCE_NAME = "localDatasource";
@Primary
@Bean("omsCoreDatasource")
@ConfigurationProperties(prefix = "spring.datasource.druid")
@ConfigurationProperties(prefix = "spring.datasource.core")
public DataSource initOmsCoreDatasource() {
return DruidDataSourceBuilder.create().build();
return DataSourceBuilder.create().build();
}
@Bean("omsLocalDatasource")
public DataSource initOmsLocalDatasource() {
DruidDataSource ds = new DruidDataSource();
ds.setDriverClassName(H2_DRIVER_CLASS_NAME);
ds.setUrl(H2_JDBC_URL);
ds.setInitialSize(H2_INITIAL_SIZE);
ds.setMinIdle(H2_MIN_SIZE);
ds.setMaxActive(H2_MAX_ACTIVE_SIZE);
ds.setName(H2_DATASOURCE_NAME);
ds.setTestWhileIdle(false);
return ds;
HikariConfig config = new HikariConfig();
config.setDriverClassName(H2_DRIVER_CLASS_NAME);
config.setJdbcUrl(H2_JDBC_URL);
config.setAutoCommit(true);
// minimum number of idle connections in the pool
config.setMinimumIdle(H2_MIN_SIZE);
// maximum number of connections in the pool
config.setMaximumPoolSize(H2_MAX_ACTIVE_SIZE);
return new HikariDataSource(config);
}
}
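
A note on the Druid-to-HikariCP migration above: DataSourceBuilder.create().build() typically returns a HikariDataSource when HikariCP is on the classpath, and the @ConfigurationProperties(prefix = "spring.datasource.core") binding maps keys such as jdbc-url, username and password onto that pool. The following is only an illustrative sketch of the same pattern; the ExampleDataSourceConfig class and the example.datasource prefix are made up for illustration and are not part of this commit:

    import javax.sql.DataSource;
    import org.springframework.boot.context.properties.ConfigurationProperties;
    import org.springframework.boot.jdbc.DataSourceBuilder;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    public class ExampleDataSourceConfig {

        // Binds example.datasource.jdbc-url / username / password / maximum-pool-size
        // onto the HikariDataSource that DataSourceBuilder creates when HikariCP is present.
        @Bean
        @ConfigurationProperties(prefix = "example.datasource")
        public DataSource exampleDataSource() {
            return DataSourceBuilder.create().build();
        }
    }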

View File

@@ -78,13 +78,15 @@ public class DispatchService {
long current = System.currentTimeMillis();
// 0 means there is no limit on concurrently running instances, which also saves one DB query
if (jobInfo.getMaxInstanceNum() > 0) {
Integer maxInstanceNum = jobInfo.getMaxInstanceNum();
if (maxInstanceNum > 0) {
// this runningInstanceCount already includes the current instance
long runningInstanceCount = instanceInfoRepository.countByJobIdAndStatusIn(jobId, Lists.newArrayList(WAITING_WORKER_RECEIVE.getV(), RUNNING.getV()));
// do not dispatch when the maximum number of concurrently running instances is exceeded
if (runningInstanceCount > jobInfo.getMaxInstanceNum()) {
String result = String.format(SystemInstanceResult.TOO_MUCH_INSTANCE, runningInstanceCount, jobInfo.getMaxInstanceNum());
log.warn("[Dispatcher-{}|{}] cancel dispatch job due to too much instance(num={}) is running.", jobId, instanceId, runningInstanceCount);
if (runningInstanceCount > maxInstanceNum) {
String result = String.format(SystemInstanceResult.TOO_MUCH_INSTANCE, runningInstanceCount, maxInstanceNum);
log.warn("[Dispatcher-{}|{}] cancel dispatch job due to too much instance is running ({} > {}).", jobId, instanceId, runningInstanceCount, maxInstanceNum);
instanceInfoRepository.update4TriggerFailed(instanceId, FAILED.getV(), currentRunningTimes, current, current, RemoteConstant.EMPTY_ADDRESS, result, dbInstanceParams, now);
instanceManager.processFinishedInstance(instanceId, wfInstanceId, FAILED, result);

View File

@@ -204,7 +204,7 @@ public class WorkflowInstanceManager {
node.setStatus(status.getV());
node.setResult(result);
log.debug("[Workflow-{}|{}] node(jobId={},instanceId={}) finished in workflowInstance, status={},result={}", wfId, wfInstanceId, node.getJobId(), instanceId, status.name(), result);
log.info("[Workflow-{}|{}] node(jobId={},instanceId={}) finished in workflowInstance, status={},result={}", wfId, wfInstanceId, node.getJobId(), instanceId, status.name(), result);
}
if (InstanceStatus.generalizedRunningStatus.contains(node.getStatus())) {

View File

@@ -41,6 +41,7 @@ public class AppInfoController {
@PostMapping("/save")
public ResultDTO<Void> saveAppInfo(@RequestBody ModifyAppInfoRequest req) {
req.valid();
AppInfoDO appInfoDO;
Long id = req.getId();

View File

@@ -1,6 +1,9 @@
package com.github.kfcfans.powerjob.server.web.request;
import com.github.kfcfans.powerjob.common.OmsException;
import com.github.kfcfans.powerjob.common.utils.CommonUtils;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
/**
* Request to modify application info
@@ -14,4 +17,11 @@ public class ModifyAppInfoRequest {
private Long id;
private String appName;
private String password;
public void valid() {
CommonUtils.requireNonNull(appName, "appName can't be empty");
if (StringUtils.containsWhitespace(appName)) {
throw new OmsException("appName can't contains white space!");
}
}
}

View File

@@ -2,17 +2,15 @@ oms.env=DAILY
logging.config=classpath:logback-dev.xml
####### External database configuration (users must change this to their own database settings) #######
spring.datasource.druid.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.druid.url=jdbc:mysql://remotehost:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8
spring.datasource.druid.username=root
spring.datasource.druid.password=No1Bug2Please3!
spring.datasource.druid.initial-size=5
spring.datasource.druid.max-active=10
spring.datasource.druid.test-while-idle=false
spring.datasource.druid.name=remoteDatasource
spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.core.jdbc-url=jdbc:mysql://localhost:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8
spring.datasource.core.username=root
spring.datasource.core.password=No1Bug2Please3!
spring.datasource.core.hikari.maximum-pool-size=20
spring.datasource.core.hikari.minimum-idle=5
####### MongoDB configuration (non-core dependency, can be removed) #######
spring.data.mongodb.uri=mongodb://remotehost:27017/powerjob-daily
spring.data.mongodb.uri=mongodb://localhost:27017/powerjob-daily
####### Mail configuration (required when email alerts are enabled) #######
spring.mail.host=smtp.163.com

View File

@@ -2,14 +2,12 @@ oms.env=PRE
logging.config=classpath:logback-product.xml
####### Database configuration #######
spring.datasource.druid.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.druid.url=jdbc:mysql://remotehost:3306/powerjob-pre?useUnicode=true&characterEncoding=UTF-8
spring.datasource.druid.username=root
spring.datasource.druid.password=No1Bug2Please3!
spring.datasource.druid.initial-size=5
spring.datasource.druid.max-active=20
spring.datasource.druid.test-while-idle=false
spring.datasource.druid.name=remoteDatasource
spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3306/powerjob-pre?useUnicode=true&characterEncoding=UTF-8
spring.datasource.core.username=root
spring.datasource.core.password=No1Bug2Please3!
spring.datasource.core.hikari.maximum-pool-size=20
spring.datasource.core.hikari.minimum-idle=5
####### MongoDB configuration (non-core dependency, can be removed) #######
spring.data.mongodb.uri=mongodb://remotehost:27017/powerjob-pre

View File

@@ -2,14 +2,12 @@ oms.env=PRODUCT
logging.config=classpath:logback-product.xml
####### Database configuration #######
spring.datasource.druid.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.druid.url=jdbc:mysql://localhost:3306/powerjob-product?useUnicode=true&characterEncoding=UTF-8
spring.datasource.druid.username=root
spring.datasource.druid.password=No1Bug2Please3!
spring.datasource.druid.initial-size=5
spring.datasource.druid.max-active=20
spring.datasource.druid.test-while-idle=false
spring.datasource.druid.name=remoteDatasource
spring.datasource.core.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.core.jdbc-url=jdbc:mysql://localhost:3306/powerjob-product?useUnicode=true&characterEncoding=UTF-8
spring.datasource.core.username=root
spring.datasource.core.password=No1Bug2Please3!
spring.datasource.core.hikari.maximum-pool-size=20
spring.datasource.core.hikari.minimum-idle=5
####### MongoDB configuration (non-core dependency, can be removed) #######
spring.data.mongodb.uri=mongodb://localhost:27017/powerjob-product

View File

@@ -2,8 +2,10 @@
server.port=7700
spring.profiles.active=daily
spring.main.banner-mode=log
spring.jpa.open-in-view=false
spring.data.mongodb.repositories.type=none
logging.level.org.mongodb=warn
# file upload configuration
spring.servlet.multipart.enabled=true
@@ -11,16 +13,6 @@ spring.servlet.multipart.file-size-threshold=0
spring.servlet.multipart.max-file-size=209715200
spring.servlet.multipart.max-request-size=209715200
# Druid monitoring configuration (monitoring endpoint: /druid; the default login username and password are both powerjob)
spring.datasource.druid.filters=stat
spring.datasource.druid.stat-view-servlet.url-pattern=/druid/*
spring.datasource.druid.filter.stat.enabled=true
spring.datasource.druid.filter.stat.log-slow-sql=true
spring.datasource.druid.filter.stat.slow-sql-millis=5000
spring.datasource.druid.stat-view-servlet.enabled=true
spring.datasource.druid.stat-view-servlet.login-username=powerjob
spring.datasource.druid.stat-view-servlet.login-password=powerjob
###### PowerJob's own configuration (these settings may only appear in the application.properties file) ######
# akka ActorSystem service port
oms.akka.port=10086

View File

@@ -5,6 +5,7 @@ import com.github.kfcfans.powerjob.server.common.utils.timewheel.HashedWheelTimer;
import com.github.kfcfans.powerjob.server.common.utils.timewheel.TimerFuture;
import com.github.kfcfans.powerjob.server.common.utils.timewheel.TimerTask;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;
import java.util.Date;
@@ -82,4 +83,12 @@ public class UtilsTest {
public void testTZ() {
System.out.println(TimeZone.getDefault());
}
@Test
public void testStringUtils() {
String goodAppName = "powerjob-server";
String appName = "powerjob-server ";
System.out.println(StringUtils.containsWhitespace(goodAppName));
System.out.println(StringUtils.containsWhitespace(appName));
}
}

View File

@@ -0,0 +1,45 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>powerjob</artifactId>
<groupId>com.github.kfcfans</groupId>
<version>1.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-worker-spring-boot-starter</artifactId>
<version>3.2.1</version>
<packaging>jar</packaging>
<properties>
<powerjob.worker.version>3.2.1</powerjob.worker.version>
<springboot.version>2.2.6.RELEASE</springboot.version>
</properties>
<dependencies>
<!-- oms-worker -->
<dependency>
<groupId>com.github.kfcfans</groupId>
<artifactId>powerjob-worker</artifactId>
<version>${powerjob.worker.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
<version>${springboot.version}</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<version>${springboot.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,50 @@
package com.github.kfcfans.powerjob.worker.autoconfigure;
import com.github.kfcfans.powerjob.common.utils.CommonUtils;
import com.github.kfcfans.powerjob.worker.OhMyWorker;
import com.github.kfcfans.powerjob.worker.common.OhMyConfig;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.Arrays;
import java.util.List;
/**
* PowerJob auto-configuration
*
* @author songyinyin
* @since 2020/7/26 16:37
*/
@Configuration
@EnableConfigurationProperties(PowerJobProperties.class)
public class PowerJobAutoConfiguration {
@Bean
@ConditionalOnMissingBean
public OhMyWorker initPowerJob(PowerJobProperties properties) {
// server HTTP address (the port is server.port, not the ActorSystem port); do not add any prefix such as http://
CommonUtils.requireNonNull(properties.getServerAddress(), "serverAddress can't be empty!");
List<String> serverAddress = Arrays.asList(properties.getServerAddress().split(","));
// 1. create the configuration
OhMyConfig config = new OhMyConfig();
// optional; defaults to 27777 when not set explicitly
config.setPort(properties.getAkkaPort());
// appName must be registered in the console beforehand, otherwise startup fails
config.setAppName(properties.getAppName());
config.setServerAddress(serverAddress);
// if there is no need for large Map/MapReduce jobs, in-memory storage is recommended to speed up computation
// for large Map/MapReduce jobs that may produce huge numbers of subtasks (Tasks), use DISK, otherwise an OutOfMemory is guaranteed
config.setStoreStrategy(properties.getStoreStrategy());
// test mode: when true, the worker no longer tries to connect to the server and validate the appName
config.setEnableTestMode(properties.isEnableTestMode());
// 2. create the Worker and apply the configuration
OhMyWorker ohMyWorker = new OhMyWorker();
ohMyWorker.setConfig(config);
return ohMyWorker;
}
}
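
To show how the auto-configuration above is meant to be consumed, here is a hedged sketch of a processor in a host application. It assumes the worker SDK's BasicProcessor / TaskContext API from the same 3.2.1 line; the package, class name and result text are made up for illustration and are not part of this commit:

    package com.example.jobs;

    import com.github.kfcfans.powerjob.worker.core.processor.ProcessResult;
    import com.github.kfcfans.powerjob.worker.core.processor.TaskContext;
    import com.github.kfcfans.powerjob.worker.core.processor.sdk.BasicProcessor;

    /**
     * Minimal single-machine processor; a job created in the PowerJob console
     * references it by its fully qualified class name.
     */
    public class DemoProcessor implements BasicProcessor {

        @Override
        public ProcessResult process(TaskContext context) throws Exception {
            // jobParams are configured per job in the console
            String params = context.getJobParams();
            return new ProcessResult(true, "processed, params=" + params);
        }
    }

With the starter on the classpath, the host application then only needs to set powerjob.app-name and powerjob.server-address (see the configuration metadata below) for the OhMyWorker bean above to start.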

View File

@@ -0,0 +1,44 @@
package com.github.kfcfans.powerjob.worker.autoconfigure;
import com.github.kfcfans.powerjob.common.RemoteConstant;
import com.github.kfcfans.powerjob.worker.common.constants.StoreStrategy;
import com.github.kfcfans.powerjob.worker.core.processor.ProcessResult;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
* PowerJob configuration properties
*
* @author songyinyin
* @since 2020/7/26 16:37
*/
@Data
@ConfigurationProperties(prefix = "powerjob")
public class PowerJobProperties {
/**
* Application name; must be registered in the console beforehand, otherwise startup fails
*/
private String appName;
/**
* Port used by the akka ActorSystem
*/
private int akkaPort = RemoteConstant.DEFAULT_WORKER_PORT;
/**
* Scheduling server address, ip:port or domain name; separate multiple addresses with commas
*/
private String serverAddress;
/**
* Local persistence strategy; disk is used by default
*/
private StoreStrategy storeStrategy = StoreStrategy.DISK;
/**
* Maximum length of a result; longer values are truncated.
* The maximum length of {@link ProcessResult}#msg
*/
private int maxResultLength = 8096;
/**
* Test mode: when true, the worker no longer tries to connect to the server and validate the appName.
* true -> for writing and debugging unit tests locally; false -> default, standard mode
*/
private boolean enableTestMode = false;
}

View File

@@ -0,0 +1,50 @@
{
"groups": [
{
"name": "powerjob",
"type": "com.github.kfcfans.powerjob.worker.autoconfigure.PowerJobProperties",
"sourceType": "com.github.kfcfans.powerjob.worker.autoconfigure.PowerJobProperties"
}
],
"properties": [
{
"name": "powerjob.app-name",
"type": "java.lang.String",
"description": "应用名称,需要提前在控制台注册,否则启动报错",
"sourceType": "com.github.kfcfans.powerjob.worker.autoconfigure.PowerJobProperties"
},
{
"name": "powerjob.max-result-length",
"type": "java.lang.Integer",
"description": "最大返回值长度,超过会被截断 {@link ProcessResult}#msg 的最大长度",
"sourceType": "com.github.kfcfans.powerjob.worker.autoconfigure.PowerJobProperties",
"defaultValue": 8096
},
{
"name": "powerjob.akka-port",
"type": "java.lang.Integer",
"description": "启动 akka 端口",
"sourceType": "com.github.kfcfans.powerjob.worker.autoconfigure.PowerJobProperties"
},
{
"name": "powerjob.server-address",
"type": "java.lang.String",
"description": "调度服务器地址ip:port 或 域名,多值用英文逗号分隔",
"sourceType": "com.github.kfcfans.powerjob.worker.autoconfigure.PowerJobProperties"
},
{
"name": "powerjob.store-strategy",
"type": "com.github.kfcfans.powerjob.worker.common.constants.StoreStrategy",
"description": "本地持久化方式,默认使用磁盘",
"sourceType": "com.github.kfcfans.powerjob.worker.autoconfigure.PowerJobProperties"
},
{
"name": "powerjob.enable-test-mode",
"type": "java.lang.Boolean",
"description": "启动测试模式true情况下不再尝试连接 server 并验证appName。true -> 用于本地写单元测试调试; false -> 默认值,标准模式",
"sourceType": "com.github.kfcfans.powerjob.worker.autoconfigure.PowerJobProperties",
"defaultValue": false
}
],
"hints": []
}

View File

@@ -0,0 +1,2 @@
org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
com.github.kfcfans.powerjob.worker.autoconfigure.PowerJobAutoConfiguration
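
The spring.factories entry above is what lets Spring Boot discover PowerJobAutoConfiguration automatically once the starter is on the classpath. For an application that does not rely on that mechanism, an equivalent explicit import would look like the following sketch (illustrative only, not part of this commit):

    import org.springframework.context.annotation.Configuration;
    import org.springframework.context.annotation.Import;
    import com.github.kfcfans.powerjob.worker.autoconfigure.PowerJobAutoConfiguration;

    // Explicit wiring; normally unnecessary because the spring.factories entry above
    // registers the auto-configuration for every Spring Boot application using the starter.
    @Configuration
    @Import(PowerJobAutoConfiguration.class)
    public class ManualPowerJobImport {
    }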

View File

@@ -0,0 +1,22 @@
package com.github.kfcfans.powerjob.worker.autoconfigure;
import com.github.kfcfans.powerjob.worker.OhMyWorker;
import org.junit.Assert;
import org.junit.jupiter.api.Test;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Configuration;
@Configuration
@EnableAutoConfiguration
class PowerJobAutoConfigurationTest {
@Test
void testAutoConfiguration() {
ConfigurableApplicationContext run = SpringApplication.run(PowerJobAutoConfigurationTest.class);
OhMyWorker worker = run.getBean(OhMyWorker.class);
Assert.assertNotNull(worker);
}
}

View File

@@ -0,0 +1,2 @@
powerjob.enable-test-mode=true

View File

@@ -171,11 +171,13 @@ public class FrequentTaskTracker extends TaskTracker {
newRootTask.setLastReportTime(-1L);
// check whether the maximum number of running instances has been exceeded
if (timeExpressionType == TimeExpressionType.FIX_RATE) {
if (subInstanceId2TimeHolder.size() > maxInstanceNum) {
log.warn("[TaskTracker-{}] cancel to launch the subInstance({}) due to too much subInstance is running.", instanceId, subInstanceId);
processFinishedSubInstance(subInstanceId, false, "TOO_MUCH_INSTANCE");
return;
if (maxInstanceNum > 0) {
if (timeExpressionType == TimeExpressionType.FIX_RATE) {
if (subInstanceId2TimeHolder.size() > maxInstanceNum) {
log.warn("[TaskTracker-{}] cancel to launch the subInstance({}) due to too much subInstance is running.", instanceId, subInstanceId);
processFinishedSubInstance(subInstanceId, false, "TOO_MUCH_INSTANCE");
return;
}
}
}
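
Both DispatchService and FrequentTaskTracker now follow the same convention: a non-positive maxInstanceNum means "no limit on concurrently running instances". A small self-contained illustration of that guard (the class and method names here are made up, not part of the commit):

    // Illustrative only: mirrors the maxInstanceNum > 0 guard used above.
    final class InstanceLimitExample {

        static boolean exceedsLimit(long runningInstanceCount, int maxInstanceNum) {
            return maxInstanceNum > 0 && runningInstanceCount > maxInstanceNum;
        }

        public static void main(String[] args) {
            System.out.println(exceedsLimit(5, 0)); // false: 0 disables the limit
            System.out.println(exceedsLimit(5, 3)); // true: 5 exceeds the limit of 3
        }
    }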