Compare commits

...

801 Commits

Author SHA1 Message Date
tjq
5d82b7cc5c chore: github workflows 2024-12-07 21:09:09 +08:00
tjq
f506ba8956 chore: github workflows 2024-12-07 21:03:29 +08:00
tjq
8166b78e68 chore: github workflows 2024-12-07 20:57:00 +08:00
tjq
730982b085 chore: github workflows 2024-12-07 20:52:39 +08:00
tjq
f6a8666031 chore: github workflows 2024-12-07 20:49:14 +08:00
tjq
e6264fc9a4 Merge branch '5.1.1_v2' 2024-12-07 20:43:22 +08:00
tjq
dc62c1b992 refactor: optimize worker skip round log 2024-12-07 20:42:45 +08:00
tjq
c627776764 chore: upgrade project version to 5.1.1 2024-12-07 20:38:31 +08:00
tjq
aefa9290c9 chore: github workflows 2024-12-07 20:36:16 +08:00
tjq
fdd80f6cf9 feat: opt log 2024-12-07 17:32:20 +08:00
tjq
7333ee3951 Merge branch '5.1.0-bugfix2' into 5.1.1_v2 2024-12-07 17:00:45 +08:00
tjq
92ddc6af4d feat: support create app with namespace_code #976 2024-11-22 22:03:06 +08:00
tjq
508127426f feat: appname and namespace duplicate check #1009 2024-11-22 21:45:54 +08:00
tjq
57627305fa fix: Repeated execution after broadcast worker node down #1003 2024-11-22 21:32:13 +08:00
tjq
4e84bc60d7 feat: support method job direct return ProcessResult #798 2024-11-22 21:05:23 +08:00
tjq
4fe2d7fdf1 feat: Add PowerjobClient api /queryInstance #1034 2024-11-21 22:53:57 +08:00
tjq
f44bd43d13 fix: reduce Probabilistic non-execution #1033 2024-11-21 22:11:44 +08:00
tjq
e912e2c31d feat: NOT_ALLOWED_CHANGE_PASSWORD_ACCOUNTS 2024-11-09 18:05:26 +08:00
tjq
f9dd8d7713 fix: PADDLING not work 2024-11-06 23:23:52 +08:00
tjq
0bb069fa5b Merge branch '5.1.0-bugfix' 2024-10-31 00:23:52 +08:00
tjq
f0b2fbb5b7 chore: change main version to 5.1.0-bugfix 2024-10-31 00:03:54 +08:00
tjq
7443edf735 Merge branch 'pr-panyyf-master' into 5.1.0-bugfix 2024-10-30 23:53:25 +08:00
panyy_f
1383e48fec fix: worker-agent cannot specify multiple server issues when registering with the server #1012 2024-10-28 14:14:15 +08:00
tjq
243f7bb179 fix: PostgresqlSeriesDfsService can't restartup #974 2024-10-26 01:16:25 +08:00
tjq
827bcd2502 Merge branch 'pr-HeZhanfeng-fix-multi-thread-safety-vulnerabilities' into 5.1.0-bugfix 2024-10-26 00:44:59 +08:00
tjq
8f3981dd09 fix: OpenAPi Save task generates duplicate records #1018 2024-10-26 00:37:14 +08:00
hezhanfeng
9bab361618 fix:multi-thread safety vulnerabilities(修复格式化对象非线程安全的漏洞) 2024-10-17 11:11:28 +08:00
hezhanfeng
01e15bda39 fix:multi-thread safety vulnerabilities(修复格式化对象非线程安全的漏洞) 2024-09-30 11:02:25 +08:00
tjq
0bf95cf419 Merge branch '5.1.0' 2024-08-12 00:39:33 +08:00
tjq
f2bed56544 chore: add database schema file 2024-08-11 23:35:27 +08:00
tjq
85f5faaaac fix: PowerJobClient refresh token failed when jwt expired 2024-08-11 23:00:37 +08:00
tjq
44ef76328b refactor: optimize SwitchableStatus's package 2024-08-11 22:25:49 +08:00
tjq
5c49b8d8dd chore: upgrade project version and fe resource 2024-08-11 20:01:18 +08:00
tjq
944b06ee82 fix: list all app when user doesn't have any app permission 2024-08-11 19:34:46 +08:00
tjq
a35573544c fix: NetworkInterfaceChecker can't work 2024-08-11 18:22:48 +08:00
tjq
fea1974014 feat: worker use random server address #953 2024-08-11 11:32:28 +08:00
tjq
4527454a7c feat: AdpPostgreSQLDialect #750 2024-08-11 01:51:03 +08:00
tjq
a261b864ca fix: Single worker use padding mode made deadlock #918 2024-08-11 01:20:04 +08:00
tjq
4f5ea6f897 Merge branch 'pr-LittleCadet-master' into 5.1.0 2024-08-11 01:13:58 +08:00
tjq
0a1191572e refactor: optimize worker log 2024-08-11 01:12:56 +08:00
tjq
605497b36d feat: PowerJobClient support shutdown #895 2024-08-11 00:50:25 +08:00
tjq
3e0088870a feat: PowerJob Client support ClientExtension(current for dynamic server ip) #895 2024-08-11 00:43:18 +08:00
tjq
a1dad6c39e fix: AKKA NAT BUG #929 2024-08-11 00:20:34 +08:00
tjq
6426424401 fix: SqlProcessorConfiguration can't work due to Conditional bean config error #946 2024-08-10 23:41:29 +08:00
tjq
1774680792 fix: Correct protocol not used for inter-server communication #949 2024-08-10 23:32:44 +08:00
tjq
29e0b2deb0 feat: app password use AES GCM #935 2024-08-10 23:26:49 +08:00
tjq
eb4d7ab8eb feat: app password use ciphertext 2024-08-10 14:42:09 +08:00
tjq
e711ed7251 feat: PowerJobClient Support Authentication 2024-08-10 11:41:07 +08:00
tjq
53be566173 feat: OpenApiInterceptor 2024-08-10 00:04:12 +08:00
tjq
bee4795027 feat: OpenApiInterceptor 2024-08-09 21:47:47 +08:00
tjq
84b90a366c feat: open-api support auth 2024-08-09 20:55:08 +08:00
tjq
c04cb08390 fix: use new jwt header name to fix nginx remove header bug #910 2024-07-08 23:44:58 +08:00
shenkang
4507a6a883 fix: 当该appId的worker是单机运行 且 padding时, 导致Dispatcher分发任务处于死循环中, 致使无法分发任务,状态一直为运行中 2024-05-30 11:11:46 +08:00
tjq
3edaae67dd chore: Synchronising version 5.0.1 resources 2024-03-16 22:09:39 +08:00
tjq
bb99ec9d04 Merge branch 'dev' into 5.0.1_beta 2024-03-16 22:02:30 +08:00
tjq
54fadf6368 feat: extend dfs support PostgresqlSeriesDfsService 2024-03-16 22:01:12 +08:00
tjq
1e092bb866 Merge branch 'pr-Jetol-storage-dfs-pg-jetol' into 5.0.1_beta 2024-03-16 21:57:21 +08:00
tjq
6a59f50b96 fix: can't upload container #843 2024-03-16 21:53:26 +08:00
tjq
9b5916daf3 feat: support user manager #860 2024-03-16 18:41:33 +08:00
tjq
5e7751f092 fix: Java 8 date/time type java.time.LocalDateTime not supported by default #869 2024-03-16 13:56:03 +08:00
tjq
e21b171b98 fix: Java 8 date/time type java.time.LocalDateTime not supported by default #869 2024-03-16 13:37:22 +08:00
tjq
a9d8a680dc feat: optimize TaskTracker log #858 2024-03-16 13:15:41 +08:00
tjq
b822a685f4 feat: CSInitializerFactory support graalvm #868 2024-03-16 13:08:47 +08:00
tjq
dd3a17275f fix: openapi can't work #854 2024-03-16 11:47:48 +08:00
tjq
89e7ef8b40 fix: duplicate authorisation #854 2024-03-15 23:32:05 +08:00
tjq
32cecc59e9 fix: Some attributes of namespace cannot be shown back #854 2024-03-15 23:18:06 +08:00
tjq
5be7b24f4b feat: support delete app #855 2024-03-15 22:58:07 +08:00
tjq
1a2df5e616 fix: Authorized users do not display the app password #870 2024-03-15 22:39:25 +08:00
tjq
075ecd8a84 chore: merge 5.0.0 and 4.3.9 to 5.0.1 2024-03-09 15:55:19 +08:00
tjq
02304fe921 Merge branch '5.0.0_v2' into 5.0.1_beta
# Conflicts:
#	others/powerjob-mysql.sql
#	pom.xml
#	powerjob-client/pom.xml
#	powerjob-common/pom.xml
#	powerjob-official-processors/pom.xml
#	powerjob-remote/pom.xml
#	powerjob-remote/powerjob-remote-benchmark/pom.xml
#	powerjob-remote/powerjob-remote-framework/pom.xml
#	powerjob-remote/powerjob-remote-impl-akka/pom.xml
#	powerjob-remote/powerjob-remote-impl-http/pom.xml
#	powerjob-server/pom.xml
#	powerjob-server/powerjob-server-common/pom.xml
#	powerjob-server/powerjob-server-core/pom.xml
#	powerjob-server/powerjob-server-extension/pom.xml
#	powerjob-server/powerjob-server-migrate/pom.xml
#	powerjob-server/powerjob-server-monitor/pom.xml
#	powerjob-server/powerjob-server-persistence/pom.xml
#	powerjob-server/powerjob-server-remote/pom.xml
#	powerjob-server/powerjob-server-starter/pom.xml
#	powerjob-server/powerjob-server-starter/src/main/resources/static/index.html
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/1.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/10.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/11.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/2.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/3.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/4.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/5.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/6.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/7.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/8.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/9.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/app.js
#	powerjob-worker-agent/pom.xml
#	powerjob-worker-samples/pom.xml
#	powerjob-worker-spring-boot-starter/pom.xml
#	powerjob-worker/pom.xml
2024-03-09 15:40:54 +08:00
Jetol
b3abb461db 增加Postgresql存储扩展,配置项参考:tech.powerjob.server.persistence.storage.impl.PostgresqlSeriesDfsService 2024-03-08 17:23:00 +08:00
tjq
d44131128e Merge branch '4.3.9' 2024-03-02 20:11:56 +08:00
tjq
f4c0a0309f chore: Main version upgraded to 4.3.9 2024-03-02 19:44:54 +08:00
tjq
9046a8bfcf fix: Server scheduling exception when worker is overloaded #853 2024-03-01 22:06:51 +08:00
tjq
9d95d4ce04 feat: Optimising container deployment logs #850 2024-03-01 21:52:10 +08:00
tjq
86ec85331a fix: Processor not changed after container redeployment #850 2024-03-01 21:21:26 +08:00
tjq
fb1159e1b5 fix: MinioOssService is not working properly #844 2024-02-26 23:39:44 +08:00
tjq
9c0731f20d Merge branch '4.3.8_zjlab_v2' 2024-02-26 01:18:09 +08:00
tjq
bd725aac15 chore: optimize docker publish script 2024-02-26 01:17:51 +08:00
tjq
c35ae19ba8 fix: Some issues found by codereview 2024-02-25 21:58:25 +08:00
tjq
c11d544afe chore: upgrade project version to 4.3.8 2024-02-25 19:47:38 +08:00
tjq
e64ad0f74d fix: Loss of subtask data when mapreduce enters swap mode 2024-02-25 19:13:03 +08:00
tjq
07e0e17ec0 feat: [ops] enhance Map/MapReduce's dev ops 2024-02-25 12:23:58 +08:00
tjq
37ef35bd80 feat: [ops] enhance Map/MapReduce's dev ops 2024-02-25 02:12:19 +08:00
tjq
4046ea39b5 feat: support TaskTrackerBehavior(PADDLING) 2024-02-24 20:58:48 +08:00
tjq
815d44ef7e feat: Supports selection of TaskTracker nodes for execution 2024-02-24 19:59:50 +08:00
tjq
c717fd3fb8 feat: [SuperMR] Map/MapReduce job can use swap to support ∞ subtask 2024-02-24 13:46:51 +08:00
tjq
cf4ed93812 feat: [SuperMR] YuGong 2024-02-24 11:50:16 +08:00
tjq
dda79439ca feat: [SuperMR] ExternalTaskPersistenceService 2024-02-23 23:07:16 +08:00
songyinyin
7437ad6067 fix: error accessing homepage when not logged in 2024-02-20 16:50:23 +08:00
tjq
12e2f83764 chore: fix some problem then upgrade project version to 5.0.0-beta2 2024-02-18 20:06:29 +08:00
tjq
a750d0c55c feat: support user become app admin by username and password 2024-02-18 19:56:51 +08:00
tjq
d1d0407046 fix: Jackson compatibility issue preventing 5.x server from scheduling 4.x worker 2024-02-18 18:08:49 +08:00
tjq
9dbd470c5a feat: upgrade sql for 5.0.0 2024-02-17 22:24:44 +08:00
tjq
5e1f7e2d11 chore: upgrade project version to 5.0.0-beta 2024-02-17 21:59:13 +08:00
tjq
3ea089eaee feat: optimize app manager list 2024-02-16 21:45:27 +08:00
tjq
7b7582dd91 feat: [auth] global admins 2024-02-16 13:28:58 +08:00
tjq
686189e6ca feat: NewSystemInitializer and token verify 2024-02-16 12:38:42 +08:00
tjq
9419340829 fix: [auth] Bugs in user login module 2024-02-13 20:34:45 +08:00
tjq
6539c66226 fix: [auth] Extracting PwjbUserInfo to resolve user contamination 2024-02-13 18:23:29 +08:00
tjq
c350607762 feat: support user related query 2024-02-13 16:22:56 +08:00
tjq
31a7690844 feat: [auth] The web interface adds ApiPermission for authentication. 2024-02-13 11:21:14 +08:00
tjq
05c22a5dc5 fix: [auth] grant and check permission's bug 2024-02-12 23:42:21 +08:00
tjq
919a5c3b35 feat: support namespace 2024-02-12 22:12:03 +08:00
tjq
841c7891c8 feat: [auth] grant permission 2024-02-12 11:11:06 +08:00
tjq
3fdcc1e599 feat: [auth] use CachingRequestBodyFilter fix multi read problem 2024-02-11 23:52:35 +08:00
tjq
e18b9a8962 feat: [auth] finished login part 2024-02-11 17:14:21 +08:00
tjq
cf8153ae39 feat: [auth] AuthController 2024-02-11 11:04:20 +08:00
tjq
a1c12bf1c7 feat: [auth] PowerJobPermissionService 2024-02-11 10:32:13 +08:00
tjq
0caa854409 feat: [auth] PowerJobLoginService 2024-02-11 10:14:47 +08:00
tjq
cda55c918b feat: [auth] design ThirdPartyLoginService 2024-02-10 14:11:14 +08:00
tjq
4793c19af6 chore: docker publish script add support for Apple Silicon device 2024-02-10 12:44:44 +08:00
tjq
78b58d02e8 Merge branch '4.3.7_v2' 2024-02-09 13:36:21 +08:00
tjq
ea919b102f docs: Happy New Year 2024-02-09 11:13:05 +08:00
tjq
599d710e27 refactor: rename RunnableAndCatch to SafeRunnable 2024-02-08 20:14:35 +08:00
tjq
ab7a398f61 Merge branch 'dev' into 4.3.7_v2 2024-02-08 20:06:27 +08:00
tjq
01d7247efa chore: Upgrade project version to 4.3.7 2024-02-08 19:45:56 +08:00
tjq
b29e265e42 feat: Optimizing IP acquisition logic with PingPongSocketServer #762 2024-02-08 19:41:40 +08:00
tjq
61aecc6354 refactor: optimize NetUtils 2024-02-08 16:08:39 +08:00
tjq
6de5e83a2f chore: upgrade logback version to fix logback serialization vulnerability #80 2024-02-08 15:12:55 +08:00
tjq
88f7a06596 Merge branch 'pr-yuhan0501-4.3.6_bugfix' into 4.3.7_v2 2024-02-08 15:04:11 +08:00
tjq
98fc4d3320 Merge branch 'lvhjean-4.3.6-bugfix-email-empty' into 4.3.7_v2 2024-02-08 15:01:17 +08:00
tjq
6842fb6a7b perf: add cost log for TaskPersistenceService 2024-02-08 14:25:55 +08:00
tjq
debc2e0abb fix: instanceInfo cannot display details on non-scheduled server nodes 2024-02-08 13:14:59 +08:00
tjq
f3dd56bf54 chore: upgrade h2 and spring version 2024-02-08 12:26:22 +08:00
tjq
e63dc91643 fix: @PowerJobHandler does not work in cglib proxy #770 2024-02-08 11:14:19 +08:00
tjq
4be6a139dd fix: Cyclic Logging on Exception #769 2024-02-08 10:42:47 +08:00
tjq
1ba74bf0af test: performance test for h2 2024-02-05 00:12:37 +08:00
tjq
ff84d46713 perf: Discarding the results of the map task to improve performance 2024-02-04 22:17:53 +08:00
liwh
d61d85abd4 fix: 修复 email未填写导致告警报异常, #808 2024-01-04 14:04:32 +08:00
yuhan
23d94ed46f 修复使用mysql存储日志的情况下文件流未关闭造成的文件文件句柄不释放的bug。
(cherry picked from commit 2a9444770d227ffe46d6c700a7e8570ef3e1bc17)
2024-01-03 14:15:46 +08:00
songyinyin
8f3803bda6 fix: 周期性任务出现异常时,导致任务停止 2023-09-20 17:17:36 +08:00
songyinyin
592dff8d75 chore: When the TaskTracker is successfully executed normally, the log level changes to Info #657 2023-09-20 17:16:51 +08:00
tjq
9b7c237cf0 Merge branch '4.3.6' 2023-09-03 14:05:03 +08:00
tjq
9b3a3cd586 chore: change main version to 4.3.6 2023-09-03 13:44:10 +08:00
tjq
966f09c034 feat: optimize NetUtils 2023-09-03 13:30:26 +08:00
tjq
4a9f38c760 Merge branch 'pr-disk-overflow' into 4.3.6 2023-09-02 13:49:25 +08:00
tjq
b96768208b Merge branch 'dev-pr' of https://github.com/zhihui1999/PowerJob into pr-disk-overflow 2023-09-02 13:48:09 +08:00
tjq
1545733012 Merge branch 'pr-Minio' into 4.3.6 2023-09-02 13:47:20 +08:00
tjq
fa8b4f2b62 Merge branch 'xinyi' of https://github.com/Yusanku/PowerJob into pr-Minio 2023-09-02 13:46:57 +08:00
tjq
882392a5e5 feat: support lazy init#725 2023-09-02 13:35:54 +08:00
tjq
c875ba3d37 feat: support lazy init#725 2023-09-02 13:19:15 +08:00
tjq
a138f9c8cc feat: support lazy init#725 2023-09-02 11:53:01 +08:00
tjq
360f105c01 docs: add second qq group 2023-08-31 23:51:50 +08:00
赵志辉
3544f76aaa fix: Memory overflow caused by mounting cloud disks 2023-08-24 11:06:16 +08:00
yw
8de1c47971 日志存储扩展-Minio 2023-08-23 09:37:14 +08:00
tjq
5dbceb7ce4 chore: upgrade dependency version 2023-08-22 22:52:20 +08:00
songyinyin
93dadab832 fix: When debugging, ShellProcessor appears Java.io.IOException: Stream closed #682 2023-08-17 14:37:17 +08:00
tjq
9ab2e92934 Merge branch '4.3.5' 2023-08-14 23:57:14 +08:00
tjq
0d359b8a8e docs: optimize README 2023-08-14 23:56:50 +08:00
tjq
c91240c1b8 chore: change main version to 4.3.5 2023-08-14 23:50:17 +08:00
tjq
b14b49f75e chore: optimize test env script 2023-08-14 23:41:06 +08:00
tjq
74a92622a3 Merge branch 'pr-gitee-worker-cluster-bugfix' into 4.3.5 2023-08-14 23:21:28 +08:00
tjq
b5085e09f1 Merge branch 'patch-redeploy-contationer-in-multi-server' of https://gitee.com/diligents/PowerJob into pr-gitee-worker-cluster-bugfix 2023-08-14 23:20:55 +08:00
tjq
91abbc03d9 fix: rollback NetUtils's interface index sort 2023-08-14 23:14:44 +08:00
tjq
bdcc9b131d refactor: optimize DfsService's log 2023-08-14 23:14:07 +08:00
tjq
58e542c69a Merge branch '4.3.4' 2023-08-13 23:05:55 +08:00
tjq
15fa1abd91 feat: Complete all testing and ready for release 2023-08-13 22:29:31 +08:00
tjq
c08b4f1858 fix: timeout bug #678 2023-08-13 21:30:58 +08:00
tjq
89b35c8495 feat: [officialProcessor] add VerificationProcessor 2023-08-13 18:00:20 +08:00
tjq
73ebe83c05 feat: code review problem modification, ready to release 4.3.4 2023-08-13 16:31:12 +08:00
tjq
ad08406d0b feat: [storageExt] finished MySqlSeriesDfsService 2023-08-13 16:11:10 +08:00
tjq
37a62549db feat: [storageExt] MySqlSeriesDfsService 2023-08-10 23:59:07 +08:00
tjq
c50a3edebf feat: [storageExt] MySqlSeriesDfsService 2023-08-10 00:16:40 +08:00
tjq
09b15dfbc1 feat: optimize online log cache time(60 -> 10) 2023-08-06 21:03:41 +08:00
tjq
6bcc275a70 Merge branch 'pr-archibald-nice-empty-job-notifyuserids' into 4.3.4 2023-08-06 20:42:48 +08:00
tjq
88ebd5e042 Merge branch 'pr-#669' into 4.3.4 2023-08-06 20:41:37 +08:00
tjq
df5e259e54 chore: change main version to 4.3.4 2023-08-06 20:17:40 +08:00
tjq
570ea0487b feat: output vertx request error 2023-08-06 19:25:47 +08:00
tjq
dd32916637 docs: add SECURITY.md #698 2023-08-04 22:20:01 +08:00
tjq
c3ce46aee9 refactor: optimize alarm code 2023-07-31 23:45:52 +08:00
tjq
d03247ea03 test: [storageExt] finished gridfs service's test 2023-07-30 21:59:35 +08:00
tjq
1c70bbc670 feat: [storageExt] use PropertyAndOneBeanCondition to control multi impl 2023-07-30 21:14:18 +08:00
tjq
b251df4c35 feat: [storageExt] support alicloud oss and add some test code 2023-07-30 15:35:18 +08:00
tjq
f0514ac65f feat: [storageExt] support alicloud oss 2023-07-30 14:39:57 +08:00
tjq
236d0a7f3b feat: [storageExt] Unified File Storage Solution with DFsService 2023-07-30 12:00:45 +08:00
tjq
fc57226d3a refactor: optimize extension package 2023-07-16 18:14:20 +08:00
tjq
5e9935fed4 feat: [storageExt] define DFsService 2023-07-16 17:55:01 +08:00
tjq
d3140d0501 feat: support non-LAN communication(server side) 2023-07-15 22:22:38 +08:00
tjq
7318fed73a feat: support non-LAN communication(worker side) 2023-07-15 21:38:56 +08:00
tjq
67a22e8b7e feat: add log for ContainerService 2023-07-12 20:45:38 +08:00
archieself
8aaa602082 fix-[#676] Set notifyUserIds to null when empty the notify info of a job. 2023-07-05 10:49:37 +08:00
张家伟
1adc25308f 获取主机网络连接信息时按索引从小到大排序,优先使用索引值小的网络接口。 2023-06-30 15:22:32 +08:00
tjq
00228f3b3e chore: update dependency maven version(3.6.3 to 3.9.2) 2023-06-11 17:51:42 +08:00
tjq
c3c735e5f1 chore: change main version to 4.3.3 2023-06-11 17:44:48 +08:00
tjq
df7ceb7ba5 Merge branch '4.3.3' 2023-06-11 17:24:05 +08:00
tjq
5da0c694c3 chore: change main version to 4.3.3 2023-06-11 17:18:51 +08:00
tjq
5cfd0f8e74 chore: change main version to 4.3.3 2023-06-11 16:45:44 +08:00
tjq
e2887a12f6 fix: SpringMethodProcessor can't throw correct exception 2023-06-11 16:44:28 +08:00
tjq
388581c321 chore: add <classifier>exec</classifier> for server 2023-06-11 15:51:12 +08:00
tjq
8953ecc74f feat: support powerjob method handler 2023-06-11 15:40:50 +08:00
tjq
8ecc5768c7 Merge branch 'pr-vannewang-master' into 4.3.3 2023-05-07 22:29:36 +08:00
tjq
a7394e518c fix: server can't record zero cost processor's status #620 2023-05-07 22:08:14 +08:00
tjq
174696066d feat: add log to check resource release status #627 2023-05-07 21:41:00 +08:00
wangxiaopeng
47b050aba2 抽取Spring API为公共抽象父类 2023-04-18 15:59:45 +08:00
wangxiaopeng
46165ccd97 获取任务执行结果,并将结果返回给控制台 2023-04-18 14:43:38 +08:00
wangxiaopeng
b9bd8079f2 调度方式(方法形式)新增 2023-04-06 18:08:56 +08:00
tjq
1a40447b23 Merge branch '4.3.2-main' 2023-03-19 23:33:16 +08:00
tjq
e2f5ca440f chore: upgrade main version to 4.3.2 2023-03-19 23:25:44 +08:00
tjq
b71edc1f26 fix: netty compatibility issues #591 2023-03-19 20:34:26 +08:00
tjq
2a87a24980 fix: workflow append string will have quotes #307 2023-03-17 23:06:08 +08:00
tjq
93b01191d0 fix: oms.table-prefix can't work #584 2023-03-16 23:53:58 +08:00
tjq
aa65884b3f test: test chinese result #581 2023-03-16 23:47:33 +08:00
tjq
4b79bd73bb fix: test mode can't startup when server not available #580 2023-03-16 23:35:10 +08:00
tjq
0a76d057ac feat: samples start to use http as protocol 2023-03-16 23:23:05 +08:00
tjq
7b003ed895 Merge branch '4.3.2-findbug' 2023-03-07 23:37:34 +08:00
tjq
70a696aaa8 chore: upgrade project version 2023-03-07 23:37:03 +08:00
tjq
5011ea983b chore: rollback akka version 2023-03-07 23:20:26 +08:00
tjq
a93602f845 test: add test code for repetitions 2023-03-07 23:19:57 +08:00
tjq
805046dccb Merge branch '4.3.1-main' 2023-03-07 00:36:47 +08:00
tjq
0772b41fac feat: sync web resource 2023-03-07 00:36:27 +08:00
tjq
1fc240ab27 Merge branch '4.3.1-main' 2023-03-07 00:12:56 +08:00
tjq
36b439603c docs: optimize README.md 2023-03-07 00:12:11 +08:00
tjq
0bb46be9d0 chore: upgrade project version 2023-03-07 00:09:31 +08:00
tjq
4eeda2c662 chore: skip deploy testing package 2023-03-05 21:49:59 +08:00
tjq
fcca0c0c94 feat: powerjob client support export job 2023-03-05 16:42:56 +08:00
tjq
06c4e085cb feat: sync console code from powerjob-console 2023-03-05 16:12:17 +08:00
tjq
81752dd26f test: TestFindByBeanNameProcessor 2023-03-05 15:58:54 +08:00
tjq
93d44ea07d chore: unified official processor's version 2023-03-05 15:55:16 +08:00
tjq
a90cf82974 fix: SQL script GITEE#I6DXY3 2023-03-05 15:47:22 +08:00
tjq
e36ac8bc59 chore: change main version to 4.3.1 2023-03-05 15:45:28 +08:00
tjq
3fcd99e364 feat: optimize pr code 2023-03-05 15:43:55 +08:00
tjq
74ef8f1d23 Merge branch '4.3.1-pr-AZI-D-patch-1' into 4.3.1-main 2023-03-05 15:41:35 +08:00
tjq
40192486c5 feat: change jobname when export job 2023-03-05 12:57:15 +08:00
tjq
d45cb0712c fix: openapi can't work in springboot 2.7.4 #559 2023-03-05 12:15:19 +08:00
tjq
5985c04997 feat: remove all circular-references by aware 2023-03-04 21:03:22 +08:00
tjq
5ddaa33f47 feat: support job export #571 2023-03-04 15:03:22 +08:00
AZI-D
d78d139276
修复精确查询没有限制的bug 2023-03-02 15:41:44 +08:00
tjq
43dfc9a265 feat: finished DAILY_TIME_INTERVAL processor 2023-02-25 23:07:45 +08:00
tjq
3aa42819e4 fix: NPE of DailyTimeIntervalStrategyHandler 2023-02-25 18:24:25 +08:00
songyinyin
8ea4a5b260 feat: powerjob-worker-spring-boot-starter support SpringBoot 3 and jdk 17 2023-02-19 15:07:14 +08:00
tjq
34352a1eea chore: optimize build test env script 2023-02-18 22:29:15 +08:00
tjq
2d0dcf6c7b chore: support mongodb in testenv 2023-02-18 21:37:50 +08:00
songyinyin
cdf416d693 chore: powerjob-remote http set httpRequest head "application/json" 2023-02-18 19:59:56 +08:00
raylua
8c32c775da fix: In multi-servers node , no workers found prompt without @DesignateServer in not conform server node 2023-02-17 19:29:01 +08:00
tjq
369ebdab0b test: DailyTimeIntervalStrategyHandlerTest 2023-02-13 00:26:13 +08:00
tjq
e01770adc7 perf: optimize DailyTimeIntervalStrategyHandler 2023-02-11 23:02:26 +08:00
tjq
42823b8bdd feat: support DailyTimeInterval #558 2023-02-11 22:57:43 +08:00
tjq
9f2f68344c feat: support DailyTimeInterval #558 2023-02-11 15:22:40 +08:00
tjq
3f7d4328e9 feat: powerjob-worker-agent support use custom protocol 2023-02-09 23:32:18 +08:00
tjq
421705e1bc feat: optimize exception log when load failed in spring #550 2023-02-08 22:39:29 +08:00
tjq
69dc1c50aa feat: support OmsServerAndLocalLogger #553 2023-02-08 22:34:05 +08:00
tjq
b89ac389fd feat: optimize exception log in server elect #551 2023-02-08 22:26:36 +08:00
tjq
a5e3e829b5 chore: test env 2023-02-08 01:07:05 +08:00
tjq
7d947038eb feat: try to load by bean name 2023-02-07 23:42:33 +08:00
songyinyin
c8a456f56d chore: docker-compose powerjob-worker-samples runs after powerjob-server 2023-02-03 22:22:50 +08:00
tjq
afa54e7958 chore: limit memory to ensure test env alive 2023-02-01 22:42:48 +08:00
tjq
39893b1e92 feat: optimize PowerJobSpringWorker 2023-02-01 22:13:52 +08:00
tjq
95a1f43994 feat: use async log to optimize agent performance 2023-01-31 00:02:41 +08:00
tjq
d7c494e463 chore: add gclog for test env 2023-01-30 22:53:06 +08:00
tjq
5ea57eebcc chore: optimize test env docker-compose 2023-01-30 20:52:35 +08:00
tjq
ae36ccf75a Merge branch '4.3.0' 2023-01-28 12:17:12 +08:00
tjq
d5b4faa49c chore: update docker build script 2023-01-28 12:16:47 +08:00
tjq
b0fae5edf8 docs: optimize comment 2023-01-28 10:51:19 +08:00
tjq
1c60f17b1b feat: optimize server info 2023-01-27 19:16:54 +08:00
tjq
d9b1272802 chore: optimize test env script 2023-01-27 19:08:23 +08:00
tjq
31d9b5b7e6 chore: optimize test env script 2023-01-27 16:10:40 +08:00
tjq
19a3f2fbed docs: optimize readme 2023-01-27 15:58:53 +08:00
tjq
c9f5fb3f51 feat: optimize container log 2023-01-27 15:22:19 +08:00
tjq
a25eac67c7 fix: create TaskTracker failed causes HashMap to deadlock 2023-01-27 13:13:56 +08:00
tjq
fb2046649e feat: optimize code 2023-01-27 13:05:35 +08:00
tjq
54beb3b2d1 feat: optimize container service 2023-01-27 11:22:16 +08:00
tjq
2bd2ceca8e chore: optimize test env 2023-01-24 16:12:54 +08:00
tjq
8df74b9670 chore: optimize test env 2023-01-24 16:12:15 +08:00
tjq
6921cfdcf5 chore: optimize test env 2023-01-24 16:10:53 +08:00
tjq
da4aa8a9fe chore: optimize test env 2023-01-24 15:54:34 +08:00
tjq
a9f81d260c feat: add script for build test env 2023-01-24 15:22:53 +08:00
tjq
7b56393aee feat: use softValues to optimize memory usage 2023-01-24 13:23:34 +08:00
tjq
1b1efe6b80 feat: optimize vertx config 2023-01-24 12:56:56 +08:00
tjq
3bfe58abd2 feat: process empty return in vertx 2023-01-24 12:46:35 +08:00
tjq
55e259bcf7 chore: upgrade project version to 4.3.0 2023-01-24 10:39:30 +08:00
tjq
bc08b76d23 chore: fix compile error in official-processors 2023-01-24 10:29:34 +08:00
tjq
5f75dbe9fc feat: replace Deprecated method 2023-01-23 13:03:32 +08:00
tjq
e73675ce09 feat: add comment for ProcessorFactory 2023-01-23 10:18:59 +08:00
tjq
8e94976cdd feat: allowed user to customize the storage path of the h2 database #521 2023-01-22 17:50:29 +08:00
tjq
b8199bf036 feat: optimize demo project 2023-01-22 17:37:03 +08:00
tjq
6c21c7864a feat: add jaxb-api to samples project to support JDK 2023-01-22 17:22:24 +08:00
tjq
dc61bb4648 fix: JavaUtils cache failed in windows env 2023-01-22 17:11:56 +08:00
tjq
afdf4a7dc2 feat: worker starter use new port config name 2023-01-22 16:38:05 +08:00
tjq
789bcb5d10 chore: worker http support 2023-01-22 11:15:11 +08:00
tjq
5b78204beb chore: optimize pom config 2023-01-22 11:04:28 +08:00
tjq
dca97010c7 fix: server return Optional to worker 2023-01-22 10:52:53 +08:00
tjq
63a5e2b458 feat: optimize ServerElectionService 2023-01-22 10:36:22 +08:00
tjq
17b842a2a2 fix: server elect bug 2023-01-22 00:40:14 +08:00
tjq
4a41e322ab fix: server elect bug 2023-01-22 00:37:09 +08:00
tjq
e26f2df2d0 fix: server elect bug 2023-01-22 00:33:11 +08:00
tjq
571b7cf3f2 feat: optimize remote framework log output 2023-01-21 23:15:45 +08:00
tjq
4fece7be40 feat: optimize remote framework log output 2023-01-21 23:13:28 +08:00
tjq
bfb9c68590 feat: close remoteEngine when jvm exit 2023-01-21 22:37:18 +08:00
tjq
25c6a9a6d6 feat: remove PowerSerializable's path method 2023-01-21 11:10:51 +08:00
tjq
b746aa1859 feat: redefine PowerAkkaSerializer 2023-01-21 11:05:24 +08:00
tjq
e74fc2d138 chore: remove akka in common package 2023-01-21 10:34:37 +08:00
tjq
dedefd5a6d feat: replace akka by PowerJobRemoteEngine in server side 2023-01-21 10:31:12 +08:00
tjq
b013fbfefd feat: replace akka by PowerJobRemoteEngine in server side 2023-01-21 10:28:11 +08:00
tjq
5a14b300f9 feat: suit PowerJobAutoConfiguration for new properties 2023-01-20 17:15:49 +08:00
tjq
3892c38785 feat: remove all spring dependencies in powerjob-worker 2023-01-20 17:08:40 +08:00
tjq
8e96fdacc6 feat: remove springUtils 2023-01-20 16:39:53 +08:00
tjq
503e9db5c2 feat: remove spring in PowerJobWorker 2023-01-20 16:34:03 +08:00
tjq
f6a6914f91 feat: allow user to extend ProcessorFactory 2023-01-20 16:06:03 +08:00
tjq
847cf23738 feat: allow user to extend ProcessorFactory 2023-01-20 15:54:48 +08:00
tjq
16f5e67cf0 feat: use PowerJobRemoteEngine to replace akka 2023-01-20 15:09:21 +08:00
tjq
74358bca8d fix: determinePackageVersion throw exception in ide env 2023-01-20 14:51:56 +08:00
tjq
7eea92bfc7 fix: determinePackageVersion throw exception in ide env 2023-01-20 14:51:26 +08:00
tjq
5b94247daf feat: use PowerJobRemoteEngine to replace akka 2023-01-20 14:40:18 +08:00
tjq
2020f72905 feat: use PowerJobRemoteEngine to replace akka 2023-01-20 14:19:09 +08:00
tjq
f0da89503e feat: use PowerJobRemoteEngine to replace akka 2023-01-20 13:41:28 +08:00
tjq
d46a6de26e feat: change to use PowerJobRemoteEngine to replace akka 2023-01-20 13:18:58 +08:00
tjq
43df09bb38 feat: worker use PowerJobRemoteFramework 2023-01-20 12:05:18 +08:00
tjq
0400eceab1 chore: optimize package version 2023-01-20 09:05:06 +08:00
tjq
38d6b16c74 feat: HandlerLocation use serverType 2023-01-20 09:00:52 +08:00
tjq
3d5a5ac342 feat: optimize code of PowerJobProcessorLoader 2023-01-17 23:18:14 +08:00
tjq
4d2e037107 feat: optimize code of BuiltInSpringProcessorFactory 2023-01-17 22:57:36 +08:00
tjq
cc7a63c69f feat: JarContainerProcessorFactory 2023-01-17 22:53:32 +08:00
tjq
57450a98ad feat: PowerJobProcessorLoader 2023-01-17 22:47:05 +08:00
tjq
44e6ea2373 feat: two default impl for ProcessorFactory 2023-01-17 22:39:50 +08:00
tjq
1ca5fed9cf feat: define ProcessorFactory 2023-01-17 22:08:24 +08:00
tjq
2982410d80 feat: define ProcessorFactory 2023-01-17 22:05:48 +08:00
tjq
fbd75a6ec7 feat: define ProcessorFactory 2023-01-17 22:00:35 +08:00
tjq
d6f3ae6c44 feat: define insideCluster in HandlerLocation 2023-01-17 21:37:41 +08:00
tjq
59121684a8 chore: merge master 2023-01-16 00:14:30 +08:00
tjq
c47fd69859 test: use gatling to test remote framework performance 2023-01-15 22:58:44 +08:00
Echo009
ccbe11ed0e
Merge pull request #531 from PowerJob/4.2.1-main
release 4.2.1
2023-01-15 21:37:26 +08:00
Echo009
30abf08703 feat: add some powerjob worker sample processors 2023-01-15 21:06:59 +08:00
Echo009
1b3134291c feat: optimize the code of LightTaskTracker 2023-01-15 21:04:32 +08:00
Echo009
2c51e0601d feat: optimize the code of persistence layer entity class 2023-01-15 16:25:01 +08:00
tjq
cd7a743097 feat: use gatling to have a pressure test for remote framework 2023-01-08 21:41:15 +08:00
tjq
2afb20df0b feat: use gatling to have a pressure test for remote framework 2023-01-08 21:22:56 +08:00
tjq
0d29b6369a feat: use gatling to have a pressure test for remote framework 2023-01-08 20:48:42 +08:00
tjq
24b4cc4eb5 feat: use gatling to have a pressure test for remote framework 2023-01-08 19:34:44 +08:00
tjq
50b4ca3cca feat: optimize resource clean 2023-01-08 18:30:44 +08:00
tjq
7b9ee74c21 feat: benchmark remote framework 2023-01-07 16:58:33 +08:00
tjq
8b9d6df172 feat: akka remote impl 2023-01-07 14:53:58 +08:00
tjq
676388a988 feat: akka remote impl 2023-01-07 14:38:17 +08:00
tjq
b0b2c24571 feat: optimize remote framework 2023-01-07 14:14:32 +08:00
tjq
5d3bfedf5d feat: akka proxy actor 2023-01-06 23:34:36 +08:00
tjq
d73b8e21e6 feat: suit path 2023-01-06 22:58:34 +08:00
tjq
94a0e2fa42 feat: optimize code for PowerJobActor 2023-01-06 22:54:52 +08:00
tjq
79cde85256 feat: define PowerJobActor 2023-01-06 22:53:11 +08:00
Echo009
fe03b8faab feat: optimize the code of TaskTracker 2023-01-04 22:43:23 +08:00
tjq
9f6d421ed2 feat: optimize Thread#stop usage 2023-01-04 00:39:31 +08:00
tjq
da04e4b048 feat: replace deprecated method 2023-01-03 23:22:33 +08:00
Echo009
a1beb44ccf perf: 支持轻量级任务模型,优化任务派发以及实例状态检查的处理逻辑 2023-01-02 23:57:09 +08:00
tjq
4b2d9d4d74 feat: optimize remote http impl 2023-01-02 12:47:28 +08:00
tjq
432adeb00f feat: optimize akka remote impl 2023-01-02 11:24:44 +08:00
tjq
f2b9ae222a feat: optimize akka remote impl 2023-01-02 11:21:49 +08:00
tjq
af8fbb0167 fix: throw exception when http server startup failed 2023-01-02 11:14:16 +08:00
tjq
d12ac4d6cd feat: optimize performance 2023-01-02 10:16:16 +08:00
tjq
e6a171d775 feat: add comment 2023-01-02 09:59:06 +08:00
tjq
2606440f44 feat: optimize HttpVertxCSInitializer 2023-01-02 00:22:48 +08:00
tjq
d3bd22302f feat: finished config 2023-01-02 00:11:31 +08:00
tjq
2c31e81c5f feat: framework api 2023-01-01 20:25:11 +08:00
tjq
87a1a1d7c1 feat: vertx http framework 2023-01-01 20:12:00 +08:00
tjq
268f5dd5c7 feat: add BenchmarkActor for performance test 2023-01-01 10:25:58 +08:00
tjq
eb6b0c35a5 feat: add BenchmarkActor for performance test 2023-01-01 09:41:13 +08:00
tjq
0c8e339140 feat: HttpCSInitializer 2023-01-01 09:34:05 +08:00
tjq
68a9cc52e2 refactor: change remote framework api 2022-12-31 16:56:00 +08:00
tjq
31d2283f99 feat: add remote akka impl 2022-12-31 16:40:33 +08:00
tjq
c6d90be839 feat: add remote akka impl 2022-12-31 16:34:13 +08:00
tjq
4356c5566d feat: finished remote engine 2022-12-31 15:07:27 +08:00
tjq
84ef2fd120 feat: define powerjob remote framework 2022-12-31 12:52:43 +08:00
tjq
d3b8c4e353 feat: define powerjob remote 2022-12-31 12:42:57 +08:00
ZhangJun
3f95ee8a33 部分代码优化
1.邮件通知服务,优雅注入发件人
2.雪花算法,对时钟回拨情况做优化,避免服务直接不可用
3.扫描数据库task,部分代码调整减少重复计算性能消耗
4.部分枚举类,增强代码安全性
5.其它,规范部分代码
2022-12-29 20:55:22 +08:00
Echo009
5ba4ce5457 fix: issue with nested workflow node state updates,#465 2022-10-31 14:01:12 +08:00
Echo009
39eb79de54 refactor: optimize the code of the server module
1. use constructor based dependency injection to replace field injection and solve the problem of circular dependencies
2. replace deprecated API calls
2022-10-30 12:59:48 +08:00
tjq
5189634b60 chore: change project version to 4.2.1 2022-10-23 14:27:15 +08:00
tjq
eb195cf891 feat: change client name to Pantheon 2022-10-23 14:25:24 +08:00
tjq
b9222b8594 test: add test code for h2 2022-10-23 14:08:01 +08:00
tjq
987aa966a0 test: add test code for h2 2022-10-23 13:27:18 +08:00
tjq
75e5c7049f feat: print h2 database version 2022-10-23 12:03:57 +08:00
tjq
33539857f4 feat: extract package util 2022-10-23 11:58:22 +08:00
tjq
a9936b8dba feat: optimize mongodb config 2022-10-23 11:40:48 +08:00
tjq
757b994176 feat: merge h2 upgrade from fjf 2022-10-23 11:30:49 +08:00
tjq
e6c94af599 feat: change StringUtils(from spring to apache) 2022-10-11 23:00:54 +08:00
tjq
3bcfbd8e9f feat: upgrade powerjob-server's version 2022-10-11 22:54:31 +08:00
tjq
0541216944 feat: upgrade HikariCP to latest version for Java8 2022-10-11 22:43:24 +08:00
tjq
50b68e82bd feat: upgrade junit-jupiter-api to latest version 2022-10-11 22:19:38 +08:00
tjq
daaaa15b94 feat: upgrade kryo5 to latest version 2022-10-11 22:13:06 +08:00
tjq
195984bb95 feat: upgrade jackson's version 2022-10-11 22:12:31 +08:00
tjq
7867b07d9c feat:upgrade slf4j to latest version 2022-10-11 22:11:00 +08:00
tjq
223fac9828 feat: upgrade commons-io and logback version 2022-10-11 22:06:45 +08:00
ocean
1b710c1332 refactor: 升级h2的版本 2022-10-06 17:03:26 +08:00
tjq
dc98f5f37a Merge branch 'v4.2.0' 2022-10-03 15:55:27 +08:00
tjq
c6009c8b5e feat: upgrade pom version 2022-10-03 15:50:03 +08:00
tjq
ce0290ea03 fix: front-end NPE #455 2022-10-03 15:47:47 +08:00
tjq
1301da0d7d feat: optimize code 2022-10-03 14:58:42 +08:00
tjq
6eb5966e96 feat: define LogType 2022-10-03 14:53:51 +08:00
tjq
db7f5855e1 feat: upgrade front-end to support more log type 2022-10-03 14:50:02 +08:00
tjq
fe1fad6a7b feat: temporarily skip cycle reference check 2022-10-03 14:49:02 +08:00
tjq
7feb25cf8a feat: support OmsStdOutLogger 2022-10-03 14:36:55 +08:00
tjq
cded964bcd feat: support OmsNullLogger 2022-10-03 14:23:25 +08:00
tjq
5d5b1e3854 feat: optimize comment 2022-10-03 14:16:17 +08:00
tjq
5b68b4dc75 feat: add 4.2.x upgrade SQL 2022-10-03 14:13:00 +08:00
tjq
2f62f448a8 feat: upgrade pom version to 4.2.0 2022-10-03 14:07:23 +08:00
tjq
cb72fcb08a feat: upgrade samples's springboot version 2022-10-03 14:01:05 +08:00
tjq
60209ebbc1 Merge branch '4.2.0-main' into v4.2.0 2022-10-03 13:59:14 +08:00
tjq
050190ba89 Merge branch '4.2.0-main-upgrade-spring' into v4.2.0 2022-10-03 13:57:24 +08:00
tjq
54db609d32 feat: support random DispatchStrategy #461 2022-10-03 13:54:56 +08:00
tjq
806747d88c fix: change inner class scope to fix JDK17 exception #457 2022-10-03 13:49:38 +08:00
ocean
6de2be72ef 完成spring的升级 2022-09-24 21:39:38 +08:00
songyinyin
4bc94dd465 refactor: update powerjob-server banner color 2022-09-24 20:02:13 +08:00
songyinyin
a5b46f6a47 refactor: change docker-compose mysql port to 3307 2022-09-24 20:00:23 +08:00
tjq
5a73e6ad91 fix: front-end NPE 2022-09-18 23:25:48 +08:00
tjq
91b48c0a5e fix: auto build script 2022-09-18 23:08:56 +08:00
tjq
78d793d28e fix: auto build script 2022-09-18 22:39:07 +08:00
tjq
653dcb4a92 feat: update front-end 2022-09-18 22:06:29 +08:00
tjq
ce555ad18f test: add test log processor in samples 2022-09-18 21:56:03 +08:00
tjq
e5d3139990 feat: use worker-samples as try demo 2022-09-18 21:18:56 +08:00
tjq
b2b8241295 feat: use worker-samples as try demo 2022-09-18 18:30:42 +08:00
tjq
f20a849a93 feat: support shutdown log by OFF level 2022-09-18 15:13:17 +08:00
tjq
f3c7ed8baf feat: add max queue size for log handler in worker to prevent OOM 2022-09-18 14:02:05 +08:00
tjq
483227f840 feat: script processor support cmd and powershell by fddc 2022-09-18 00:48:02 +08:00
tjq
45f7b17e14 feat: script processor support cmd and powershell by fddc 2022-09-18 00:40:35 +08:00
tjq
3823b3bc56 Merge branch '4.2.0-win-support' into 4.2.0-main 2022-09-18 00:28:52 +08:00
tjq
a39751818f Merge branch 'master' of https://github.com/fddc/PowerJob into 4.2.0-win-support 2022-09-18 00:28:27 +08:00
tjq
ec47f5a8c5 feat: add ConfigProcessor in official-processor 2022-09-18 00:22:03 +08:00
tjq
74b6acc927 feat: add ConfigProcessor in official-processor 2022-09-18 00:12:04 +08:00
tjq
dc90f272c7 feat: optimize worker log 2022-09-17 23:47:10 +08:00
tjq
e501cb9dfa feat: support LogConfig 2022-09-17 00:24:26 +08:00
tjq
a4a41c4ab7 feat: define JobLogConfig 2022-09-16 23:28:42 +08:00
tjq
3842acf952 feat: remove useless code 2022-09-16 23:10:25 +08:00
tjq
3ffaf382c7 chore: upgrade project version to 4.1.1 and ready to release 2022-09-13 01:32:54 +08:00
tjq
ca063803db release: v4.1.1 2022-09-13 00:23:33 +08:00
tjq
be2c5ea20e chore: upgrade project version to 4.1.1 and ready to release 2022-09-12 23:43:57 +08:00
tjq
2a3b9323a6 chore: upgrade project version to 4.1.1 and ready to release 2022-09-12 23:41:44 +08:00
tjq
12ff1335f2 test: fix unit test by ocean-fujfu 2022-09-12 23:24:49 +08:00
tjq
112628f386 Merge branch '4.1.1-docker-compose' into v4.1.1 2022-09-12 23:15:51 +08:00
tjq
42fa628a61 Merge branch '4.1.1-worker-enabled' into v4.1.1 2022-09-12 23:09:10 +08:00
tjq
65f2a58d2f Merge branch '4.1.1-monitor' into v4.1.1 2022-09-12 23:03:38 +08:00
tjq
5acb8f82e7 feat: optimize OmsLocalDbPool config 2022-09-12 22:59:00 +08:00
tjq
a32d7cebb5 feat: optimize server monitor event 2022-09-12 22:10:07 +08:00
tjq
5b223d23ad feat: optimize server monitor event 2022-09-12 21:52:26 +08:00
tjq
fd562d8ea0 fix: worker heartbeat use wrong thread pool 2022-09-12 21:26:01 +08:00
tjq
e6d32c9a05 fix: worker heartbeat use wrong thread pool 2022-09-12 21:18:40 +08:00
tjq
fadf2ce14e feat: optimize monitor logger config 2022-09-12 21:16:00 +08:00
songyinyin
510b5ab546 feat: add docker-compose sample, fix volumes path 2022-09-12 21:15:01 +08:00
tjq
a77ba8084e feat: config logback for monitor 2022-09-12 21:09:47 +08:00
tjq
1b9d8331a1 feat: optimize DatabaseMonitorAspect 2022-09-12 21:00:01 +08:00
ocean
ac2b28bb5f 升级spring boot后同步修改unit test 2022-09-12 15:41:32 +08:00
tjq
74f70cd58b feat: support serverInfoAware 2022-09-12 12:56:12 +08:00
tjq
5450ac00db feat: optimize WorkerLogReportEvent 2022-09-12 11:58:56 +08:00
tjq
2db0f05feb feat: optimize thread pool config 2022-09-12 11:33:13 +08:00
ocean
3466ff3f05 修复unit test 2022-09-12 11:31:18 +08:00
tjq
d531bf3a22 feat: optimize threadpool config 2022-09-12 11:07:05 +08:00
tjq
3b73a750e6 feat: server async process log report to prevent timeout #432 2022-09-12 10:45:56 +08:00
tjq
3869b115ce feat: optimize RejectedExecutionHandlerFactory 2022-09-12 09:36:29 +08:00
tjq
614349370a feat: refactor worker request handler and add monitor 2022-09-11 17:14:00 +08:00
tjq
48ac446014 feat: define MonitorService to inject monitor context 2022-09-10 23:41:48 +08:00
tjq
ac1b1fe0c8 fix: limit worker num failed in map/mapreduce job #450 2022-09-10 09:55:26 +08:00
tjq
dfd1fd069b fix: invalid random when JobInfo's maxWorkerCount is a small value #449 2022-09-10 09:27:57 +08:00
tjq
22db37cad9 feat: optimize instanceMetadataCacheSize 2022-09-10 08:40:29 +08:00
tjq
22522c099d feat: status report append appId info 2022-09-10 08:32:32 +08:00
tjq
eaf6dcad4f feat: add appId in WorkerHeartbeatEvent 2022-09-10 08:30:18 +08:00
tjq
b6de5aa563 feat: add WorkerHeartbeatEvent 2022-09-09 22:45:05 +08:00
tjq
519213ad4a feat: add SlowLockEvent 2022-09-09 00:41:50 +08:00
tjq
ce369b3e30 feat: optimize parseEffectRows 2022-09-09 00:31:54 +08:00
tjq
29a50ed89a feat: optimize parseEffectRows 2022-09-09 00:27:13 +08:00
tjq
2a76e7d043 feat: monitor db effect rows 2022-09-09 00:22:27 +08:00
tjq
cca9c5421e feat: finished jpa base monitor 2022-09-09 00:09:22 +08:00
tjq
e23825c399 feat: use package aop 2022-09-08 23:59:17 +08:00
songyinyin
a7e3c05f6c feat: add docker-compose sample, docker build tag latest 2022-09-08 22:53:48 +08:00
songyinyin
5b865fe49b feat: add docker-compose sample 2022-09-08 22:14:21 +08:00
songyinyin
88bc28140f feat: powerjob worker add property: powerjob.worker.enabled 2022-09-07 23:44:36 +08:00
tjq
a0cc5670d4 feat: define DatabaseEvent 2022-09-06 01:54:10 +08:00
tjq
5080796c6f feat: add monitor module 2022-09-06 00:26:04 +08:00
Echo009
ad1a7227d6 chore: update console resources 2022-09-04 10:36:29 +08:00
Echo009
d0e95c2129 style: rename lifecycle to lifeCycle 2022-09-04 10:36:24 +08:00
Echo009
03165bf5e4 chore: upgrade mysql connector version (8.0.19 -> 8.0.28) 2022-09-03 00:02:15 +08:00
Echo009
bdd9b978f9 feat: support inject workflow context directly 2022-09-02 20:12:58 +08:00
Echo009
54524553c1 chore: update powerjob console resources and upgrade project version to 4.1.0 2022-09-01 08:21:24 +08:00
Echo009
0b5a404cf4 chore: upgrade fastjson version (1.2.68 -> 1.2.83) 2022-09-01 07:29:17 +08:00
Echo009
e1c4946a73 chore: update sql script 2022-09-01 07:14:30 +08:00
Echo009
3566569dc5 feat: add InjectWorkflowContextProcessor 2022-09-01 07:06:21 +08:00
Echo009
08711f93d0 perf: optimize akka config 2022-09-01 07:06:21 +08:00
Echo009
5ed6eac38a perf: use cached lock replace SegmentLock 2022-09-01 07:06:21 +08:00
Echo009
0c4eb3834a fix: task status transfer anomaly, #404 2022-09-01 07:06:21 +08:00
Echo009
a9a0422de1 fix: the problem of saving frequent job 2022-09-01 07:06:21 +08:00
Echo009
812d71f090 fix: NPE in FrequentScheduler 2022-09-01 07:06:21 +08:00
Echo009
7539faffff feat: use Groovy Engine replace Nashorn Engine. 2022-09-01 07:06:21 +08:00
Echo009
88b92e2994 chore: add stop task demo 2022-09-01 07:06:21 +08:00
Echo009
2e1c585b5e fix: Illegal nested workflow node 2022-09-01 07:06:21 +08:00
Echo009
3923937f6c fix: the problem of failed to save workflow and job 2022-09-01 07:06:21 +08:00
Echo009
8909584976 feat: replace cron implementation and support job lifecycle #382 #208 2022-09-01 07:06:21 +08:00
Echo009
abf266b7f8 feat: support passing instance parameters. #381 2022-09-01 07:06:21 +08:00
Echo009
1d34547f45 chore: update config 2022-09-01 07:06:21 +08:00
Echo009
56447596f7 fix: Repetitive execution of frequency tasks #375 2022-09-01 07:06:21 +08:00
Echo009
8488a10465 fix: stop nested workflow 2022-09-01 07:06:21 +08:00
Echo009
b60c236824 feat: workflow's DAG must be not empty 2022-09-01 07:06:21 +08:00
Echo009
d87c358743 fix: workflow cron bug #316 2022-09-01 07:06:21 +08:00
Echo009
5791b43ac6 fix: the problem of retrying nested workflow node 2022-09-01 07:06:21 +08:00
Echo009
d4eb8e3303 fix: the problem of incorrect sub-workflow state 2022-09-01 07:06:21 +08:00
Echo009
9c30e5ee83 fix: update data structure of JobInfoVO 2022-09-01 07:06:21 +08:00
Echo009
8aa5140265 feat: support frequent task alarm #370 2022-09-01 07:06:21 +08:00
Echo009
d7c0d12a30 fix: decision node missing job param 2022-09-01 07:06:21 +08:00
Echo009
9194641c6f fix: workflow node validator 2022-09-01 07:06:21 +08:00
Echo009
4b14be8321 feat: update the workflow maintenance interface 2022-09-01 07:06:21 +08:00
Echo009
d996b34a54 feat: support nested workflow #266 2022-09-01 07:06:21 +08:00
Echo009
c15cefc447 feat: support decision node #188 2022-09-01 07:06:21 +08:00
Echo009
8663f3b79f fix: problem of task process in case of task slice exception. #355 2022-09-01 07:06:21 +08:00
Echo009
ac8e96508c feat: use CompatibleFieldSerializer as default kyro serializer, providing both forward and backward compatibility 2022-09-01 07:06:21 +08:00
Echo009
d799586ce9 fix: NetUtils, use the first valid network interface 2022-09-01 07:06:21 +08:00
脏兮兮
e585ba5a19
feat: 添加 worker tag 配置 2022-09-01 07:05:16 +08:00
读钓
62d682fbd5
fix: When you append a string to the workflow context, the value has multiple double quotes.(#307) 2022-08-31 23:15:04 +08:00
Ryan
fb6e57a75c
1.基于作者4.0.1版本SQL修改 (#263)
2.增加表注释、字段注释
3.调整排序规则 utf8mb4_0900_ai_ci -> utf8mb4_general_ci
2022-08-31 23:13:03 +08:00
Justin Zhang
75c88c32ed
fix: bug #324
* fix: bug #324

Fail to cancel delay job instance by id via API

* style: correct log

* style: update log

Co-authored-by: Echo009 <ech0.extreme@foxmail.com>
2022-08-31 23:12:10 +08:00
fddc
0aa06d1ae6 解决win平台bat脚本中文路径执行乱码问题 2021-08-06 20:41:24 +08:00
fddc
973322370a agent新增tag启动参数 2021-05-31 13:06:09 +08:00
fddc
49c7d18c00 增加powershell,以支持windows平台 2021-05-14 18:23:27 +08:00
fddc
4fccc81697 非windows系统才需要chmod 2021-05-13 11:02:42 +08:00
tjq
e094c22952 [release] v4.0.1 2021-04-05 16:47:31 +08:00
tjq
6ae809617b docs: update user.png and readme 2021-04-05 16:47:10 +08:00
tjq
cbcd5dcca7 refactor: update guava version for security #CVE-2020-8908 2021-04-05 15:08:59 +08:00
tjq
7a471a3917 chore: change version to 4.0.1 2021-04-05 15:01:00 +08:00
tjq
584b20ae9b chore: use project.parent.version to manage powerjob-server's version 2021-04-05 14:47:01 +08:00
tjq
f955ae2f61 fix: incorrect worker list display #245 2021-04-05 14:18:24 +08:00
tjq
c8a1f536c3 feat: output more server info 2021-04-05 13:54:16 +08:00
tjq
7527b31ece fix: update powerjob-server's sql to fix the bug of can't save workflow #247 2021-04-05 13:24:39 +08:00
tjq
4e9c1f98b7 refactor: optimize dialect properties 2021-04-05 12:57:50 +08:00
tjq
12d0d4dbb0 fix: compatibility issues for PostgreSQL 2021-04-05 12:52:48 +08:00
tjq
17439536f0 fix: server election bug 2021-04-05 12:51:10 +08:00
tjq
9949e23bc4 fix: NPE when some app has on worker connected 2021-04-05 12:49:37 +08:00
Echo009
4d236153de chore: add PostgreSQL dialect config hint to pre and product env config file 2021-04-03 17:12:57 +08:00
Echo009
a0f76f7ba9 refactor: optimize code for pull request 249,fix issue #153 2021-04-03 17:08:16 +08:00
Chang Kung Yao
5f0865129e Add Postgresql Support 2021-03-30 08:27:15 +08:00
lwc1
c31f10b3c1 fix server election 2021-03-27 14:32:12 +08:00
luter
0e5873ca05 getWorkerInfoByAddress NPE 问题的修正 2021-03-23 22:13:28 +08:00
tjq
cfb05d9871 chore: fix github CI failed 2021-03-22 00:17:41 +08:00
tjq
98330846c3 [release] v4.0.0 2021-03-21 23:10:43 +08:00
tjq
15f48304b2 chore: update central maven path and ready to release 2021-03-21 22:14:30 +08:00
tjq
88caf22c9f chore: install python2.7 for powerjob-worker-agent 2021-03-21 21:10:08 +08:00
tjq
93c19eae82 chore: optimize build script 2021-03-21 20:38:51 +08:00
tjq
f5afff56c4 fix: execute in local when there is no address info for one app 2021-03-21 19:18:22 +08:00
tjq
0adb16011c fix: NPE when saving container 2021-03-21 18:48:34 +08:00
tjq
ddabdf2187 feat: upgrade serializer to kryo5 2021-03-21 15:57:29 +08:00
tjq
e767ad1043 feat: use kryo to replace jackson which is really easy to lead conflict 2021-03-21 15:08:48 +08:00
tjq
4399f99c42 feat: add database driver for powerjob-worker-agent to support dynamic sql processor 2021-03-21 14:15:23 +08:00
tjq
f8ef896a65 refactor: change processor API(abstract class to interface) 2021-03-21 13:53:06 +08:00
tjq
0a854cd276 refactor: rename OhMyClient to PowerJobClient 2021-03-20 00:02:24 +08:00
tjq
fe439721d0 refactor: change name from OhMyWorker to PowerJobWorker 2021-03-19 23:40:55 +08:00
Echo009
d8811c7d77 fix: fail to redispatch the job instance which is timeout 2021-03-17 11:24:52 +08:00
Echo009
d29f0651e2 fix: missing node status info in workflow instance DAG 2021-03-16 19:45:32 +08:00
Echo009
145d525b29 fix: missing job id in node when fetching workflow info 2021-03-16 16:57:43 +08:00
Echo009
ab8c8b5f0f refactor: simplify processing logic of workflow maintenance 2021-03-16 16:16:11 +08:00
Echo009
0febeea298 chore: change scope of mysql dependency in official processor module 2021-03-15 18:58:53 +08:00
Echo009
50635faf53 refactor: processing logic of workflow maintenance 2021-03-15 17:41:13 +08:00
tjq
2cb33a7019 fix: ClassNotFound in official-processor 2021-03-14 23:28:42 +08:00
tjq
d6b72afd19 test: DynamicDatasourceSqlProcessorTest 2021-03-14 23:23:40 +08:00
tjq
2e0797007c feat: update powerjob-console 2021-03-14 23:10:27 +08:00
tjq
dff1fa4938 feat: support DynamicDatasourceSqlProcessor 2021-03-14 23:00:17 +08:00
tjq
fb3673116b feat: add security check for dangerous official processors 2021-03-14 22:35:03 +08:00
tjq
0f6ac91080 feat: update sql file 2021-03-14 19:58:12 +08:00
tjq
f3b4e91745 feat: update container template 2021-03-14 19:24:15 +08:00
tjq
3ce47382d8 chore: change maven groupId to tech.powerjob 2021-03-14 17:01:36 +08:00
tjq
9dffb49918 chore: modify package name to tech.powerjob 2021-03-14 16:47:22 +08:00
tjq
67f9cbab5e fix: open 10010 port in docker 2021-03-14 16:10:29 +08:00
tjq
3f6585cfda fix: remove OmsWorkerFileUtils to avoid possible path conflict problems 2021-03-14 11:54:37 +08:00
tjq
1273b16caf feat: show job according to creation order 2021-03-14 00:48:13 +08:00
tjq
0fc9978385 refactor: optimize powerjob-common's package 2021-03-13 23:38:12 +08:00
tjq
f1baef7de4 feat: merge official processor upgrade 2021-03-13 21:08:07 +08:00
tjq
2c8d1345cb feat: merge workflow upgrade by cvte 2021-03-13 20:44:56 +08:00
tjq
5a9a5c6910 feat: show result in sql processor 2021-03-13 20:35:43 +08:00
Echo009
93158ba19b refactor: SimpleSpringJdbcTemplateSqlProcessor => SimpleSpringSqlProcessor 2021-03-13 14:49:42 +08:00
Jiang Jining
5834963fdd fix: Set default request body to prevent NullPointerException 2021-03-13 00:47:08 +08:00
Jiang Jining
0f1d760dbe test: Add test for HttpProcessor 2021-03-13 00:43:51 +08:00
Echo009
86b584be2f fix: NoClassDefFoundError when invoking JSON.toJSON() method in official processor 2021-03-12 15:43:16 +08:00
Echo009
1d67e97b45 refactor: SqlProcessor => SimpleSpringJdbcTemplateSqlProcessor 💡 2021-03-12 14:41:38 +08:00
tjq
30d0d7d338 feat: terminate actorSystem when shutdown worker 2021-03-11 23:37:05 +08:00
tjq
4c89a1e69c feat: prevent worker from being repeatedly initialized 2021-03-11 23:28:33 +08:00
jiangjining
d07ed2b013 refactor: Rename Task_TRACKER_ACTOR_NAME 2021-03-11 18:28:36 +08:00
Echo009
48860e3172 fix: NoClassDefFoundError when instantiating SqlProcessor 2021-03-11 15:41:09 +08:00
Echo009
87ed304737 feat: SQL processor 2021-03-11 11:40:25 +08:00
Echo009
f4d7459f63 fix: compile error in official processors module 2021-03-10 14:33:12 +08:00
jiangjining
65e0d118c3 feat: Add windows script support.#161 2021-03-09 11:09:28 +08:00
jiangjining
2ed0391d15 feat: Official HttpProcessor adds alarm support. #223 2021-03-09 11:02:04 +08:00
Echo009
d20cbdb844 fix: the problem of inconsistent data when changing job in node of workflow 2021-03-09 10:46:24 +08:00
Echo009
375b70dd40 fix: concurrency problem when dispatch job instance 2021-03-08 16:34:13 +08:00
Echo009
afff77b540 fix: concurrency problem when process workflow instance 2021-03-08 16:14:08 +08:00
Echo009
e6127f1dba refactor: optimize the processing logic of appending workflow context in worker 2021-03-08 10:53:20 +08:00
Echo009
38929bff6d refactor: optimize API of workflow 2021-03-08 01:08:24 +08:00
tjq
6a25bb57e4 [release] v3.4.8 2021-03-07 23:37:05 +08:00
tjq
b690d9c594 fix: NoClassDefFoundError of official-processor 2021-03-07 23:30:09 +08:00
tjq
f81b785c0f fix: NoClassDefFoundError of official-processor 2021-03-07 23:05:36 +08:00
tjq
3cf4807ca3 chore: change pom version to 3.4.8 2021-03-07 22:51:58 +08:00
tjq
59d4df1422 feat: support worker clusters dynamically sliced according to business tag #226 2021-03-07 22:47:52 +08:00
tjq
cc9d804e84 refactor: just use one actor in server and worker side 2021-03-07 21:27:18 +08:00
tjq
30f2d2404e feat: support for starting multiple workers on a single java application #217 2021-03-07 21:17:19 +08:00
tjq
17cb9ea626 chore: upgrade deployment script 2021-03-07 18:04:56 +08:00
tjq
bbc1cce627 style: optimize server's package 2021-03-07 16:24:09 +08:00
tjq
7579d02693 chore: fix multi module's package 2021-03-07 15:10:23 +08:00
tjq
572ad44fb1 style: optimize server's package 2021-03-07 13:42:25 +08:00
tjq
51d12dc6b4 style: optimize server's package 2021-03-07 00:15:55 +08:00
tjq
dcb6b5ee24 style: optimize server's package 2021-03-06 23:57:48 +08:00
tjq
b608bb9908 style: optimize server's package 2021-03-06 23:51:27 +08:00
tjq
f633f9ae57 fix: NPE when instanceTimeLimit is null 2021-03-06 19:11:29 +08:00
tjq
a0564b7922 fix: official processor can't work 2021-03-06 00:11:52 +08:00
Echo009
5b09865b42 feat: migrate data service (v3.x to v4.x) 2021-03-05 23:54:50 +08:00
tjq
27ebd42c8e feat: support for specifying binding IP according to JVM startup parameters #227 2021-03-05 11:47:00 +08:00
Echo009
218d974122 refactor: rename package 2021-03-04 15:25:17 +08:00
Echo009
59e3fee086 feat: update powerjob client,copy job and workflow are supported 2021-03-04 15:12:53 +08:00
Echo009
0c424b52df feat: copy job API 2021-03-04 14:05:50 +08:00
Echo009
aee69a4167 fix: a silly mistake,orz 2021-03-04 09:14:06 +08:00
Echo009
b3ca7fd670 fix: the problem that can not stop the job instance which is managed by another server 2021-03-03 22:00:14 +08:00
Echo009
7575bbd4e1 feat: official processors adapt for workflow 2021-03-03 20:17:10 +08:00
Echo009
5f1ab82f0e fix: some problems when stop a workflow instance 2021-03-03 19:52:44 +08:00
Echo009
8f9e53ed83 feat: the workflow instance whose max instance number is less than or equal to zero is always allowed to be executed 2021-03-03 14:26:50 +08:00
Echo009
21291b34cb feat: copy workflow API 2021-03-02 18:56:49 +08:00
Echo009
bf5f6ef0db fix: running times of job instance is always 0 2021-03-02 10:22:28 +08:00
Echo009
44ae97d6d8 refactor: optimize initialization of workflow context 2021-03-01 20:02:47 +08:00
Echo009
da6eb3705b chore: exclude slf4j in maven-shade-plugin relocations config 2021-03-01 15:38:26 +08:00
Echo009
9a6047ad9c fix: fatal bug when updating instance status of frequent task 2021-03-01 15:29:11 +08:00
Echo009
519d370445 refactor: change parameter type of getting instance detail API 2021-03-01 15:19:51 +08:00
Echo009
ef384b5574 feat: add job parameters info into fetching job instance detail api 2021-02-26 11:34:11 +08:00
Echo009
539d31007c feat: add workflow context info into fetching workflow instance api 2021-02-26 11:33:03 +08:00
Echo009
cea2c0cc87 fix: incorrect name of workflow instance DAG node 2021-02-26 10:10:14 +08:00
Echo009
79b30f882d fix: missing workflow instance result and finished time 2021-02-26 09:54:02 +08:00
Echo009
6d5225beab fix: missing node params 2021-02-25 20:13:44 +08:00
Echo009
540849524d fix: NPE when fetch workflow info 2021-02-25 10:13:41 +08:00
Echo009
84d00ee580 fix: 修改工作流节点接口取值逻辑的问题 2021-02-25 10:13:41 +08:00
tjq
a8e284ccbe feat: remove build-in script processor 2021-02-24 00:19:06 +08:00
tjq
611f8773e8 [release] v3.4.7 2021-02-23 00:56:47 +08:00
tjq
b197dc4286 chore: change pom version to 3.4.7 2021-02-23 00:46:03 +08:00
tjq
ec9519a6b2 fix: rollback ScriptProcessor to fix bug #222 2021-02-23 00:40:41 +08:00
tjq
3d1c907104 feat: update agent's official-processor version 2021-02-23 00:20:43 +08:00
Echo009
6dc20eee7d fix: 修复特殊场景下 DAG 校验错误的问题(存在“游离”的环) 2021-02-22 21:43:06 +08:00
Echo009
b89c129e2b fix: 修复获取工作流详情信息 timeExpressionType 字段值缺失的问题 2021-02-22 16:07:42 +08:00
tjq
bd9224a805 feat: support random dispatch strategy #205 2021-02-22 00:46:11 +08:00
Echo009
823a47303b feat: 工作流节点支持标记成功 2021-02-21 15:29:10 +08:00
tjq
79a61454f3 feat: add JVM shutdown hook to release database lock 2021-02-21 12:43:07 +08:00
tjq
91bb28abbb feat: adopt a new server-id generation policy to solve the problem of long IDs #220 2021-02-21 12:26:36 +08:00
Echo009
449608293c refactor: 调整工作流上下文控制策略 2021-02-20 21:32:26 +08:00
曾巩锢
5fd4b9ae9d feat: 新增工作流相关维护接口 2021-02-20 19:23:20 +08:00
tjq
ed94bef458 refactor: optimize worker cluster service 2021-02-20 00:08:23 +08:00
tjq
e24f20f5ba refactor: use DesignateServer to get worker info list 2021-02-19 23:13:14 +08:00
tjq
2e488b5837 chore: update akka to 2.6.12 2021-02-19 21:48:49 +08:00
tjq
daf42be5cb refactor: optimize DispatchService's worker filter code #215 2021-02-19 20:52:52 +08:00
tjq
eac7ce7b27 refactor: use Map to replace String in WorkflowContext 2021-02-19 19:53:07 +08:00
Echo009
d6d461c77e refactor: 将工作流上下文相关的逻辑集中至 WorkflowContext 2021-02-19 15:58:12 +08:00
Echo009
fa3981d167 docs: 改动较大的类文件添加 @author 注释 2021-02-19 11:23:11 +08:00
Echo009
783ea4f67f feat: OpenAPI 添加工作流实例重试功能 2021-02-18 19:43:33 +08:00
Echo009
9748190a8a fix: 修复工作流实例 DAG 中节点信息未同步的问题 2021-02-18 19:43:33 +08:00
Echo009
6671bcf6f7 fix: 修复 DispatchService#dispatch 方法的 jobId 取值问题 2021-02-18 15:21:11 +08:00
Echo009
c3cc8aef4c feat: 工作流支持节点禁用,原地重试 2021-02-18 15:20:16 +08:00
Echo009
4ee6300c6a feat: 工作流节点支持失败跳过 2021-02-18 15:15:06 +08:00
Echo009
11b712332d feat: 工作流任务支持更新上下文数据(后续任务的实例参数信息) 2021-02-18 15:06:49 +08:00
Echo009
814d4321a1 fix: 修复 map 生成的子任务命名和根任务名或最终任务名一致导致的问题(无限生成子任务 或者 直接失败) 2021-02-18 14:48:37 +08:00
Echo009
fd36f2f8e1 feat: 工作流支持重复的任务节点、支持任务参数(静态)个性化、调整实例参数传递机制 2021-02-18 14:45:22 +08:00
Echo009
6d9af1cff0 feat: add WorkflowNodeInfoRepository 2021-02-18 12:19:03 +08:00
Echo009
56993335e3 feat: 更新实体模型以及 DAG 相关数据模型 2021-02-18 10:00:38 +08:00
tjq
e1fc805a0b [release] v3.4.6 2021-02-18 00:04:24 +08:00
tjq
16a5f8efb6 fix: TaskTracker should write address info into TaskDO when execute broadcast 2021-02-17 10:58:53 +08:00
tjq
83ac13b221 feat: user-customized worker filter #215 2021-02-16 23:09:56 +08:00
tjq
770f30dd05 fix: returned an incorrect address when using HTTP protocol 2021-02-16 14:11:08 +08:00
tjq
83f6cf50a7 fix: Judge if OS is Windows and whether to run shell scripts 2021-02-12 21:00:11 +08:00
tjq
c17da02da5 refactor: optimized package structure 2021-02-12 20:57:12 +08:00
tjq
7441c61313 chore: change pom version to 3.4.6 2021-02-11 10:38:09 +08:00
tjq
dfbf9ec137 fix: returns success even if the status message fails to be processed in http protocol #209 2021-02-10 00:01:37 +08:00
tjq
5586d48f93 fix: always use ActorSystem's address as AppInfo's currentServer to avoid some problem #209 2021-02-09 23:43:54 +08:00
tjq
ee9ed3c099 feat: new threads are given names to help locate problems #213 2021-02-09 22:53:28 +08:00
tjq
b4288225a0 fix: use fixedDelay instead of fixedRate to fix the problem of scheduling of multiple threads causing system crashes #213 2021-02-09 22:47:36 +08:00
tjq
a1a5ade215 feat: abstract ServerElectionService interface to support developers to customize #191 2021-02-09 22:40:50 +08:00
Jiang Jining
eb87c329bb fix: Judge if OS is Windows and whether to run shell scripts. 2021-02-09 00:35:24 +08:00
Jiang Jining
ea5cbfe8c4 docs: Add stargazers trend, add Reddit community promotion, correct usage of recommend. 2021-02-09 00:26:42 +08:00
tjq
a575b65320 feat: Unified API management using ProtocolConstant #209 2021-02-08 22:22:17 +08:00
tjq
eda39a6372 feat: support other protocol's server elect #209 2021-02-08 21:07:00 +08:00
tjq
0f1e17e862 feat: add HttpTransporter #209 2021-02-08 20:48:03 +08:00
tjq
57501075de feat: add WorkerRequestHttpHandler powered by vert.x 2021-02-08 20:03:35 +08:00
tjq
d978f84a60 refactor: Upgrade ClusterStatusHolder to support multiple communication protocols 2021-02-08 00:06:03 +08:00
tjq
3d1dc68928 style: rename package 2021-02-07 22:55:36 +08:00
tjq
64b9673a5c feat: add ScriptProcessor as official processor 2021-02-07 22:45:51 +08:00
tjq
b24e0d2682 feat: support disable swagger-ui by config #203 2021-02-06 16:01:56 +08:00
tjq
75d922d4a9 feat: support disable swagger-ui by config #203 2021-02-06 15:53:51 +08:00
tjq
e97bdc71c7 fix: NPE when InstanceTimeLimit is null #207 2021-02-06 15:37:12 +08:00
tjq
ef3f322fff docs: add PULL_REQUEST_TEMPLATE 2021-02-04 23:32:19 +08:00
tjq
d06e5bc029 test: clean script by FileCleanupProcessor #136 2021-02-04 01:06:56 +08:00
tjq
0f3cd48527 refactor: optimize official processors 2021-02-04 00:49:28 +08:00
tjq
5b41da1473 feat: open minimum interval time limit for fixed rate job #199 2021-02-04 00:22:27 +08:00
tjq
4a4ef9ba13 fix: timeout params not take effect for HttpProcessor #200 2021-02-04 00:18:30 +08:00
tjq
3cc195ee33 feat: add FileCleanupProcessor #136 2021-02-01 23:47:00 +08:00
tjq
bb11209d45 [release] v3.4.5 2021-01-31 23:17:13 +08:00
tjq
5ff56f8a41 refactor: optimize official processors 2021-01-31 23:09:36 +08:00
tjq
7616890050 fix: incorrect usage of maven-shade-plugin 2021-01-31 22:56:35 +08:00
tjq
80d2709104 fix: misspell 2021-01-31 22:22:04 +08:00
tjq
c199945840 docs: update user.png 2021-01-31 21:57:10 +08:00
tjq
f3048caaf0 fix: powerjob-agent can't output logs 2021-01-31 19:59:09 +08:00
tjq
2c3e35a742 feat: official HttpProcessor support custom timeout 2021-01-31 19:49:16 +08:00
tjq
02eed5e15e fix: powerjob-agent can't output log 2021-01-31 19:30:12 +08:00
tjq
e3ef52fc0a fix: powerjob-agent can't output log 2021-01-31 19:11:35 +08:00
tjq
1af361444d feat: official HttpProcessor 2021-01-31 18:44:40 +08:00
tjq
c4098a60e7 feat: add new module[powerjob-offical-processors] 2021-01-31 17:50:21 +08:00
tjq
b642e1b39b feat: change version to v3.4.5 2021-01-31 17:39:58 +08:00
tjq
2066f5d1da refactor: optimize powerjob-agent's logback config 2021-01-31 15:20:56 +08:00
tjq
cce26511d8 fix: task status rollback #190 2021-01-31 14:44:51 +08:00
tjq
cdaea08ba7 fix: the bug of tasks occasionally cannot be stopped #180 2021-01-31 14:20:52 +08:00
tjq
fb29d8013b fix: JobInfoQuery can't serialize #183 2021-01-31 13:26:38 +08:00
tjq
2035bd6544 feat: support using network interface name for NetUtils #179 2021-01-19 09:19:43 +08:00
tjq
9e349a202a feat: support using network interface name for NetUtils #179 2021-01-19 09:15:37 +08:00
tjq
74fc5edb1f [release] v3.4.4 2021-01-17 17:19:32 +08:00
tjq
5dbc8db849 style: optimize .ignore 2021-01-17 17:19:07 +08:00
tjq
b71d167afd docs: update readme 2021-01-17 11:02:39 +08:00
tjq
4951ccad09 docs: update readme 2021-01-17 10:41:30 +08:00
tjq
1e51a5161f feat: support auto login powerjob-console by url params 2021-01-17 10:00:28 +08:00
tjq
144020a710 fix: NPE when clean the data of instance_info table #165 2021-01-16 23:57:21 +08:00
tjq
b23ae334d3 feat: change the pom version to v3.4.4 and ready to release 2021-01-16 23:25:50 +08:00
tjq
119de1fb44 fix: I forgot to release the segment lock... 2021-01-16 23:24:04 +08:00
tjq
3cb9ddf31f fix: the bug of calculate thread pool size in ProcessorTracker 2021-01-16 22:56:40 +08:00
tjq
dd66c8fd31 fix: the bug of calculate thread pool size in ProcessorTracker 2021-01-16 22:52:26 +08:00
tjq
01e7dd41c7 docs: update readme 2021-01-16 22:51:00 +08:00
tjq
b044784e6a refactor: JobInfoQuery 2021-01-16 22:48:21 +08:00
ocean23
e429c6f59e fix: modify by suggestion 2021-01-16 17:14:10 +08:00
ocean23
62aeb9c080 fix: solved fixed Rate(Delay) concurrency #174 2021-01-16 16:48:14 +08:00
tjq
76eaabb42f feat: support custom statuc check period #166 2021-01-16 16:20:03 +08:00
tjq
293bc64ffa refactor: optimize the packageName and methodName of LockService 2021-01-16 16:04:20 +08:00
tjq
1c52809ebb feat: add segment lock to fix the schedule concurrency bug #168 2021-01-16 16:01:40 +08:00
tjq
895e69f043 refactor: change the convert method's name 2021-01-16 11:51:57 +08:00
tjq
0912964ec3 feat: support InstanceQuery #158 2021-01-16 11:40:54 +08:00
tjq
64c36d402a feat: develop QueryConvertUtils to redefine the query 2021-01-16 00:44:49 +08:00
tjq
7acc1e67e3 feat: support fetchAllJob in OpenAPI #158 2021-01-15 09:18:04 +08:00
tjq
bd8db1b9df feat: decrease initialDelay to speed up the execution of short tasks #166 2021-01-15 00:11:33 +08:00
jiangjining
3bd37525ca docs: Modify README.md and fill in LICENSE. 2021-01-13 19:57:06 +08:00
Jining Jiang
2ab5ac6aa0
Merge pull request #5 from PowerJob/master
Update
2021-01-13 14:58:05 +08:00
tjq
bc58272c85 docs: update readme 2021-01-12 09:26:19 +08:00
tjq
9d10e939a1 docs: update FUNDING.yml 2021-01-11 00:05:11 +08:00
tjq
2ac9cb44e9 release: v3.4.3 2021-01-10 18:16:05 +08:00
tjq
8d6d7c927d fix: Grammatical errors 2021-01-10 18:15:23 +08:00
tjq
515ce9bce2 refacotr: no throw PowerJobException in OpenAPI 2021-01-10 18:09:35 +08:00
tjq
2ecbc2c205 fix: Grammatical errors 2021-01-10 18:01:40 +08:00
tjq
37cf53e0a9 fix: Grammatical errors 2021-01-10 17:56:11 +08:00
tjq
374bae9fc1 feat: change pom version to 3.4.3 and ready to release 2021-01-10 17:36:40 +08:00
tjq
5f3827b8e7 docs: optimize readme 2021-01-10 16:32:03 +08:00
tjq
f34f903947 docs: translate OhMyConfig 2021-01-10 12:04:23 +08:00
tjq
5feaf6106e docs: review translation 2021-01-10 11:54:16 +08:00
tjq
b97c26c78b docs: translate for OhMyClient 2021-01-10 11:20:13 +08:00
tjq
078db73bce docs: translate for internationalization #152 2021-01-10 10:47:32 +08:00
jiangjining
2da5fc624b docs: Modify instance detail model. 2021-01-08 22:49:36 +08:00
jjn
b1b8e1de95 docs: Add translations for models in PowerJob-common. 2021-01-08 22:20:35 +08:00
jiangjining
d5ab77ddbd docs: Add translations for some entities in PowerJob-common. 2021-01-07 20:35:55 +08:00
jiangjining
36e258012e docs: Add translations PowerJob-client. 2021-01-07 19:44:25 +08:00
jiangjining
281c53ad63 docs: Add translations for pom and property files. 2021-01-07 18:56:56 +08:00
jiangjining
d4a7aa68b1 Optimize translation in starter. 2021-01-07 10:09:29 +08:00
jiangjining
d69171afa5 Add translations for PowerJobAutoConfiguration. 2021-01-07 09:36:55 +08:00
jiangjining
1384e1a886 Optimize comments in starter. 2021-01-06 20:32:13 +08:00
jiangjining
b248f76d32 Add translation for PowerJobProperties. 2021-01-06 15:33:49 +08:00
Jining Jiang
f507d340a2
Merge pull request #4 from PowerJob/master
Update
2021-01-05 18:32:43 +08:00
tjq
43ca05883a refactor: change repo url to PowerJob/PowerJob 2021-01-03 22:32:18 +08:00
tjq
9e6ba33f90 [release] v3.4.2 2021-01-03 14:47:12 +08:00
tjq
b6173967bb refactor: optimize ProcessorTracker log 2021-01-03 14:27:00 +08:00
tjq
6fc5c65825 fix: receive ProcessorTrackerStatusReportReq but system can't find TaskTracker #147 2021-01-03 13:01:35 +08:00
tjq
6c3c6695e4 refactor: change version to 3.4.2 and ready to release 2021-01-02 20:15:09 +08:00
tjq
dfd2106a3f feat: Swagger API version keep up-to-date with POM version #139 2021-01-02 20:10:04 +08:00
tjq
85b8d004e4 feat: Swagger API version keep up-to-date with POM version 2021-01-02 20:07:19 +08:00
tjq
52ea3fb80d fix: the bug of concurrent clean the same thing #144 2021-01-02 20:06:18 +08:00
tjq
24cff5b6ec fix: multi server clean the same object 2021-01-02 20:03:05 +08:00
tjq
6a85995937 refactor: optimize thread pool config 2021-01-02 19:52:22 +08:00
ocean23
3776d4ad84 修改注释 2021-01-02 13:30:46 +08:00
ocean23
0daa097d20 根据意见重构代码 2021-01-02 13:28:55 +08:00
tjq
9a661aa177 fix: DatabaseLock can't unlock when timeout 2021-01-02 13:14:25 +08:00
ocean23
05181c34ec 回滚GridFsManager的代码把锁加在CleanService上 2021-01-02 12:39:36 +08:00
ocean23
ead9f08e52 回滚GridFsManager的代码把锁加在CleanService上 2021-01-02 12:38:19 +08:00
tjq
3ecefd22cb fix: the bug of idle check #146 2021-01-02 11:31:48 +08:00
ocean23
0e77a23e76 fix并发情况下server端删除日志时数据已经被其他server删除的问题 2021-01-01 14:17:54 +08:00
tjq
269d64065c docs: update readme 2021-01-01 13:00:44 +08:00
tjq
1b68fdb7d7 docs: update readme 2021-01-01 12:56:51 +08:00
tjq
83dae8ddd3 docs: update readme 2021-01-01 12:55:35 +08:00
tjq
2b7936b39b docs: update readme 2021-01-01 12:53:51 +08:00
tjq
6ae24ac9ed fix: OpenAPI can't save workflow 2021-01-01 12:33:52 +08:00
jjnnzb
ef881cfcac Change version judgement info. 2020-12-29 07:46:07 +08:00
jjnnzb
5329fba6b0 Set deafult version to Unknown and trim version info. 2020-12-28 23:19:18 +08:00
jjnnzb
d4af8138d0 Prevent codes from reporting error when pom.xml is not reinstalled. 2020-12-27 21:53:36 +08:00
jjnnzb
4f3b6057b6 Swagger API document version keep update-to-date with pom.xml version. 2020-12-27 15:58:16 +08:00
Jining Jiang
73a5a724ec
Merge pull request #3 from KFCFans/master
Update
2020-12-25 16:17:06 +08:00
tjq
e8ada3789f fix: can't download log file when sever and browser are not in the same network 2020-12-20 23:14:44 +08:00
tjq
c176a447e7 feat: support LogLevel 2020-12-20 22:53:56 +08:00
tjq
11054e9761 [release] v3.4.1 2020-12-20 21:32:58 +08:00
tjq
2d989d2b0b revert: temporary remove @Type(StringType) due to it will change the db colum type to varchar 2020-12-20 21:28:37 +08:00
tjq
12162f2955 feat: log full stack info when can't fetch processor #134 2020-12-20 20:40:36 +08:00
tjq
0de6a9f4f5 merge: modify README_enUS.md 2020-12-20 00:06:00 +08:00
tjq
c40774d578 refactor: add extension module 2020-12-20 00:04:07 +08:00
tjq
9f2d3134d8 merge: fix-k8s-postgresql by tanwenhai 2020-12-19 22:10:15 +08:00
tjq
0095d78615 merge: from commiter tanwenhai 2020-12-19 22:08:28 +08:00
tjq
977b8bfd4b fix: fetch instance log failed when server use different http port 2020-12-19 21:49:41 +08:00
tanwenhai
76e5a41881 serverIdProvider interface and implements 2020-12-17 11:12:54 +08:00
tanwenhai
35f0991f03 TypeDef 2020-12-16 18:35:56 +08:00
tanwenhai
4eb53990f4 daily config 2020-12-16 17:05:29 +08:00
tanwenhai
ca6b02d19f reset 2020-12-15 16:14:18 +08:00
tanwenhai
e311e7c500 id generate 2020-12-15 16:11:02 +08:00
tanwenhai
be43839d73 actuator endpoint 2020-12-15 15:29:06 +08:00
tanwenhai
9822e8a10f postgresql 2020-12-15 15:27:57 +08:00
Jining Jiang
3acbd0bcf5
Merge pull request #2 from KFCFans/master
Update to 3.4.0-bugfix
2020-12-14 09:53:59 +08:00
jjnnzb
ee80ae76a8 Optimize statement. 2020-12-13 20:50:29 +08:00
tjq
d799c0f2bd feat: develop web console to suit the modify 2020-12-13 18:19:25 +08:00
tjq
3029673612 refactor: use aop to redirect request #131 2020-12-13 18:14:44 +08:00
KFCFans
ff81ac4485 feat: change version to 3.4.1 2020-12-12 10:37:24 +08:00
tjq
8b93ca93eb [release] v3.4.0-bugfix 2020-12-06 20:15:29 +08:00
tjq
1a8cc2a696 feat: change version and start to release 2020-12-05 11:04:49 +08:00
tjq
9db9d0d01f fix: concurrency safety in ProcessotTrackerPool 2020-12-05 10:36:33 +08:00
tjq
b4f92bbc25 fix: memory leak in ProcessorTrackerPool #122 2020-12-05 10:28:52 +08:00
jjnnzb
a4f768e46c advertise--->promote 2020-12-02 11:28:12 +08:00
jjnnzb
e300885839 Modify README_enUS.md, client--->user. 2020-12-02 00:37:41 +08:00
jjnnzb
6ce5765ff4 Modify README_enUS.md, add client registration, design goals and online trial. 2020-12-02 00:35:23 +08:00
jjnnzb
f104b34d2c Modify README_enUS.md, correct spelling mistakes and change statements. 2020-12-01 22:47:05 +08:00
Jining Jiang
efb4486a43
Merge pull request #1 from KFCFans/master
Upgrade to 3.4.0
2020-11-30 09:15:46 +08:00
tjq
143663e333 [release] v3.4.0 2020-11-29 21:27:52 +08:00
905 changed files with 59724 additions and 26975 deletions

2
.github/FUNDING.yml vendored
View File

@ -2,7 +2,7 @@
github: #[KFCFans] github: #[KFCFans]
patreon: # Replace with a single Patreon username patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username open_collective: powerjob
ko_fi: # Replace with a single Ko-fi username ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry

27
.github/PULL_REQUEST_TEMPLATE.md vendored Normal file
View File

@ -0,0 +1,27 @@
## What is the purpose of the change
For example: Making PowerJob better
## Brief changelog
It is best to associate an existing issue
## Verifying this change
Do I need to test?
Has testing been completed?
Test method?
Follow this checklist to help us incorporate your contribution quickly and easily. Notice, `it would be helpful if you could finish the following 3 checklist items before requesting the community to review your PR`.
- [x] Make sure there is a [Github issue](https://github.com/PowerJob/PowerJob/issues) filed for the change (usually before you start working on it). Trivial changes like typos do not require a Github issue. Your pull request should address just this issue, without pulling in other changes - one PR resolves one issue.
- [x] Write a pull request description that is detailed enough to understand what the pull request does, how, and why.
- [x] Follow the git commit specification
* feat: xxx -> The feat type is used to identify production changes related to new backward-compatible abilities or functionality.
* perf: xxx -> The perf type is used to identify production changes related to backward-compatible performance improvements.
* fix: xxx -> The fix type is used to identify production changes related to backward-compatible bug fixes.
* docs: xxx -> The docs type is used to identify documentation changes related to the project - whether intended externally for the end users (in case of a library) or internally for the developers.
* test: xxx -> The test type is used to identify development changes related to tests - such as refactoring existing tests or adding new tests.
* refactor: xxx -> The refactor type is used to identify development changes related to modifying the codebase, which neither adds a feature nor fixes a bug - such as removing redundant code, simplifying the code, renaming variables, etc.

View File

@ -1,30 +0,0 @@
name: Docker Image CI
on:
push:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build the Docker image
run: mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am && /bin/cp -rf powerjob-server/target/*.jar powerjob-server/docker/powerjob-server.jar && /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
- uses: docker/build-push-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
repository: tjqq/powerjob-server
tags: latest
path: powerjob-server/docker/
- uses: docker/build-push-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
repository: tjqq/powerjob-agent
tags: latest
path: powerjob-worker-agent/

68
.github/workflows/docker_publish.yml vendored Normal file
View File

@ -0,0 +1,68 @@
name: build_docker
on:
push:
tags:
- 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
jobs:
build_docker:
name: Build docker
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Build Maven Project
uses: actions/setup-java@v4
with:
java-version: '8'
distribution: 'temurin'
- name: Publish package
run: mvn clean package -Pdev -DskipTests -U -e && /bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar && /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar && /bin/cp -rf powerjob-worker-samples/target/*.jar powerjob-worker-samples/powerjob-worker-samples.jar
# Login
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build And Push [powerjob-server]
uses: docker/build-push-action@v6
with:
context: powerjob-server/docker/
push: true
platforms: linux/amd64,linux/arm64
tags: |
tjqq/powerjob-server:latest
powerjob/powerjob-server:latest
tjqq/powerjob-server:${{ github.ref_name }}
powerjob/powerjob-server:${{ github.ref_name }}
- name: Build And Push [powerjob-agent]
uses: docker/build-push-action@v6
with:
context: powerjob-worker-agent/
push: true
platforms: linux/amd64,linux/arm64
tags: |
tjqq/powerjob-agent:latest
powerjob/powerjob-agent:latest
tjqq/powerjob-agent:${{ github.ref_name }}
powerjob/powerjob-agent:${{ github.ref_name }}
- name: Build And Push [powerjob-worker-samples]
uses: docker/build-push-action@v6
with:
context: powerjob-worker-samples/
push: true
platforms: linux/amd64,linux/arm64
tags: |
tjqq/powerjob-worker-samples:latest
powerjob/powerjob-worker-samples:latest
tjqq/powerjob-worker-samples:${{ github.ref_name }}
powerjob/powerjob-worker-samples:${{ github.ref_name }}

View File

@ -1,38 +0,0 @@
# This workflow will build a Java project with Maven
# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
name: Java CI with Maven
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up JDK 1.8
uses: actions/setup-java@v1
with:
java-version: 1.8
- name: Build with Maven
run: mvn -B clean package -Pdev -DskipTests --file pom.xml
- name: upload build result
run: mkdir staging && cp powerjob-server/target/*.jar staging/powerjob-server.jar && cp powerjob-client/target/*.jar staging/powerjob-client.jar && cp powerjob-worker-agent/target/*.jar staging/powerjob-agent.jar
- uses: actions/upload-artifact@v1
with:
name: powerjob-server.jar
path: staging/powerjob-server.jar
- uses: actions/upload-artifact@v1
with:
name: powerjob-client.jar
path: staging/powerjob-client.jar
- uses: actions/upload-artifact@v1
with:
name: powerjob-agent.jar
path: staging/powerjob-agent.jar

28
.github/workflows/maven_build.yml vendored Normal file
View File

@ -0,0 +1,28 @@
# This workflow will build a Java project with Maven
# For more information see: https://docs.github.com/zh/actions/use-cases-and-examples/building-and-testing/building-and-testing-java-with-maven
name: Java CI with Maven
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
with:
java-version: '8'
distribution: 'temurin'
- run: mvn -B clean package -Pdev -DskipTests --file pom.xml
- run: mkdir staging && cp powerjob-server/powerjob-server-starter/target/*.jar staging/powerjob-server.jar && cp powerjob-client/target/*.jar staging/powerjob-client.jar && cp powerjob-worker-agent/target/*.jar staging/powerjob-agent.jar && cp powerjob-worker-spring-boot-starter/target/*.jar staging/powerjob-worker-spring-boot-starter.jar
- uses: actions/upload-artifact@v4
with:
name: Package
path: staging

22
.github/workflows/maven_publish.yml vendored Normal file
View File

@ -0,0 +1,22 @@
name: Publish package to the Maven Central Repository
on:
release:
types: [created]
jobs:
publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Maven Central Repository
uses: actions/setup-java@v4
with:
java-version: '8'
distribution: 'temurin'
server-id: ossrh
server-username: MAVEN_USERNAME
server-password: MAVEN_PASSWORD
- name: Publish package
run: mvn --batch-mode clean deploy -pl powerjob-worker,powerjob-client,powerjob-worker-spring-boot-starter,powerjob-official-processors,powerjob-worker-agent -DskipTests -Prelease -am
env:
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_TOKEN }}

8
.gitignore vendored
View File

@ -35,3 +35,11 @@ build/
*.log *.log
*/.DS_Store */.DS_Store
.DS_Store .DS_Store
.phd
.txt
.trc
*/.phd
*/.txt
*/.trc
powerjob-data/

View File

@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier same "printed page" as the copyright notice for easier
identification within third-party archives. identification within third-party archives.
Copyright [yyyy] [name of copyright owner] Copyright [2021] [PowerJob]
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. you may not use this file except in compliance with the License.

109
README.md
View File

@ -1,76 +1,77 @@
# English | [简体中文](./README_zhCN.md)
<p align="center">
🏮PowerJob 全体成员祝大家龙年腾飞,新的一年身体健康,万事如意,阖家欢乐,幸福安康!🏮
</p>
<p align="center"> <p align="center">
<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/logo.png" alt="PowerJob" title="PowerJob" width="557"/> <img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/logo.png" alt="PowerJob" title="PowerJob" width="557"/>
</p> </p>
<p align="center"> <p align="center">
<a href="https://github.com/KFCFans/PowerJob/actions"><img src="https://github.com/KFCFans/PowerJob/workflows/Java%20CI%20with%20Maven/badge.svg?branch=master" alt="actions"></a> <a href="https://github.com/PowerJob/PowerJob/actions"><img src="https://github.com/PowerJob/PowerJob/workflows/Java%20CI%20with%20Maven/badge.svg?branch=master" alt="actions"></a>
<a href="https://search.maven.org/search?q=com.github.kfcfans"><img alt="Maven Central" src="https://img.shields.io/maven-central/v/com.github.kfcfans/powerjob-worker"></a> <a href="https://central.sonatype.com/search?smo=true&q=powerjob-worker&namespace=tech.powerjob"><img alt="Maven Central" src="https://img.shields.io/maven-central/v/tech.powerjob/powerjob-worker"></a>
<a href="https://github.com/KFCFans/PowerJob/releases"><img alt="GitHub release (latest SemVer)" src="https://img.shields.io/github/v/release/kfcfans/powerjob?color=%23E59866"></a> <a href="https://github.com/PowerJob/PowerJob/releases"><img alt="GitHub release (latest SemVer)" src="https://img.shields.io/github/v/release/kfcfans/powerjob?color=%23E59866"></a>
<a href="https://github.com/KFCFans/PowerJob/blob/master/LICENSE"><img src="https://img.shields.io/github/license/KFCFans/PowerJob" alt="LICENSE"></a> <a href="https://github.com/PowerJob/PowerJob/blob/master/LICENSE"><img src="https://img.shields.io/github/license/KFCFans/PowerJob" alt="LICENSE"></a>
</p> </p>
PowerJob原OhMyScheduler是全新一代分布式调度与计算框架能让您轻松完成作业的调度与繁杂任务的分布式计算。 [PowerJob](https://github.com/PowerJob/PowerJob) is an open-source distributed computing and job scheduling framework which allows developers to easily schedule tasks in their own application.
# 简介
### 主要特性
* 使用简单提供前端Web界面允许开发者可视化地完成调度任务的管理增、删、改、查、任务运行状态监控和运行日志查看等功能。
* 定时策略完善支持CRON表达式、固定频率、固定延迟和API四种定时调度策略。
* 执行模式丰富支持单机、广播、Map、MapReduce四种执行模式其中Map/MapReduce处理器能使开发者寥寥数行代码便获得集群分布式计算的能力。
* DAG工作流支持支持在线配置任务依赖关系可视化得对任务进行编排同时还支持上下游任务间的数据传递
* 执行器支持广泛支持Spring Bean、内置/外置Java类、Shell、Python等处理器应用范围广。
* 运维便捷支持在线日志功能执行器产生的日志可以在前端控制台页面实时显示降低debug成本极大地提高开发效率。
* 依赖精简最小仅依赖关系型数据库MySQL/Oracle/MS SQLServer...扩展依赖为MongoDB用于存储庞大的在线日志
* 高可用&高性能:调度服务器经过精心设计,一改其他调度框架基于数据库锁的策略,实现了无锁化调度。部署多个调度服务器可以同时实现高可用和性能的提升(支持无限的水平扩展)。
* 故障转移与恢复:任务执行失败后,可根据配置的重试策略完成重试,只要执行器集群有足够的计算节点,任务就能顺利完成。
### 适用场景 Refer to [PowerJob Introduction](https://www.yuque.com/powerjob/en/introduce) for detailed information.
* 有定时执行需求的业务场景:如每天凌晨全量同步数据、生成业务报表等。
* 有需要全部机器一同执行的业务场景:如使用广播执行模式清理集群日志。
* 有需要分布式处理的业务场景比如需要更新一大批数据单机执行耗时非常长可以使用Map/MapReduce处理器完成任务的分发调动整个集群加速计算。
* 有需要**延迟执行**某些任务的业务场景:比如订单过期处理等。
### 设计目标 # Introduction
PowerJob 的设计目标为企业级的分布式任务调度平台,即成为公司内部的**任务调度中间件**。整个公司统一部署调度中心 powerjob-server旗下所有业务线应用只需要依赖 `powerjob-worker` 即可接入调度中心获取任务调度与分布式计算能力。
### 在线试用 ### Features
试用地址:[try.powerjob.tech](http://try.powerjob.tech/) - **Friendly UI:** [Front-end](http://try.powerjob.tech/#/welcome?appName=powerjob-agent-test&password=123) page is provided and developers can manage their task, monitor the status, check the logs online, etc.
试用应用名称powerjob-agent-test
控制台密码123
[建议点击查看试用文档了解相关操作](https://www.yuque.com/powerjob/guidence/hnbskn) - **Abundant Timing Strategies:** Four timing strategies are supported, including CRON expression, fixed rate, fixed delay and OpenAPI which allows you to define your own scheduling policies, such as delaying execution.
### 同类产品对比 - **Multiple Execution Mode:** Four execution modes are supported, including stand-alone, broadcast, Map and MapReduce. Distributed computing resource could be utilized in MapReduce mode, try the magic out [here](https://www.yuque.com/powerjob/en/za1d96#9YOnV)!
| | QuartZ | xxl-job | SchedulerX 2.0 | PowerJob |
| -------------- | ------------------------ | ---------------------------------------- | ------------------------------------------------- | ------------------------------------------------------------ |
| 定时类型 | CRON | CRON | CRON、固定频率、固定延迟、OpenAPI | **CRON、固定频率、固定延迟、OpenAPI** |
| 任务类型 | 内置Java | 内置Java、GLUE Java、Shell、Python等脚本 | 内置Java、外置JavaFatJar、Shell、Python等脚本 | **内置Java、外置Java容器、Shell、Python等脚本** |
| 分布式计算 | 无 | 静态分片 | MapReduce动态分片 | **MapReduce动态分片** |
| 在线任务治理 | 不支持 | 支持 | 支持 | **支持** |
| 日志白屏化 | 不支持 | 支持 | 不支持 | **支持** |
| 调度方式及性能 | 基于数据库锁,有性能瓶颈 | 基于数据库锁,有性能瓶颈 | 不详 | **无锁化设计,性能强劲无上限** |
| 报警监控 | 无 | 邮件 | 短信 | **WebHook、邮件、钉钉与自定义扩展** |
| 系统依赖 | JDBC支持的关系型数据库MySQL、Oracle... | MySQL | 人民币 | **任意Spring Data Jpa支持的关系型数据库MySQL、Oracle...** |
| DAG工作流 | 不支持 | 不支持 | 支持 | **支持** |
- **Workflow(DAG) Support:** Both job dependency management and data communications between jobs are supported.
# 官方文档 - **Extensive Processor Support:** Developers can write their processors in Java, Shell, Python, and will subsequently support multilingual scheduling via HTTP.
**[中文文档](https://www.yuque.com/powerjob/guidence/ztn4i5)**
**[Document](https://www.yuque.com/powerjob/en/xrdoqw)** - **Powerful Disaster Tolerance:** As long as there are enough computing nodes, configurable retry policies make it possible for your task to be executed and finished successfully.
PS感谢文档翻译平台[breword](https://www.breword.com/)对本项目英文文档翻译做出的巨大贡献! - **High Availability & High Performance:** PowerJob supports unlimited horizontal expansion. It's easy to achieve high availability and performance by deploying as many PowerJob server and worker nodes.
# 接入登记 ### Applicable scenes
[点击进行接入登记,为 PowerJob 的发展贡献自己的力量!](https://github.com/KFCFans/PowerJob/issues/6)
ღ( ´・ᴗ・\` )ღ 感谢以下接入用户的大力支持 ღ( ´・ᴗ・\` )ღ - Timed tasks, for example, allocating e-coupons on 9 AM every morning.
- Broadcast tasks, for example, broadcasting to the cluster to clear logs.
- MapReduce tasks, for example, speeding up certain job like updating large amounts of data.
- Delayed tasks, for example, processing overdue orders.
- Customized tasks, triggered with [OpenAPI](https://www.yuque.com/powerjob/en/openapi).
<p align="center"> ### Online trial
- Address: [try.powerjob.tech](http://try.powerjob.tech/#/welcome?appName=powerjob-agent-test&password=123)
- Recommend reading the documentation first: [here](https://www.yuque.com/powerjob/en/trial)
# Documents
**[Docs](https://www.yuque.com/powerjob/en/introduce)**
**[中文文档](https://www.yuque.com/powerjob/guidence/intro)**
# Known Users
[Click to register as PowerJob user!](https://github.com/PowerJob/PowerJob/issues/6)
ღ( ´・ᴗ・\` )ღ Many thanks to the following registered users. ღ( ´・ᴗ・\` )ღ
<p style="text-align: center">
<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/user.png" alt="PowerJob User" title="PowerJob User"/> <img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/user.png" alt="PowerJob User" title="PowerJob User"/>
</p> </p>
# 其他 # Stargazers over time
* 开源许可证Apache License, Version 2.0
* 欢迎共同参与本项目的贡献PR和Issue都大大滴欢迎求求了 [![Stargazers over time](https://starchart.cc/PowerJob/PowerJob.svg)](https://starchart.cc/PowerJob/PowerJob)
* 觉得还不错的话可以点个Star支持一下哦 = ̄ω ̄=
* 联系方式@KFCFans -> `tengjiqi@gmail.com` # License
* 用户交流QQ群487453839
PowerJob is released under Apache License 2.0. Please refer to [License](./LICENSE) for details.
# Others
- Any developer interested in getting more involved in PowerJob may join our [Reddit](https://www.reddit.com/r/PowerJob) or [Gitter](https://gitter.im/PowerJob/community) community and make [contributions](https://github.com/PowerJob/PowerJob/pulls)!
- Reach out to me through email **tengjiqi@gmail.com**. Any issues or questions are welcomed on [Issues](https://github.com/PowerJob/PowerJob/issues).
- Look forward to your opinions. Response may be late but not denied.

View File

@ -1,63 +0,0 @@
<p align="center">
<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/logo.png" alt="PowerJob" title="PowerJob" width="557"/>
</p>
<p align="center">
<a href="https://github.com/KFCFans/PowerJob/actions"><img src="https://github.com/KFCFans/PowerJob/workflows/Java%20CI%20with%20Maven/badge.svg?branch=master" alt="actions"></a>
<a href="https://search.maven.org/search?q=com.github.kfcfans"><img alt="Maven Central" src="https://img.shields.io/maven-central/v/com.github.kfcfans/powerjob-worker"></a>
<a href="https://github.com/KFCFans/PowerJob/releases"><img alt="GitHub release (latest SemVer)" src="https://img.shields.io/github/v/release/kfcfans/powerjob?color=%23E59866"></a>
<a href="https://github.com/KFCFans/PowerJob/blob/master/LICENSE"><img src="https://img.shields.io/github/license/KFCFans/PowerJob" alt="LICENSE"></a>
</p>
- Have you ever wondered how cron jobs could be organized orderly?
- Have you ever felt upset when scheduling tasks suddenly terminated without any warning?
- Have you ever felt helpless when batches of business tasks require handling?
- Have you ever felt depressed about tasks that carry with complex dependencies?
Well, PowerJob is there for you, it is the choice of a new generation. It is a powerful, business-oriented scheduling framework that provides distributed computing ability. Based on Akka architecture, it makes everything with scheduling easier. Just with several steps, PowerJob could be deployed and work for you!
# Introduction
### Features
- Simple to use: PowerJob provides a friendly front-end Web that allows developers to visually manage tasks (Create, Read, Update and Delete), monitor task status, and view operation logs online.
- Complete timing strategy: PowerJob supports four different scheduling strategies, including CRON expression, fixed frequency timing, fixed delay timing as well as the Open API.
- Various execution modes: PowerJob supports four execution modes: stand-alone, broadcast, Map, and MapReduce. It's worth mentioning the Map and MapReduce modes. With the completion of several lines of codes, developers could take full advantage of PowerJob's distributed computing ability.
- Complete workflow support. PowerJob supports DAG(Directed acyclic graph) based online task configuration. Developers could arrange tasks on the console, while data could be transferred between tasks on the flow.
- Extensive executor support: PowerJob supports multiple processors, including Spring Beans, ordinary Java objects, Shell, Python and so on.
- Simple in dependency: PowerJob aims to be simple in dependency. The only dependency is merely database (MySQL / Oracle / MS SQLServer ...), with MongoDB being the extra dependency for storing huge online logs.
- High availability and performance: Unlike traditional job-scheduling frameworks which rely on database locks, PowerJob server is lock-free when scheduling. PowerJob supports unlimited horizontal expansion. It's easy to achieve high availability and performance just by deploying as many PowerJob server instances as you need.
- Quick failover and recovery support: Whenever any task failed, PowerJob server would retry according to the configured strategy. As long as there were enough nodes in the cluster, the failed tasks could eventually execute successfully.
- Convenient to run and maintain: PowerJob supports online logging. Logs generated by the worker would be transferred and displayed on the console instantly, therefore reducing the cost of debugging and improving the efficiency for developers significantly.
### Applicable scene
- Scenarios with timed tasks: such as full synchronization of data at midnight, generating business reports at desired time.
- Scenarios that require all machines to run tasks simultaneously: such as log cleanup.
- Scenarios that require distributed processing: For example, a large amount of data requires updating, while the stand-alone execution takes quite a lot of time. The Map/MapReduce mode could be applied while the workers would join the cluster for PowerJob server to dispatch, to speed up the time-consuming process, therefore improving the computing ability of the whole cluster.
### Comparison of similar products
| | QuartZ | xxl-job | SchedulerX 2.0 | PowerJob |
| ---------------------------------- | --------------------------------------------------------- | --------------------------------------------------------- | ------------------------------------------------------------ | ------------------------------------------------------------ |
| Timing type | CRON | CRON | CRON, fixed frequency, fixed delay, OpenAPI | **CRON, fixed frequency, fixed delay, OpenAPI** |
| Task type | Built-in Java | Built-in Java, GLUE Java, Shell, Python and other scripts | Built-in Java, external Java (FatJar), Shell, Python and other scripts | **Built-in Java, external Java (container), Shell, Python and other scripts** |
| Distributed strategy | Unsupported | Static sharding | MapReduce dynamic sharding | **MapReduce dynamic sharding** |
| Online task management | Unsupported | Supported | Supported | **Supported** |
| Online logging | Unsupported | Supported | Unsupported | **Supported** |
| Scheduling methods and performance | Based on database lock, there is a performance bottleneck | Based on database lock, there is a performance bottleneck | Unknown | **Lock-free design, powerful performance without upper limit** |
| Alarm monitoring | Unsupported | Email | SMS | **Email, WebHook, Dingtalk. An interface is provided for customization.** |
| System dependence | Any relational database (MySQL, Oracle ...) supported by JDBC | MySQL | RMB (free during public beta, hey, help to advertise) | **Any relational database (MySQL, Oracle ...) supported by Spring Data Jpa** |
| workflow | Unsupported | Unsupported | Supported | **Supported** |
# Document
**[GitHub Wiki](https://github.com/KFCFans/PowerJob/wiki)**
**[中文文档](https://www.yuque.com/powerjob/product)**
# Others
- PowerJob is permanently open source software(Apache License, Version 2.0), please feel free to try, use or deploy!
- Owner of PowerJob (@KFCFans) has abundant time for maintenance, and is willing to provide technical support if you have needs!
- Welcome to contribute to PowerJob, both Pull Requests and Issues are precious.
- Please STAR PowerJob if it is valuable. ~ =  ̄ω ̄ =
- Do you need any help or want to propose suggestions? Please raise Github issues or contact the Author @KFCFans-> `tengjiqi@gmail.com` directly.

78
README_zhCN.md Normal file
View File

@ -0,0 +1,78 @@
# [English](./README.md) | 简体中文
<p align="center">
🏮PowerJob 全体成员祝大家龙年腾飞,新的一年身体健康,万事如意,阖家欢乐,幸福安康!🏮
</p>
<p align="center">
<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/logo.png" alt="PowerJob" title="PowerJob" width="557"/>
</p>
<p align="center">
<a href="https://github.com/PowerJob/PowerJob/actions"><img src="https://github.com/PowerJob/PowerJob/workflows/Java%20CI%20with%20Maven/badge.svg?branch=master" alt="actions"></a>
<a href="https://search.maven.org/search?q=tech.powerjob"><img alt="Maven Central" src="https://img.shields.io/maven-central/v/tech.powerjob/powerjob-worker"></a>
<a href="https://github.com/PowerJob/PowerJob/releases"><img alt="GitHub release (latest SemVer)" src="https://img.shields.io/github/v/release/kfcfans/powerjob?color=%23E59866"></a>
<a href="https://github.com/PowerJob/PowerJob/blob/master/LICENSE"><img src="https://img.shields.io/github/license/KFCFans/PowerJob" alt="LICENSE"></a>
</p>
PowerJob原OhMyScheduler是全新一代分布式调度与计算框架,能让您轻松完成作业的调度与繁杂任务的分布式计算。
# 简介
### 主要特性
* 使用简单提供前端Web界面允许开发者可视化地完成调度任务的管理增、删、改、查、任务运行状态监控和运行日志查看等功能。
* 定时策略完善支持CRON表达式、固定频率、固定延迟和API四种定时调度策略。
* 执行模式丰富支持单机、广播、Map、MapReduce四种执行模式其中Map/MapReduce处理器能使开发者寥寥数行代码便获得集群分布式计算的能力。
* DAG工作流支持支持在线配置任务依赖关系可视化得对任务进行编排同时还支持上下游任务间的数据传递
* 执行器支持广泛支持Spring Bean、内置/外置Java类、Shell、Python等处理器应用范围广。
* 运维便捷支持在线日志功能执行器产生的日志可以在前端控制台页面实时显示降低debug成本极大地提高开发效率。
* 依赖精简最小仅依赖关系型数据库MySQL/Oracle/MS SQLServer...)。
* 高可用&高性能:调度服务器经过精心设计,一改其他调度框架基于数据库锁的策略,实现了无锁化调度。部署多个调度服务器可以同时实现高可用和性能的提升(支持无限的水平扩展)。
* 故障转移与恢复:任务执行失败后,可根据配置的重试策略完成重试,只要执行器集群有足够的计算节点,任务就能顺利完成。
### 适用场景
* 有定时执行需求的业务场景:如每天凌晨全量同步数据、生成业务报表等。
* 有需要全部机器一同执行的业务场景:如使用广播执行模式清理集群日志。
* 有需要分布式处理的业务场景比如需要更新一大批数据单机执行耗时非常长可以使用Map/MapReduce处理器完成任务的分发调动整个集群加速计算。
* 有需要**延迟执行**某些任务的业务场景:比如订单过期处理等。
### 设计目标
PowerJob 的设计目标为企业级的分布式任务调度平台,即成为公司内部的**任务调度中间件**。整个公司统一部署调度中心 powerjob-server旗下所有业务线应用只需要依赖 `powerjob-worker` 即可接入调度中心获取任务调度与分布式计算能力。
### 在线试用
* [点击查看试用说明和教程](https://www.yuque.com/powerjob/guidence/trial)
### 同类产品对比
| | QuartZ | xxl-job | SchedulerX 2.0 | PowerJob |
| -------------- | ------------------------ | ---------------------------------------- | ------------------------------------------------- | ------------------------------------------------------------ |
| 定时类型 | CRON | CRON | CRON、固定频率、固定延迟、OpenAPI | **CRON、固定频率、固定延迟、OpenAPI** |
| 任务类型 | 内置Java | 内置Java、GLUE Java、Shell、Python等脚本 | 内置Java、外置JavaFatJar、Shell、Python等脚本 | **内置Java、外置Java容器、Shell、Python等脚本** |
| 分布式计算 | 无 | 静态分片 | MapReduce动态分片 | **MapReduce动态分片** |
| 在线任务治理 | 不支持 | 支持 | 支持 | **支持** |
| 日志白屏化 | 不支持 | 支持 | 不支持 | **支持** |
| 调度方式及性能 | 基于数据库锁,有性能瓶颈 | 基于数据库锁,有性能瓶颈 | 不详 | **无锁化设计,性能强劲无上限** |
| 报警监控 | 无 | 邮件 | 短信 | **WebHook、邮件、钉钉与自定义扩展** |
| 系统依赖 | JDBC支持的关系型数据库MySQL、Oracle... | MySQL | 人民币 | **任意Spring Data Jpa支持的关系型数据库MySQL、Oracle...** |
| DAG工作流 | 不支持 | 不支持 | 支持 | **支持** |
# 官方文档
**[中文文档](https://www.yuque.com/powerjob/guidence/intro)**
**[Docs](https://www.yuque.com/powerjob/en/introduce)**
# 接入登记
[点击进行接入登记,为 PowerJob 的发展贡献自己的力量!](https://github.com/PowerJob/PowerJob/issues/6)
ღ( ´・ᴗ・\` )ღ 感谢以下接入用户的大力支持 ღ( ´・ᴗ・\` )ღ
<p align="center">
<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/user.png" alt="PowerJob User" title="PowerJob User"/>
</p>
# 其他
* 开源许可证Apache License, Version 2.0
* 欢迎共同参与本项目的贡献PR和Issue都大大滴欢迎求求了
* 觉得还不错的话可以点个Star支持一下哦 = ̄ω ̄=
* 联系方式@KFCFans -> `tengjiqi@gmail.com`
* 用户交流QQ群因广告信息泛滥加群需要验证请认真填写申请原因
* 一群已满487453839
* 二群834937813

4
SECURITY.md Normal file
View File

@ -0,0 +1,4 @@
# Security notices relating to PowerJob
Please disclose any security issues or vulnerabilities found through [Tidelift's coordinated disclosure system](https://tidelift.com/security) or to the maintainers privately (tengjiqi@gmail.com).

54
docker-compose.yml Normal file
View File

@ -0,0 +1,54 @@
# Usage (V4.3.1):
# 1. Run `docker-compose up` from the PowerJob root directory.
# 2. Wait patiently for the services to start.
version: '3'
services:
  powerjob-mysql:
    environment:
      MYSQL_ROOT_HOST: "%"
      MYSQL_ROOT_PASSWORD: No1Bug2Please3!
    restart: always
    container_name: powerjob-mysql
    image: powerjob/powerjob-mysql:latest
    ports:
      - "3307:3306"
    volumes:
      - ./powerjob-data/powerjob-mysql:/var/lib/mysql
    # Force lower-case table names so the schema behaves the same across platforms.
    command: --lower_case_table_names=1
  powerjob-server:
    container_name: powerjob-server
    image: powerjob/powerjob-server:latest
    restart: always
    depends_on:
      - powerjob-mysql
    environment:
      JVMOPTIONS: "-Xmx512m"
      PARAMS: "--oms.mongodb.enable=false --spring.datasource.core.jdbc-url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai"
    ports:
      - "7700:7700"
      - "10086:10086"
      - "10010:10010"
    volumes:
      - ./powerjob-data/powerjob-server:/root/powerjob/server/
  powerjob-worker-samples:
    container_name: powerjob-worker-samples
    image: powerjob/powerjob-worker-samples:latest
    restart: always
    depends_on:
      - powerjob-mysql
      - powerjob-server
#    environment:
#      PARAMS: "--powerjob.worker.server-address=powerjob-server:7700"
    ports:
      - "8081:8081"
      - "27777:27777"
    volumes:
      - ./powerjob-data/powerjob-worker-samples:/root/powerjob/worker
      - ./others/script/wait-for-it.sh:/wait-for-it.sh
    # Block until the server port is reachable, then start the worker samples jar.
    entrypoint:
      - "sh"
      - "-c"
      - "chmod +x wait-for-it.sh && ./wait-for-it.sh powerjob-server:7700 --strict -- java -Xmx512m -jar /powerjob-worker-samples.jar --powerjob.worker.server-address=powerjob-server:7700"

11
others/Dockerfile Normal file
View File

@ -0,0 +1,11 @@
# MySQL image pre-loaded with the PowerJob schema and sample data.
FROM mysql/mysql-server:8.0.30
# MAINTAINER is deprecated since Docker 1.13; use a LABEL instead.
LABEL maintainer="dudiao(idudaio@163.com)"
ENV TZ=Asia/Shanghai
RUN ln -sf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Scripts in /docker-entrypoint-initdb.d run (in name order) on first startup.
COPY sql/01schema.sql /docker-entrypoint-initdb.d
COPY sql/02worker-samples.sql /docker-entrypoint-initdb.d

19
others/dev/build_test_env.sh Executable file
View File

@ -0,0 +1,19 @@
#!/bin/bash
# Build the PowerJob test environment: stop running services, package the jars,
# copy them into the docker build contexts, then rebuild and start via docker-compose.
echo "================== 关闭全部服务 =================="
docker-compose down
echo "================== 构建 jar =================="
cd `dirname $0`/../.. || exit
# mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
# -U: force snapshot updates; -pl: modules to build (comma-separated); -am: also build required modules (usually with -pl); -Pxxx: Maven profile to use
mvn clean package -Pdev -DskipTests
echo "================== 拷贝 jar =================="
/bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
/bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
ls -l powerjob-server/docker/powerjob-server.jar
ls -l powerjob-worker-agent/powerjob-agent.jar
cd others/dev
docker-compose build
docker-compose --compatibility up

View File

@ -0,0 +1,109 @@
# 构建 PowerJob 测试环境
version: '3.7'
services:
powerjob-mysql:
build:
context: ../
environment:
MYSQL_ROOT_HOST: "%"
MYSQL_ROOT_PASSWORD: No1Bug2Please3!
deploy:
resources:
limits:
memory: 768M
restart: always
container_name: powerjob-mysql
image: powerjob/powerjob-mysql:test_env
ports:
- "3309:3306"
volumes:
- ~/powerjob-data/powerjob-mysql:/var/lib/mysql
command: --lower_case_table_names=1
# powerjob-mongodb:
# image: mongo:latest
# container_name: powerjob-mongodb
# restart: always
# deploy:
# resources:
# limits:
# memory: 256M
# environment:
# MONGO_INITDB_ROOT_USERNAME: "root"
# MONGO_INITDB_ROOT_PASSWORD: "No1Bug2Please3!"
# MONGO_INITDB_DATABASE: "powerjob_daily"
# ports:
# - "27017:27017"
# volumes:
# - ./testenv/init_mongodb.js:/docker-entrypoint-initdb.d/mongo-init.js:ro
# - ~/powerjob-data/powerjob-mongodb:/data/db
powerjob-server:
build:
context: ../../powerjob-server/docker
deploy:
resources:
limits:
memory: 896M
container_name: powerjob-server
image: powerjob/powerjob-server:test_env
restart: always
depends_on:
- powerjob-mysql
# - powerjob-mongodb
environment:
PARAMS: "--spring.profiles.active=daily --spring.datasource.core.jdbc-url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai --oms.storage.dfs.mysql_series.url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai"
JVMOPTIONS: "-server -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/powerjob/server/gc.log"
ports:
- "7700:7700"
- "10086:10086"
- "10010:10010"
volumes:
- ~/powerjob-data/powerjob-server:/root/powerjob/server/
- ~/.m2:/root/.m2
powerjob-worker-agent:
build:
context: ../../powerjob-worker-agent
deploy:
resources:
limits:
memory: 384M
container_name: powerjob-worker-agent
image: powerjob/powerjob-worker-agent:test_env
restart: always
depends_on:
- powerjob-mysql
- powerjob-server
ports:
- "5002:5005"
- "10002:10000"
- "27777:27777"
volumes:
- ~/powerjob-data/powerjob-worker-agent:/root
entrypoint:
- "sh"
- "-c"
- "./wait-for-it.sh powerjob-server:7700 --strict -- java -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -server -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/gc.log -jar /powerjob-agent.jar --app powerjob-worker-samples --server powerjob-server:7700"
powerjob-worker-agent2:
deploy:
resources:
limits:
memory: 384M
container_name: powerjob-worker-agent2
image: powerjob/powerjob-worker-agent:test_env
restart: always
depends_on:
- powerjob-mysql
- powerjob-server
ports:
- "5003:5005"
- "10003:10000"
- "27778:27777"
volumes:
- ~/powerjob-data/powerjob-worker-agent2:/root
entrypoint:
- "sh"
- "-c"
- "./wait-for-it.sh powerjob-server:7700 --strict -- java -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -server -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/gc.log -jar /powerjob-agent.jar --app powerjob-worker-samples --server powerjob-server:7700"

View File

@ -11,10 +11,11 @@ cd `dirname $0`/../.. || exit
read -r -p "是否进行maven构建y/n:" needmvn read -r -p "是否进行maven构建y/n:" needmvn
if [ "$needmvn" = "y" ] || [ "$needmvn" = "Y" ]; then if [ "$needmvn" = "y" ] || [ "$needmvn" = "Y" ]; then
echo "================== 构建 jar ==================" echo "================== 构建 jar =================="
# mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
# -U强制检查snapshot库 -pl指定需要构建的模块多模块逗号分割 -am同时构建依赖模块一般与pl连用 -Pxxx指定使用的配置文件 # -U强制检查snapshot库 -pl指定需要构建的模块多模块逗号分割 -am同时构建依赖模块一般与pl连用 -Pxxx指定使用的配置文件
mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am mvn clean package -Pdev -DskipTests -U -e
echo "================== 拷贝 jar ==================" echo "================== 拷贝 jar =================="
/bin/cp -rf powerjob-server/target/*.jar powerjob-server/docker/powerjob-server.jar /bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
/bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
ls -l powerjob-server/docker/powerjob-server.jar ls -l powerjob-server/docker/powerjob-server.jar
ls -l powerjob-worker-agent/powerjob-agent.jar ls -l powerjob-worker-agent/powerjob-agent.jar
@ -32,11 +33,19 @@ read -r -p "是否重新构建镜像y/n:" rebuild
if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
echo "================== 删除旧镜像 ==================" echo "================== 删除旧镜像 =================="
docker rmi -f tjqq/powerjob-server:$version docker rmi -f tjqq/powerjob-server:$version
docker rmi -f powerjob/powerjob-server:$version
docker rmi -f tjqq/powerjob-agent:$version docker rmi -f tjqq/powerjob-agent:$version
docker rmi -f powerjob/powerjob-agent:$version
docker rmi -f powerjob/powerjob-mysql:$version
docker rmi -f powerjob/powerjob-worker-samples:$version
echo "================== 构建 powerjob-server 镜像 ==================" echo "================== 构建 powerjob-server 镜像 =================="
docker build -t tjqq/powerjob-server:$version powerjob-server/docker/. || exit docker build -t tjqq/powerjob-server:$version powerjob-server/docker/. || exit
echo "================== 构建 powerjob-agent 镜像 ==================" echo "================== 构建 powerjob-agent 镜像 =================="
docker build -t tjqq/powerjob-agent:$version powerjob-worker-agent/. || exit docker build -t tjqq/powerjob-agent:$version powerjob-worker-agent/. || exit
echo "================== 构建 powerjob-mysql 镜像 =================="
docker build -t powerjob/powerjob-mysql:$version others/. || exit
echo "================== 构建 powerjob-worker-samples 镜像 =================="
docker build -t powerjob/powerjob-worker-samples:$version powerjob-worker-samples/. || exit
read -r -p "是否正式发布该镜像y/n:" needrelease read -r -p "是否正式发布该镜像y/n:" needrelease
if [ "$needrelease" = "y" ] || [ "$needrelease" = "Y" ]; then if [ "$needrelease" = "y" ] || [ "$needrelease" = "Y" ]; then
@ -46,6 +55,25 @@ if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
docker push tjqq/powerjob-server:$version docker push tjqq/powerjob-server:$version
echo "================== 正在推送 agent 镜像到中央仓库 ==================" echo "================== 正在推送 agent 镜像到中央仓库 =================="
docker push tjqq/powerjob-agent:$version docker push tjqq/powerjob-agent:$version
echo "================== 正在推送 powerjob-mysql 镜像到中央仓库 =================="
docker push powerjob/powerjob-mysql:$version
echo "================== 正在推送 samples 镜像到中央仓库 =================="
docker push powerjob/powerjob-worker-samples:$version
echo "================== 双写推送 =================="
docker tag tjqq/powerjob-server:$version powerjob/powerjob-server:$version
docker push powerjob/powerjob-server:$version
docker tag tjqq/powerjob-agent:$version powerjob/powerjob-agent:$version
docker push powerjob/powerjob-agent:$version
echo "================== 更新 LATEST 版本 =================="
docker tag powerjob/powerjob-server:$version powerjob/powerjob-server:latest
docker push powerjob/powerjob-server:latest
docker tag powerjob/powerjob-agent:$version powerjob/powerjob-agent:latest
docker push powerjob/powerjob-agent:latest
docker tag powerjob/powerjob-mysql:$version powerjob/powerjob-mysql:latest
docker push powerjob/powerjob-mysql:latest
docker tag powerjob/powerjob-worker-samples:$version powerjob/powerjob-worker-samples:latest
docker push powerjob/powerjob-worker-samples:latest
echo "================== Docker 推送完毕 =================="
fi fi
fi fi
fi fi
@ -62,7 +90,7 @@ if [ "$startup" = "y" ] || [ "$startup" = "Y" ]; then
echo "================== 准备启动 powerjob-server ==================" echo "================== 准备启动 powerjob-server =================="
docker run -d \ docker run -d \
--name powerjob-server \ --name powerjob-server \
-p 7700:7700 -p 10086:10086 -p 5001:5005 -p 10001:10000 \ -p 7700:7700 -p 10086:10086 -p 10010:10010 -p 5001:5005 -p 10001:10000 \
-e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \ -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-e PARAMS="--spring.profiles.active=pre" \ -e PARAMS="--spring.profiles.active=pre" \
-e TZ="Asia/Shanghai" \ -e TZ="Asia/Shanghai" \

View File

@ -0,0 +1,71 @@
#!/bin/bash
# Multi-arch (linux/amd64 + linux/arm64) docker image release script for
# Apple Silicon machines, driven by `docker buildx`.
echo "A docker image release script for the Apple Silicon device."
# read -p: show a prompt string; -r: read raw input (no backslash escaping)
read -r -p "请输入Docker镜像版本:" version
echo "即将构建的 server 镜像powerjob-server:$version"
echo "即将构建的 agent 镜像powerjob-agent:$version"
read -r -p "任意键继续:"
# One-click deploy script — do not move it; it locates the repo root relative to itself.
cd `dirname $0`/../.. || exit
read -r -p "是否进行maven构建y/n:" needmvn
if [ "$needmvn" = "y" ] || [ "$needmvn" = "Y" ]; then
    echo "================== 构建 jar =================="
    # mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
    # -U: force snapshot updates; -pl: modules to build (comma-separated); -am: also build required modules (usually with -pl); -Pxxx: Maven profile to use
    mvn clean package -Pdev -DskipTests -U -e
    echo "================== 拷贝 jar =================="
    /bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
    /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
    ls -l powerjob-server/docker/powerjob-server.jar
    ls -l powerjob-worker-agent/powerjob-agent.jar
fi
echo "================== 关闭老应用 =================="
docker stop powerjob-server
docker stop powerjob-agent
docker stop powerjob-agent2
echo "================== 删除老容器 =================="
docker container rm powerjob-server
docker container rm powerjob-agent
docker container rm powerjob-agent2
read -r -p "是否构建并发布镜像y/n:" rebuild
if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
    echo "================== 删除旧镜像 =================="
    docker rmi -f tjqq/powerjob-server:$version
    docker rmi -f powerjob/powerjob-server:$version
    docker rmi -f tjqq/powerjob-agent:$version
    docker rmi -f powerjob/powerjob-agent:$version
    docker rmi -f powerjob/powerjob-mysql:$version
    docker rmi -f powerjob/powerjob-worker-samples:$version
    echo "================== 构建 powerjob-server 镜像(tjqq) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-server:$version powerjob-server/docker/. --push || exit
    echo "================== 构建 powerjob-server 镜像(powerjob) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-server:$version powerjob-server/docker/. --push || exit
    echo "================== 构建 powerjob-agent 镜像(tjqq) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-agent:$version powerjob-worker-agent/. --push|| exit
    echo "================== 构建 powerjob-agent 镜像(powerjob) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-agent:$version powerjob-worker-agent/. --push|| exit
    echo "================== 构建 powerjob-mysql 镜像 =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-mysql:$version others/. --push|| exit
    echo "================== 构建 powerjob-worker-samples 镜像 =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-worker-samples:$version powerjob-worker-samples/. --push|| exit
fi
read -r -p "是否推送LATESTy/n:" push_latest
if [ "$push_latest" = "y" ] || [ "$push_latest" = "Y" ]; then
    echo "================== powerjob-server LATEST (tjqq) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-server:latest powerjob-server/docker/. --push || exit
    echo "================== powerjob-server LATEST (powerjob) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-server:latest powerjob-server/docker/. --push || exit
    echo "================== powerjob-agent LATEST (tjqq) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-agent:latest powerjob-worker-agent/. --push|| exit
    echo "================== powerjob-agent LATEST (powerjob) =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-agent:latest powerjob-worker-agent/. --push|| exit
    echo "================== powerjob-mysql LATEST =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-mysql:latest others/. --push|| exit
    echo "================== powerjob-worker-samples LATEST =================="
    docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-worker-samples:latest powerjob-worker-samples/. --push|| exit
fi

View File

@ -0,0 +1,12 @@
// Initialize the MongoDB user for the PowerJob daily test environment:
// creates user "zqq" with read/write access to the `powerjob_daily` database.
// Runs automatically from /docker-entrypoint-initdb.d on first container start.
db.createUser(
{
user: "zqq",
pwd: "No1Bug2Please3!",
roles: [
{
role: "readWrite",
db: "powerjob_daily"
}
]
}
);

Binary file not shown.

Before

Width:  |  Height:  |  Size: 162 KiB

After

Width:  |  Height:  |  Size: 209 KiB

328
others/powerjob-mysql.sql Normal file
View File

@ -0,0 +1,328 @@
/*
 Reference schema dump taken from MySQL 8.
 For reference only — PowerJob normally creates/updates the schema
 automatically via Spring Data JPA; use this SQL when auto-DDL is disabled.
*/
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob5
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 11/08/2024 23:23:30
*/
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
-- Registered worker applications. app_name must be unique.
-- NOTE(review): current_server presumably records the schedule server
-- currently owning this app, and namespace_id links to `namespace` — confirm.
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
-- Database-backed lock table: lock_name is unique, so inserting a row
-- acquires the lock. NOTE(review): ownerip presumably identifies the
-- holding server and max_lock_time bounds the hold duration — confirm.
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_name` (`username`),
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -1,33 +1,32 @@
#!/bin/bash #!/bin/bash
cd `dirname $0`/../.. || exit cd `dirname $0`/../.. || exit
echo "================== 构建 jar ==================" echo "================== 构建 jar =================="
mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am mvn clean package -Pdev -DskipTests -e
echo "================== 拷贝 jar ==================" echo "================== 拷贝 jar =================="
/bin/cp -rf powerjob-server/target/*.jar powerjob-server/docker/powerjob-server.jar /bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
/bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
echo "================== 关闭老应用 ==================" echo "================== 关闭老应用 =================="
docker stop powerjob-server docker stop powerjob-server
docker stop powerjob-agent docker stop powerjob-worker-samples
docker stop powerjob-agent2 docker stop powerjob-worker-samples2
echo "================== 删除老容器 ==================" echo "================== 删除老容器 =================="
docker container rm powerjob-server docker container rm powerjob-server
docker container rm powerjob-agent docker container rm powerjob-worker-samples
docker container rm powerjob-agent2 docker container rm powerjob-worker-samples2
echo "================== 删除旧镜像 ==================" echo "================== 删除旧镜像 =================="
docker rmi -f tjqq/powerjob-server:latest docker rmi -f tjqq/powerjob-server:latest
docker rmi -f tjqq/powerjob-agent:latest docker rmi -f tjqq/powerjob-worker-samples:latest
echo "================== 构建 powerjob-server 镜像 ==================" echo "================== 构建 powerjob-server 镜像 =================="
docker build -t tjqq/powerjob-server:latest powerjob-server/docker/. || exit docker build -t tjqq/powerjob-server:latest powerjob-server/docker/. || exit
echo "================== 构建 powerjob-agent 镜像 ==================" echo "================== 构建 powerjob-worker-samples 镜像 =================="
docker build -t tjqq/powerjob-agent:latest powerjob-worker-agent/. || exit docker build -t tjqq/powerjob-worker-samples:latest powerjob-worker-samples/. || exit
echo "================== 准备启动 powerjob-server ==================" echo "================== 准备启动 powerjob-server =================="
docker run -d \ docker run -d \
--restart=always \ --restart=always \
--name powerjob-server \ --name powerjob-server \
-p 7700:7700 -p 10086:10086 -p 5001:5005 -p 10001:10000 \ -p 7700:7700 -p 10086:10086 -p 5001:5005 -p 10001:10000 \
-e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \ -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-e PARAMS="--spring.profiles.active=product --spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3306/powerjob-product?useUnicode=true&characterEncoding=UTF-8 --spring.data.mongodb.uri=mongodb://remotehost:27017/powerjob-product" \ -e PARAMS="--oms.swagger.enable=true --spring.profiles.active=product --spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3306/powerjob-product?useUnicode=true&characterEncoding=UTF-8 --oms.mongodb.enable=false --spring.data.mongodb.uri=mongodb://remotehost:27017/powerjob-product" \
-v ~/docker/powerjob-server:/root/powerjob-server -v ~/.m2:/root/.m2 \ -v ~/docker/powerjob-server:/root/powerjob/server -v ~/.m2:/root/.m2 \
tjqq/powerjob-server:latest tjqq/powerjob-server:latest
sleep 60 sleep 60
echo "================== 准备启动 powerjob-agent ==================" echo "================== 准备启动 powerjob-agent =================="
@ -37,19 +36,19 @@ echo "使用的Server地址$serverAddress"
docker run -d \ docker run -d \
--restart=always \ --restart=always \
--name powerjob-agent \ --name powerjob-worker-samples \
-p 27777:27777 -p 5002:5005 -p 10002:10000 \ -p 27777:27777 -p 5002:5005 -p 10002:10000 \
-e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \ -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-e PARAMS="--app powerjob-agent-test --server $serverAddress" \ -e PARAMS="--powerjob.worker.server-address=$serverAddress" \
-v ~/docker/powerjob-agent:/root \ -v ~/docker/powerjob-worker-samples:/root \
tjqq/powerjob-agent:latest tjqq/powerjob-worker-samples:latest
docker run -d \ docker run -d \
--restart=always \ --restart=always \
--name powerjob-agent2 \ --name powerjob-worker-samples2 \
-p 27778:27777 -p 5003:5005 -p 10003:10000 \ -p 27778:27777 -p 5003:5005 -p 10003:10000 \
-e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \ -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-e PARAMS="--app powerjob-agent-test --server $serverAddress" \ -e PARAMS="--powerjob.worker.server-address=$serverAddress" \
-v ~/docker/powerjob-agent2:/root \ -v ~/docker/powerjob-worker-samples2:/root \
tjqq/powerjob-agent:latest tjqq/powerjob-worker-samples:latest

182
others/script/wait-for-it.sh Executable file
View File

@ -0,0 +1,182 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available
# (the widely used "wait-for-it" helper, typically run in containers to block
# until a dependency such as a database port starts accepting connections).
WAITFORIT_cmdname=${0##*/}
# Print all arguments to stderr unless --quiet was requested.
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
# Print usage to stderr and exit non-zero.
# NOTE: everything between the heredoc markers is literal output — do not edit casually.
usage()
{
cat << USAGE >&2
Usage:
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
-h HOST | --host=HOST Host or IP under test
-p PORT | --port=PORT TCP port under test
Alternatively, you specify the host and port as host:port
-s | --strict Only execute subcommand if the test succeeds
-q | --quiet Don't output any status messages
-t TIMEOUT | --timeout=TIMEOUT
Timeout in seconds, zero for no timeout
-- COMMAND ARGS Execute command with args after the test finishes
USAGE
exit 1
}
# Poll the target host:port once per second until a TCP connection succeeds.
# The timeout itself is NOT enforced here — wait_for_wrapper wraps this in
# `timeout`; this function only mentions the value in its log line.
# Returns the exit status of the last connection probe (0 on success).
wait_for()
{
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
else
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
fi
WAITFORIT_start_ts=$(date +%s)
while :
do
# Probe strategy chosen by the busybox detection further down the script:
# busybox -> `nc -z`, otherwise bash's built-in /dev/tcp redirection.
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
WAITFORIT_result=$?
else
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
WAITFORIT_result=$?
fi
if [[ $WAITFORIT_result -eq 0 ]]; then
WAITFORIT_end_ts=$(date +%s)
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
break
fi
sleep 1
done
return $WAITFORIT_result
}
# Enforce the timeout by re-invoking this same script ($0) in --child mode
# under `timeout`, so Ctrl-C (SIGINT) still works while waiting.
# Returns 0 on success, non-zero when `timeout` killed the child.
wait_for_wrapper()
{
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
else
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
fi
WAITFORIT_PID=$!
# Forward SIGINT to the child's process group so the background probe dies too.
trap "kill -INT -$WAITFORIT_PID" INT
wait $WAITFORIT_PID
WAITFORIT_RESULT=$?
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
fi
return $WAITFORIT_RESULT
}
# ---- argument parsing -------------------------------------------------------
# Host/port may arrive as a single "host:port" positional or via -h/-p
# (with --host=/--port= long forms). --child is internal: wait_for_wrapper
# re-invokes this script with it so `timeout` can supervise the polling loop.
# process arguments
while [[ $# -gt 0 ]]
do
case "$1" in
*:* )
# split "host:port" on the colon into a two-element array
WAITFORIT_hostport=(${1//:/ })
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
shift 1
;;
--child)
WAITFORIT_CHILD=1
shift 1
;;
-q | --quiet)
WAITFORIT_QUIET=1
shift 1
;;
-s | --strict)
WAITFORIT_STRICT=1
shift 1
;;
-h)
WAITFORIT_HOST="$2"
# a missing option value falls through to the host/port check below, which errors out
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
shift 2
;;
--host=*)
WAITFORIT_HOST="${1#*=}"
shift 1
;;
-p)
WAITFORIT_PORT="$2"
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
shift 2
;;
--port=*)
WAITFORIT_PORT="${1#*=}"
shift 1
;;
-t)
WAITFORIT_TIMEOUT="$2"
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
shift 2
;;
--timeout=*)
WAITFORIT_TIMEOUT="${1#*=}"
shift 1
;;
--)
# everything after "--" is the command to exec once the port is reachable
shift
WAITFORIT_CLI=("$@")
break
;;
--help)
usage
;;
*)
echoerr "Unknown argument: $1"
usage
;;
esac
done
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
echoerr "Error: you need to provide a host and port to test."
usage
fi
# defaults: 15s timeout, non-strict, parent mode, verbose
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
# Check to see if timeout is from busybox?
# (resolve symlinks: on Alpine /usr/bin/timeout is a link into busybox)
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
WAITFORIT_ISBUSY=1
# Check if busybox timeout uses -t flag
# (recent Alpine versions don't support -t anymore)
if timeout &>/dev/stdout | grep -q -e '-t '; then
WAITFORIT_BUSYTIMEFLAG="-t"
fi
else
WAITFORIT_ISBUSY=0
fi
# ---- main -------------------------------------------------------------------
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
# child mode: just poll; the parent's `timeout` enforces the deadline
wait_for
WAITFORIT_RESULT=$?
exit $WAITFORIT_RESULT
else
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
wait_for_wrapper
WAITFORIT_RESULT=$?
else
# zero timeout requested: wait indefinitely
wait_for
WAITFORIT_RESULT=$?
fi
fi
if [[ $WAITFORIT_CLI != "" ]]; then
# a command followed "--": in strict mode only run it if the port came up
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
exit $WAITFORIT_RESULT
fi
# replace this shell with the requested command, preserving its exit status
exec "${WAITFORIT_CLI[@]}"
else
exit $WAITFORIT_RESULT
fi

2
others/sql/01schema.sql Normal file
View File

@ -0,0 +1,2 @@
-- powerjob
-- Create the dev/daily database. The hyphenated name must stay backquoted —
-- MySQL does not allow '-' in unquoted identifiers.
create database `powerjob-daily` default character set utf8mb4 collate utf8mb4_general_ci;

View File

@ -0,0 +1,21 @@
-- Seed schema for the dev/daily environment (database created by 01schema.sql).
-- FIX: the database name contains a hyphen, so it MUST be backquoted;
-- the previous unquoted form `USE powerjob-daily;` is a MySQL syntax error
-- ('-' is not legal in an unquoted identifier) and aborts the whole script.
USE `powerjob-daily`;
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
CREATE TABLE IF NOT EXISTS `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT COMMENT '应用ID',
`app_name` varchar(128) not NULL COMMENT '应用名称',
`current_server` varchar(255) default null COMMENT 'Server地址,用于负责调度应用的ActorSystem地址',
`gmt_create` datetime not null COMMENT '创建时间',
`gmt_modified` datetime not null COMMENT '更新时间',
`password` varchar(255) not null COMMENT '应用密码',
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE = InnoDB AUTO_INCREMENT = 1
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='应用表';
-- Idempotent seed: register the sample worker app only if it is not already present.
insert into app_info (app_name, gmt_create, gmt_modified, password) select 'powerjob-worker-samples', current_timestamp(), current_timestamp(), 'powerjob123' from dual where not exists ( select * from app_info where app_name = 'powerjob-worker-samples');
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -1,17 +1,17 @@
/* /*
Navicat Premium Data Transfer Navicat Premium Data Transfer
Source Server : Local MySQL Source Server : Local@3306
Source Server Type : MySQL Source Server Type : MySQL
Source Server Version : 80021 Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306 Source Host : localhost:3306
Source Schema : powerjob-db-template Source Schema : powerjob4
Target Server Type : MySQL Target Server Type : MySQL
Target Server Version : 80021 Target Server Version : 80300 (8.3.0)
File Encoding : 65001 File Encoding : 65001
Date: 28/11/2020 17:05:50 Date: 02/03/2024 18:51:36
*/ */
SET NAMES utf8mb4; SET NAMES utf8mb4;
@ -29,7 +29,7 @@ CREATE TABLE `app_info` (
`gmt_modified` datetime(6) DEFAULT NULL, `gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL, `password` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`), PRIMARY KEY (`id`),
UNIQUE KEY `appNameUK` (`app_name`) UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
@ -48,7 +48,7 @@ CREATE TABLE `container_info` (
`status` int DEFAULT NULL, `status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL, `version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`), PRIMARY KEY (`id`),
KEY `IDX8hixyaktlnwil2w9up6b0p898` (`app_id`) KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
@ -66,6 +66,7 @@ CREATE TABLE `instance_info` (
`instance_id` bigint DEFAULT NULL, `instance_id` bigint DEFAULT NULL,
`instance_params` longtext, `instance_params` longtext,
`job_id` bigint DEFAULT NULL, `job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL, `last_report_time` bigint DEFAULT NULL,
`result` longtext, `result` longtext,
`running_times` bigint DEFAULT NULL, `running_times` bigint DEFAULT NULL,
@ -74,9 +75,9 @@ CREATE TABLE `instance_info` (
`type` int DEFAULT NULL, `type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL, `wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`), PRIMARY KEY (`id`),
KEY `IDX5b1nhpe5je7gc5s1ur200njr7` (`job_id`), KEY `idx01_instance_info` (`job_id`,`status`),
KEY `IDXjnji5lrr195kswk6f7mfhinrs` (`app_id`), KEY `idx02_instance_info` (`app_id`,`status`),
KEY `IDXa98hq3yu0l863wuotdjl7noum` (`instance_id`) KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
@ -85,17 +86,24 @@ CREATE TABLE `instance_info` (
DROP TABLE IF EXISTS `job_info`; DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` ( CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT, `id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL, `app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL, `concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL, `designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL, `execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL, `gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL, `gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL, `instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL, `instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL, `job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL, `job_name` varchar(255) DEFAULT NULL,
`job_params` varchar(255) DEFAULT NULL, `job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL, `max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL, `max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL, `min_cpu_cores` double NOT NULL,
@ -103,14 +111,15 @@ CREATE TABLE `job_info` (
`min_memory_space` double NOT NULL, `min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL, `next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL, `notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` longtext, `processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL, `processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL, `status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL, `task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL, `time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL, `time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`), PRIMARY KEY (`id`),
KEY `IDXk2xprmn3lldmlcb52i36udll1` (`app_id`) KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
@ -125,8 +134,8 @@ CREATE TABLE `oms_lock` (
`max_lock_time` bigint DEFAULT NULL, `max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL, `ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`), PRIMARY KEY (`id`),
UNIQUE KEY `lockNameUK` (`lock_name`) UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
-- Table structure for server_info -- Table structure for server_info
@ -138,7 +147,8 @@ CREATE TABLE `server_info` (
`gmt_modified` datetime(6) DEFAULT NULL, `gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL, `ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`), PRIMARY KEY (`id`),
UNIQUE KEY `UKtk8ytgpl7mpukhnvhbl82kgvy` (`ip`) UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
@ -155,7 +165,9 @@ CREATE TABLE `user_info` (
`phone` varchar(255) DEFAULT NULL, `phone` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL, `username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL, `web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`) PRIMARY KEY (`id`),
KEY `uidx01_user_info` (`username`),
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
@ -165,8 +177,10 @@ DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` ( CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT, `id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL, `app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL, `gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL, `gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL, `max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL, `next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL, `notify_user_ids` varchar(255) DEFAULT NULL,
@ -177,7 +191,7 @@ CREATE TABLE `workflow_info` (
`wf_description` varchar(255) DEFAULT NULL, `wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL, `wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`), PRIMARY KEY (`id`),
KEY `IDX7uo5w0e3beeho3fnx9t7eiol3` (`app_id`) KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ---------------------------- -- ----------------------------
@ -193,12 +207,37 @@ CREATE TABLE `workflow_instance_info` (
`finished_time` bigint DEFAULT NULL, `finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL, `gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL, `gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext, `result` longtext,
`status` int DEFAULT NULL, `status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext, `wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL, `wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL, `workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`) PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1; SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,323 @@
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob5
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 16/03/2024 22:07:31
*/
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
-- Registered applications; `app_name` is globally unique and each app
-- references a namespace via `namespace_id`.
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
-- presumably the address of the server instance currently scheduling this app — confirm with server code
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
-- Deployable containers attached to an app (source, version, deploy status).
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
-- One row per job run; `instance_id` is the external run id and trigger/report
-- times are stored as bigints — presumably epoch millis, confirm with server code.
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
-- snapshot of the job's params at trigger time — presumably; verify against server
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
-- all three indexes pair an id column with `status` for filtered lookups
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
-- Job definitions: scheduling expression, dispatch/retry policy, worker
-- resource floors (min_*) and processor configuration.
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
-- composite index over the scheduler's filter columns
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
-- Namespaces grouping applications; `code` is the unique business key.
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
-- Database-backed distributed lock: uniqueness of `lock_name` makes the
-- INSERT act as the lock acquisition.
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
-- Credential store for the built-in ("PWJB") login — distinct from user_info,
-- which holds the profile; confirm the exact split against the auth module.
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
-- Registry of server nodes; `gmt_modified` is indexed — presumably used as a
-- liveness/heartbeat filter, confirm with server code.
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
-- Generic key/value store: rows addressed by the (pkey, skey) pair.
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
-- User accounts/profiles; `username` is unique.
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_name` (`username`),
-- NOTE(review): despite the "uidx" prefix this is a plain (non-unique) index
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
-- Role grants: user_id gets `role` over a target identified by (scope, target).
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
-- NOTE(review): "uidx" prefix is misleading — this is a plain (non-unique) index
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
-- Workflow definitions: DAG (`pedag`), schedule expression and limits.
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
-- serialized workflow DAG; format not visible here — verify against server code
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
-- One row per workflow run; `wf_instance_id` is the unique external run id.
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
-- One row per node of a workflow DAG, with per-node overrides.
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- re-enable foreign key checks disabled at the top of this dump
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,323 @@
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob5
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 11/08/2024 23:23:30
*/
-- Dump preamble: force utf8mb4 and suspend FK checks while (re)creating tables.
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for app_info
-- NOTE(review): `namespace_id` / `creator` / `modifier` reference other tables
-- by id but no FK constraints are declared anywhere in this schema.
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- NOTE(review): trigger/report times are stored as bigint (epoch millis,
-- presumably — confirm against server code), not datetime.
-- ----------------------------
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
-- The three min_* resource thresholds are the only NOT NULL columns here.
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
-- Supports the scheduler scan: app + status + expression type + next trigger.
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- NOTE(review): AUTO_INCREMENT=2 is a dump artifact (one seeded row existed).
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- NOTE(review): `lock_name` is UNIQUE — presumably a DB-based distributed
-- lock (insert-to-acquire); confirm against server lock service.
-- ----------------------------
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- `gmt_modified` is indexed — presumably used as a liveness heartbeat filter.
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- Generic key/value store: (`pkey`,`skey`) pair is the unique lookup key.
-- ----------------------------
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_name` (`username`),
-- NOTE(review): named with the `uidx` (unique) prefix but declared as a
-- plain KEY — either rename to idx02_user_info or make it UNIQUE; confirm.
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
-- NOTE(review): `uidx` prefix on a non-unique KEY (same issue as user_info).
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
-- Mirrors idx01_job_info: scheduler scan by app/status/expr-type/next trigger.
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
-- `wf_instance_id` is the externally visible unique instance id.
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- Re-enable FK checks disabled at the top of this dump file.
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,7 @@
由于存在不同数据库、不同版本的升级,官方能给出的 upgrade SQL 相对有限,大家可参考以下方式自行生成升级 SQL:
- 【官方脚本】参考官方每个版本的数据库全库建表文件(项目 others - sql - schema),自行进行字段 DIFF
- 【自己动手版】导出当前您的 powerjob 数据库表结构,同时创建一个测试库,让 5.x 版本的 server 直连该测试库,自动建表。分别拿到两个版本的表结构 SQL 后,借用工具生成 update SQL 即可(Navicat 等数据库管理软件均支持结构对比)
参考文档:https://www.yuque.com/powerjob/guidence/upgrade

View File

@ -0,0 +1,10 @@
-- Upgrade SQL FROM 4.0.x to 4.1.x
-- ----------------------------
-- Table change for workflow_instance_info
-- ----------------------------
-- Fixed: original statement read "default null null" (duplicated NULL
-- nullability attribute); redundant, though MySQL tolerates it.
alter table workflow_instance_info
    add parent_wf_instance_id bigint default null comment '上层工作流实例ID';
-- ----------------------------
-- Table change for job_info
-- NOTE(review): the 5.x full schema declares alarm_config as varchar(255);
-- this upgrade creates varchar(512). Wider is harmless but worth aligning.
-- ----------------------------
alter table job_info add alarm_config varchar(512) comment '告警配置' default null;

View File

@ -0,0 +1,6 @@
-- Upgrade SQL FROM 4.1.x to 4.2.x
-- ----------------------------
-- Table change for job_info
-- ----------------------------
alter table job_info add tag varchar(255) comment 'TAG' default null;
alter table job_info add log_config varchar(255) comment 'logConfig' default null;

View File

@ -0,0 +1,6 @@
-- Upgrade SQL FROM 4.3.7 to 4.3.8
-- ----------------------------
-- Table change for job_info
-- ----------------------------
alter table job_info add dispatch_strategy_config varchar(255) comment 'dispatch_strategy_config' default null;
alter table job_info add advanced_runtime_config varchar(255) comment 'advanced_runtime_config' default null;

View File

@ -0,0 +1,88 @@
-- Upgrade SQL FROM 4.x to 5.x
-- NOTE(review): original header said "FROM 4.1.x to 4.2.x" — evidently a
-- copy-paste from another upgrade file; the namespace / auth changes below
-- match the 5.x full schema in this repo. Confirm against release notes.
-- ----------------------------
-- Table change for app_info
-- ----------------------------
SET FOREIGN_KEY_CHECKS=0;
ALTER TABLE `app_info` ADD COLUMN `creator` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `extra` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `modifier` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `namespace_id` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `tags` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `title` varchar(255) NULL DEFAULT NULL;
-- ----------------------------
-- Table change for user_info
-- ----------------------------
ALTER TABLE `user_info` ADD COLUMN `account_type` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `nick` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `origin_username` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `token_login_verify_info` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD UNIQUE INDEX `uidx01_user_name`(`username` ASC) USING BTREE;
-- ----------------------------
-- new table 'namespace'
-- ----------------------------
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) NULL DEFAULT NULL,
`creator` bigint NULL DEFAULT NULL,
`dept` varchar(255) NULL DEFAULT NULL,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`modifier` bigint NULL DEFAULT NULL,
`name` varchar(255) NULL DEFAULT NULL,
`status` int NULL DEFAULT NULL,
`tags` varchar(255) NULL DEFAULT NULL,
`token` varchar(255) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE INDEX `uidx01_namespace`(`code` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'pwjb_user_info'
-- ----------------------------
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`password` varchar(255) NULL DEFAULT NULL,
`username` varchar(255) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE INDEX `uidx01_username`(`username` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'sundry'
-- Generic key/value store keyed by the unique (`pkey`,`skey`) pair.
-- ----------------------------
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) NULL DEFAULT NULL,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`pkey` varchar(255) NULL DEFAULT NULL,
`skey` varchar(255) NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE INDEX `uidx01_sundry`(`pkey` ASC, `skey` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 3 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'user_role'
-- NOTE(review): `uidx01_user_id` is a non-unique INDEX despite the uidx
-- prefix (matches the full 5.x schema, but the name is misleading).
-- ----------------------------
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) NULL DEFAULT NULL,
`gmt_create` datetime(6) NULL DEFAULT NULL,
`gmt_modified` datetime(6) NULL DEFAULT NULL,
`role` int NULL DEFAULT NULL,
`scope` int NULL DEFAULT NULL,
`target` bigint NULL DEFAULT NULL,
`user_id` bigint NULL DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
INDEX `uidx01_user_id`(`user_id` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;

39
pom.xml
View File

@ -4,13 +4,13 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<groupId>com.github.kfcfans</groupId> <groupId>tech.powerjob</groupId>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<version>2.0.0</version> <version>5.1.1</version>
<packaging>pom</packaging> <packaging>pom</packaging>
<name>powerjob</name> <name>powerjob</name>
<url>http://www.powerjob.tech</url> <url>http://www.powerjob.tech</url>
<description>Distributed scheduling and execution framework</description> <description>Enterprise job scheduling middleware with distributed computing ability.</description>
<licenses> <licenses>
<license> <license>
<name>Apache License, Version 2.0</name> <name>Apache License, Version 2.0</name>
@ -19,8 +19,8 @@
</license> </license>
</licenses> </licenses>
<scm> <scm>
<url>https://github.com/KFCFans/PowerJob</url> <url>https://github.com/PowerJob/PowerJob</url>
<connection>https://github.com/KFCFans/PowerJob.git</connection> <connection>https://github.com/PowerJob/PowerJob.git</connection>
</scm> </scm>
<developers> <developers>
@ -43,6 +43,8 @@
<module>powerjob-worker-agent</module> <module>powerjob-worker-agent</module>
<module>powerjob-worker-spring-boot-starter</module> <module>powerjob-worker-spring-boot-starter</module>
<module>powerjob-worker-samples</module> <module>powerjob-worker-samples</module>
<module>powerjob-official-processors</module>
<module>powerjob-remote</module>
</modules> </modules>
<properties> <properties>
@ -87,7 +89,7 @@
<testTarget>${java.version}</testTarget> <testTarget>${java.version}</testTarget>
</configuration> </configuration>
</plugin> </plugin>
<!-- 打包源码 --> <!-- Package source codes -->
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId> <artifactId>maven-source-plugin</artifactId>
@ -121,9 +123,9 @@
<artifactId>maven-javadoc-plugin</artifactId> <artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version> <version>${maven-javadoc-plugin.version}</version>
<configuration> <configuration>
<!-- JavaDoc 编译错误不影响正常构建 --> <!-- Prevent JavaDoc error from affecting building project. -->
<failOnError>false</failOnError> <failOnError>false</failOnError>
<!-- 非严格模式...以后要好好按格式写注释啊... --> <!-- Non-strict mode -->
<additionalJOption>-Xdoclint:none</additionalJOption> <additionalJOption>-Xdoclint:none</additionalJOption>
</configuration> </configuration>
<executions> <executions>
@ -149,22 +151,33 @@
</execution> </execution>
</executions> </executions>
</plugin> </plugin>
<plugin>
<groupId>org.sonatype.plugins</groupId>
<artifactId>nexus-staging-maven-plugin</artifactId>
<version>1.6.7</version>
<extensions>true</extensions>
<configuration>
<serverId>ossrh</serverId>
<nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>
<autoReleaseAfterClose>true</autoReleaseAfterClose>
</configuration>
</plugin>
</plugins> </plugins>
</build> </build>
<distributionManagement> <distributionManagement>
<snapshotRepository> <snapshotRepository>
<id>ossrh</id> <id>ossrh</id>
<url>https://oss.sonatype.org/content/repositories/snapshots/</url> <url>https://s01.oss.sonatype.org/content/repositories/snapshots/</url>
</snapshotRepository> </snapshotRepository>
<repository> <repository>
<id>ossrh</id> <id>ossrh</id>
<url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url> <url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/</url>
</repository> </repository>
</distributionManagement> </distributionManagement>
</profile> </profile>
<!-- 本地使用 --> <!-- Local profile -->
<profile> <profile>
<id>dev</id> <id>dev</id>
<activation> <activation>
@ -173,7 +186,7 @@
<build> <build>
<plugins> <plugins>
<!-- 编译插件 --> <!-- Maven compiler plugin -->
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId> <artifactId>maven-compiler-plugin</artifactId>
@ -185,7 +198,7 @@
<testTarget>${java.version}</testTarget> <testTarget>${java.version}</testTarget>
</configuration> </configuration>
</plugin> </plugin>
<!-- 编辑 MANIFEST.MF --> <!-- Edit MANIFEST.MF -->
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId> <artifactId>maven-jar-plugin</artifactId>

View File

@ -4,19 +4,20 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>com.github.kfcfans</groupId> <groupId>tech.powerjob</groupId>
<version>2.0.0</version> <version>5.1.1</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-client</artifactId> <artifactId>powerjob-client</artifactId>
<version>3.4.0</version> <version>5.1.1</version>
<packaging>jar</packaging> <packaging>jar</packaging>
<properties> <properties>
<junit.version>5.6.1</junit.version> <junit.version>5.9.1</junit.version>
<fastjson.version>1.2.68</fastjson.version> <logback.version>1.2.13</logback.version>
<powerjob.common.version>3.4.0</powerjob.common.version> <fastjson.version>1.2.83</fastjson.version>
<powerjob.common.version>5.1.1</powerjob.common.version>
<mvn.shade.plugin.version>3.2.4</mvn.shade.plugin.version> <mvn.shade.plugin.version>3.2.4</mvn.shade.plugin.version>
</properties> </properties>
@ -32,18 +33,25 @@
<!-- oms-common --> <!-- oms-common -->
<dependency> <dependency>
<groupId>com.github.kfcfans</groupId> <groupId>tech.powerjob</groupId>
<artifactId>powerjob-common</artifactId> <artifactId>powerjob-common</artifactId>
<version>${powerjob.common.version}</version> <version>${powerjob.common.version}</version>
</dependency> </dependency>
<!-- Junit 测试 --> <!-- Junit tests -->
<dependency> <dependency>
<groupId>org.junit.jupiter</groupId> <groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId> <artifactId>junit-jupiter-api</artifactId>
<version>${junit.version}</version> <version>${junit.version}</version>
<scope>test</scope> <scope>test</scope>
</dependency> </dependency>
<!-- log for test stage -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
<scope>test</scope>
</dependency>
</dependencies> </dependencies>
<build> <build>

View File

@ -1,444 +0,0 @@
package com.github.kfcfans.powerjob.client;
import com.alibaba.fastjson.JSONObject;
import com.github.kfcfans.powerjob.common.InstanceStatus;
import com.github.kfcfans.powerjob.common.OmsConstant;
import com.github.kfcfans.powerjob.common.OpenAPIConstant;
import com.github.kfcfans.powerjob.common.PowerJobException;
import com.github.kfcfans.powerjob.common.request.http.SaveJobInfoRequest;
import com.github.kfcfans.powerjob.common.request.http.SaveWorkflowRequest;
import com.github.kfcfans.powerjob.common.response.*;
import com.github.kfcfans.powerjob.common.utils.CommonUtils;
import com.github.kfcfans.powerjob.common.utils.HttpUtils;
import com.github.kfcfans.powerjob.common.utils.JsonUtils;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import okhttp3.FormBody;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import org.apache.commons.lang3.StringUtils;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import static com.github.kfcfans.powerjob.client.TypeStore.*;
/**
* OpenAPI 客户端
*
* @author tjq
* @since 2020/4/15
*/
@Slf4j
public class OhMyClient {
// App id resolved by the assert call during bootstrap; sent with every request.
private Long appId;
// Address of the server that accepted the assert call; requests target it.
private String currentAddress;
// All configured server addresses; presumably used by postHA for failover
// (postHA body is not in this view — confirm).
private final List<String> allAddress;
// http://<address><WEB_PATH><path>
private static final String URL_PATTERN = "http://%s%s%s";
/**
 * Initializes an OhMyClient against a single server address.
 *
 * @param domain   e.g. www.powerjob-server.com (DNS / proxy for intranet domains is up to the caller)
 * @param appName  name of the application this client works on behalf of
 * @param password password of the application (may be null)
 */
public OhMyClient(String domain, String appName, String password) {
    this(Lists.newArrayList(domain), appName, password);
}
/**
 * Initializes an OhMyClient against a list of server addresses.
 * Probes each address via the OpenAPI ASSERT endpoint until one accepts the
 * appName/password pair, then keeps that address as the current server.
 *
 * @param addressList candidate server addresses in IP:Port form
 * @param appName     name of the application this client works on behalf of
 * @param password    password of the application (may be null)
 * @throws PowerJobException if a server explicitly rejects the app, or no server is reachable
 */
public OhMyClient(List<String> addressList, String appName, String password) {
    CommonUtils.requireNonNull(addressList, "addressList can't be null!");
    CommonUtils.requireNonNull(appName, "appName can't be null");
    allAddress = addressList;
    for (String addr : addressList) {
        String url = getUrl(OpenAPIConstant.ASSERT, addr);
        try {
            String result = assertApp(appName, password, url);
            if (StringUtils.isNotEmpty(result)) {
                ResultDTO<Long> resultDTO = JSONObject.parseObject(result, LONG_RESULT_TYPE);
                if (resultDTO.isSuccess()) {
                    appId = resultDTO.getData();
                    currentAddress = addr;
                    break;
                } else {
                    // Server answered but rejected the app: fail fast, do not try other servers.
                    throw new PowerJobException(resultDTO.getMessage());
                }
            }
        } catch (IOException e) {
            // Fix: the IOException used to be silently swallowed, making unreachable
            // servers impossible to diagnose. Log and continue with the next address.
            log.warn("[OhMyClient] failed to connect to server {}, msg: {}", addr, e.getMessage());
        }
    }
    if (StringUtils.isEmpty(currentAddress)) {
        throw new PowerJobException("no server available");
    }
    log.info("[OhMyClient] {}'s OhMyClient bootstrap successfully, using server: {}", appName, currentAddress);
}
/**
 * POSTs appName (and password, when present) to the given assert URL and
 * returns the raw response body.
 */
private static String assertApp(String appName, String password, String url) throws IOException {
    FormBody.Builder builder = new FormBody.Builder()
            .add("appName", appName);
    if (password != null) {
        builder.add("password", password);
    }
    return HttpUtils.post(url, builder.build());
}
/**
 * Builds the full OpenAPI URL for the given path on the given server address:
 * http://&lt;address&gt;&lt;WEB_PATH&gt;&lt;path&gt;.
 */
private static String getUrl(String path, String address) {
    return "http://" + address + OpenAPIConstant.WEB_PATH + path;
}
/* ************* Job section ************* */
/**
 * Saves a job (creation and modification share this endpoint).
 *
 * @param request detailed job parameters; its appId is overwritten with this client's appId
 * @return id of the saved job
 * @throws PowerJobException on failure
 */
public ResultDTO<Long> saveJob(SaveJobInfoRequest request) throws PowerJobException {
    request.setAppId(appId);
    MediaType jsonType = MediaType.parse("application/json; charset=utf-8");
    String json = JSONObject.toJSONString(request);
    String post = postHA(OpenAPIConstant.SAVE_JOB, RequestBody.create(jsonType, json));
    return JSONObject.parseObject(post, LONG_RESULT_TYPE);
}
/**
 * Queries job information by jobId.
 *
 * @param jobId job id
 * @return detailed job information
 * @throws PowerJobException on failure
 */
public ResultDTO<JobInfoDTO> fetchJob(Long jobId) throws PowerJobException {
    RequestBody body = new FormBody.Builder()
            .add("jobId", jobId.toString())
            .add("appId", appId.toString())
            .build();
    String post = postHA(OpenAPIConstant.FETCH_JOB, body);
    return JSONObject.parseObject(post, JOB_RESULT_TYPE);
}
/**
 * Disables a job so the scheduler stops triggering it.
 *
 * @param jobId job id
 * @return standard result object
 * @throws PowerJobException on failure
 */
public ResultDTO<Void> disableJob(Long jobId) throws PowerJobException {
    FormBody.Builder form = new FormBody.Builder();
    form.add("jobId", jobId.toString());
    form.add("appId", appId.toString());
    String response = postHA(OpenAPIConstant.DISABLE_JOB, form.build());
    return JSONObject.parseObject(response, VOID_RESULT_TYPE);
}
/**
 * Enables a job so the scheduler resumes triggering it.
 *
 * @param jobId job id
 * @return standard result object
 * @throws PowerJobException on failure
 */
public ResultDTO<Void> enableJob(Long jobId) throws PowerJobException {
    RequestBody body = new FormBody.Builder()
            .add("jobId", jobId.toString())
            .add("appId", appId.toString())
            .build();
    String post = postHA(OpenAPIConstant.ENABLE_JOB, body);
    return JSONObject.parseObject(post, VOID_RESULT_TYPE);
}
/**
 * Deletes a job.
 *
 * @param jobId job id
 * @return standard result object
 * @throws PowerJobException on failure
 */
public ResultDTO<Void> deleteJob(Long jobId) throws PowerJobException {
    RequestBody body = new FormBody.Builder()
            .add("jobId", jobId.toString())
            .add("appId", appId.toString())
            .build();
    String post = postHA(OpenAPIConstant.DELETE_JOB, body);
    return JSONObject.parseObject(post, VOID_RESULT_TYPE);
}
/**
 * Triggers a job run.
 *
 * @param jobId          job id
 * @param instanceParams parameters for this particular instance (optional)
 * @param delayMS        delay before triggering, in milliseconds
 * @return instance id of the triggered run
 * @throws PowerJobException on failure
 */
public ResultDTO<Long> runJob(Long jobId, String instanceParams, long delayMS) throws PowerJobException {
    FormBody.Builder builder = new FormBody.Builder()
            .add("jobId", jobId.toString())
            .add("appId", appId.toString())
            .add("delay", String.valueOf(delayMS));
    // instanceParams is only sent when non-empty.
    if (StringUtils.isNotEmpty(instanceParams)) {
        builder.add("instanceParams", instanceParams);
    }
    String post = postHA(OpenAPIConstant.RUN_JOB, builder.build());
    return JSONObject.parseObject(post, LONG_RESULT_TYPE);
}
/**
 * Triggers a job run immediately, with no instance params and no delay.
 */
public ResultDTO<Long> runJob(Long jobId) throws PowerJobException {
    return runJob(jobId, null, 0);
}
/* ************* Instance section ************* */
/**
 * Stops a running instance.
 *
 * @param instanceId instance id
 * @return standard result object (success means the stop was accepted)
 * @throws PowerJobException on failure
 */
public ResultDTO<Void> stopInstance(Long instanceId) throws PowerJobException {
    RequestBody body = new FormBody.Builder()
            .add("instanceId", instanceId.toString())
            .add("appId", appId.toString())
            .build();
    String post = postHA(OpenAPIConstant.STOP_INSTANCE, body);
    return JSONObject.parseObject(post, VOID_RESULT_TYPE);
}
/**
 * Cancels a not-yet-executed instance.
 * Reliability caveat (from the original author): there must be a reasonable gap
 * between calling this API and the instance's expected trigger time, otherwise
 * cancellation is not guaranteed.
 *
 * @param instanceId instance id
 * @return standard result object (success means cancelled)
 * @throws PowerJobException on failure
 */
public ResultDTO<Void> cancelInstance(Long instanceId) throws PowerJobException {
    RequestBody body = new FormBody.Builder()
            .add("instanceId", instanceId.toString())
            .add("appId", appId.toString())
            .build();
    String post = postHA(OpenAPIConstant.CANCEL_INSTANCE, body);
    return JSONObject.parseObject(post, VOID_RESULT_TYPE);
}
/**
 * Retries an instance.
 * Per the original author: only instances in a final state (success, failure,
 * manually stopped, cancelled) may be retried, and retrying instances inside a
 * workflow is not supported yet.
 *
 * @param instanceId instance id
 * @return standard result object (success means the retry was accepted)
 * @throws PowerJobException on failure
 */
public ResultDTO<Void> retryInstance(Long instanceId) throws PowerJobException {
    RequestBody body = new FormBody.Builder()
            .add("instanceId", instanceId.toString())
            .add("appId", appId.toString())
            .build();
    String post = postHA(OpenAPIConstant.RETRY_INSTANCE, body);
    return JSONObject.parseObject(post, VOID_RESULT_TYPE);
}
/**
 * Queries the status of an instance.
 * NOTE(review): unlike the other instance APIs this one does not send appId —
 * presumably the server resolves it from the instance; confirm.
 *
 * @param instanceId instance id
 * @return an {@link InstanceStatus} enum value (as its int code)
 * @throws PowerJobException on failure
 */
public ResultDTO<Integer> fetchInstanceStatus(Long instanceId) throws PowerJobException {
    RequestBody body = new FormBody.Builder()
            .add("instanceId", instanceId.toString())
            .build();
    String post = postHA(OpenAPIConstant.FETCH_INSTANCE_STATUS, body);
    return JSONObject.parseObject(post, INTEGER_RESULT_TYPE);
}
/**
 * Queries the detailed information of an instance.
 *
 * @param instanceId instance id
 * @return instance information
 * @throws PowerJobException on failure
 */
public ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId) throws PowerJobException {
    RequestBody body = new FormBody.Builder()
            .add("instanceId", instanceId.toString())
            .build();
    String post = postHA(OpenAPIConstant.FETCH_INSTANCE_INFO, body);
    return JSONObject.parseObject(post, INSTANCE_RESULT_TYPE);
}
/* ************* Workflow section ************* */
/**
 * Saves a workflow (creation and modification share this endpoint).
 *
 * @param request create/modify workflow request; its appId is overwritten with this client's appId
 * @return id of the saved workflow
 * @throws PowerJobException on failure
 */
public ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request) throws PowerJobException {
    request.setAppId(appId);
    MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
    // Pitfall (original author): serializing with FastJSON made pEWorkflowDAG
    // arrive as null on the server, hence JsonUtils is used here instead.
    String json = JsonUtils.toJSONStringUnsafe(request);
    String post = postHA(OpenAPIConstant.SAVE_WORKFLOW, RequestBody.create(jsonType, json));
    return JSONObject.parseObject(post, LONG_RESULT_TYPE);
}
/**
 * Queries workflow information by workflowId.
 *
 * @param workflowId workflow id
 * @return workflow information
 * @throws PowerJobException on failure
 */
public ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId) throws PowerJobException {
    RequestBody body = new FormBody.Builder()
            .add("workflowId", workflowId.toString())
            .add("appId", appId.toString())
            .build();
    String post = postHA(OpenAPIConstant.FETCH_WORKFLOW, body);
    return JSONObject.parseObject(post, WF_RESULT_TYPE);
}
/**
 * Disable a workflow.
 * @param workflowId workflowId
 * @return standard result object
 * @throws PowerJobException potential exception
 */
public ResultDTO<Void> disableWorkflow(Long workflowId) throws PowerJobException {
    FormBody.Builder formBuilder = new FormBody.Builder();
    formBuilder.add("workflowId", workflowId.toString());
    formBuilder.add("appId", appId.toString());
    String response = postHA(OpenAPIConstant.DISABLE_WORKFLOW, formBuilder.build());
    return JSONObject.parseObject(response, VOID_RESULT_TYPE);
}
/**
 * Enable a workflow.
 * @param workflowId workflowId
 * @return standard result object
 * @throws PowerJobException potential exception
 */
public ResultDTO<Void> enableWorkflow(Long workflowId) throws PowerJobException {
    FormBody.Builder formBuilder = new FormBody.Builder();
    formBuilder.add("workflowId", workflowId.toString());
    formBuilder.add("appId", appId.toString());
    String response = postHA(OpenAPIConstant.ENABLE_WORKFLOW, formBuilder.build());
    return JSONObject.parseObject(response, VOID_RESULT_TYPE);
}
/**
 * Delete a workflow.
 * @param workflowId workflowId
 * @return standard result object
 * @throws PowerJobException potential exception
 */
public ResultDTO<Void> deleteWorkflow(Long workflowId) throws PowerJobException {
    FormBody.Builder formBuilder = new FormBody.Builder();
    formBuilder.add("workflowId", workflowId.toString());
    formBuilder.add("appId", appId.toString());
    String response = postHA(OpenAPIConstant.DELETE_WORKFLOW, formBuilder.build());
    return JSONObject.parseObject(response, VOID_RESULT_TYPE);
}
/**
 * Run a workflow once.
 * @param workflowId workflowId
 * @param initParams startup parameters of this workflow run (skipped when empty)
 * @param delayMS delay before the run, in milliseconds
 * @return workflow instance id
 * @throws PowerJobException potential exception
 */
public ResultDTO<Long> runWorkflow(Long workflowId, String initParams, long delayMS) throws PowerJobException {
    FormBody.Builder formBuilder = new FormBody.Builder();
    formBuilder.add("workflowId", workflowId.toString());
    formBuilder.add("appId", appId.toString());
    formBuilder.add("delay", String.valueOf(delayMS));
    // only attach initParams when the caller actually supplied them
    if (StringUtils.isNotEmpty(initParams)) {
        formBuilder.add("initParams", initParams);
    }
    String response = postHA(OpenAPIConstant.RUN_WORKFLOW, formBuilder.build());
    return JSONObject.parseObject(response, LONG_RESULT_TYPE);
}
/**
 * Run a workflow immediately with no startup parameters.
 * @param workflowId workflowId
 * @return workflow instance id
 * @throws PowerJobException potential exception
 */
public ResultDTO<Long> runWorkflow(Long workflowId) throws PowerJobException {
    return runWorkflow(workflowId, null, 0);
}
/* ************* Workflow Instance 区 ************* */
/**
 * Stop a workflow instance.
 * (The original javadoc said "stop application instance" — copy-paste; this method posts to STOP_WORKFLOW_INSTANCE.)
 * @param wfInstanceId workflow instance id
 * @return standard result object
 * @throws PowerJobException potential exception
 */
public ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId) throws PowerJobException {
    RequestBody body = new FormBody.Builder()
            .add("wfInstanceId", wfInstanceId.toString())
            .add("appId", appId.toString())
            .build();
    String post = postHA(OpenAPIConstant.STOP_WORKFLOW_INSTANCE, body);
    return JSONObject.parseObject(post, VOID_RESULT_TYPE);
}
/**
 * Query the detail of a workflow instance.
 * @param wfInstanceId workflow instance id
 * @return workflow instance detail
 * @throws PowerJobException potential exception
 */
public ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId) throws PowerJobException {
    FormBody.Builder formBuilder = new FormBody.Builder();
    formBuilder.add("wfInstanceId", wfInstanceId.toString());
    formBuilder.add("appId", appId.toString());
    String response = postHA(OpenAPIConstant.FETCH_WORKFLOW_INSTANCE_INFO, formBuilder.build());
    return JSONObject.parseObject(response, WF_INSTANCE_RESULT_TYPE);
}
/**
 * POST with client-side high availability: try the address that worked last time,
 * then fail over to every other known server address before giving up.
 *
 * @param path        OpenAPI path to call
 * @param requestBody request body to post
 * @return non-empty response string from the first server that answered
 * @throws PowerJobException when no server in {@code allAddress} produced a response
 */
private String postHA(String path, RequestBody requestBody) {
    // 1. try the currently-preferred address first
    String url = getUrl(path, currentAddress);
    try {
        String response = HttpUtils.post(url, requestBody);
        if (StringUtils.isNotEmpty(response)) {
            return response;
        }
    } catch (IOException e) {
        log.warn("[OhMyClient] request url:{} failed, reason is {}.", url, e.toString());
    }

    // 2. fail over to the remaining addresses
    for (String addr : allAddress) {
        if (Objects.equals(addr, currentAddress)) {
            // already tried above
            continue;
        }
        url = getUrl(path, addr);
        try {
            String response = HttpUtils.post(url, requestBody);
            if (StringUtils.isNotEmpty(response)) {
                // remember the working server for subsequent calls
                log.warn("[OhMyClient] server change: from({}) -> to({}).", currentAddress, addr);
                currentAddress = addr;
                return response;
            }
        } catch (IOException e) {
            log.warn("[OhMyClient] request url:{} failed, reason is {}.", url, e.toString());
        }
    }

    log.error("[OhMyClient] do post for path: {} failed because of no server available in {}.", path, allAddress);
    throw new PowerJobException("no server available when send post");
}
}

View File

@ -1,28 +0,0 @@
package com.github.kfcfans.powerjob.client;
import com.alibaba.fastjson.TypeReference;
import com.github.kfcfans.powerjob.common.response.*;
/**
* 类型工厂
*
* @author tjq
* @since 11/7/20
*/
public class TypeStore {

    // fastjson needs explicit TypeReference instances to deserialize the generic
    // ResultDTO<T> envelopes; they are cached here as shared constants so each
    // client call site can reuse them instead of allocating anonymous subclasses.

    public static final TypeReference<ResultDTO<Void>> VOID_RESULT_TYPE = new TypeReference<ResultDTO<Void>>(){};
    public static final TypeReference<ResultDTO<Integer>> INTEGER_RESULT_TYPE = new TypeReference<ResultDTO<Integer>>(){};
    public static final TypeReference<ResultDTO<Long>> LONG_RESULT_TYPE = new TypeReference<ResultDTO<Long>>(){};
    public static final TypeReference<ResultDTO<JobInfoDTO>> JOB_RESULT_TYPE = new TypeReference<ResultDTO<JobInfoDTO>>(){};
    public static final TypeReference<ResultDTO<InstanceInfoDTO>> INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<InstanceInfoDTO>>() {};
    public static final TypeReference<ResultDTO<WorkflowInfoDTO>> WF_RESULT_TYPE = new TypeReference<ResultDTO<WorkflowInfoDTO>>() {};
    public static final TypeReference<ResultDTO<WorkflowInstanceInfoDTO>> WF_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<WorkflowInstanceInfoDTO>>() {};
}

View File

@ -0,0 +1,71 @@
package tech.powerjob.client;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
import tech.powerjob.client.common.Protocol;
import tech.powerjob.client.extension.ClientExtension;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
/**
* 客户端配置
*
* @author 程序帕鲁
* @since 2024/2/20
*/
@Getter
@Setter
@ToString
@Accessors(chain = true)
@Getter
@Setter
@ToString
@Accessors(chain = true)
public class ClientConfig implements Serializable {

    /**
     * AppName of the target application (used for app authentication).
     */
    private String appName;

    /**
     * Password of the application.
     */
    private String password;

    /**
     * Server address list, supported formats:
     * - IP:Port, e.g. 192.168.1.1:7700
     * - domain, e.g. powerjob.apple-inc.com
     */
    private List<String> addressList;

    /**
     * Communication protocol used by the client, HTTP by default.
     */
    private Protocol protocol = Protocol.HTTP;

    /**
     * Connection timeout. Unit is whatever the underlying request service applies
     * — NOTE(review): confirm in the HTTP client implementation.
     */
    private Integer connectionTimeout;

    /**
     * Maximum time to wait for the server's response data: from the moment the server
     * starts returning the response (HTTP headers and body) until the client has read it.
     */
    private Integer readTimeout;

    /**
     * Maximum time for sending data to the server: from the moment the client starts
     * sending data (e.g. a POST body) until it is completely sent.
     */
    private Integer writeTimeout;

    /**
     * Headers attached to every request by default,
     * useful for infrastructure to identify the traffic.
     */
    private Map<String, String> defaultHeaders;

    /**
     * Client behavior extension point.
     */
    private ClientExtension clientExtension;
}

View File

@ -0,0 +1,82 @@
package tech.powerjob.client;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.response.*;
import java.util.List;
/**
* PowerJobClient, the client for OpenAPI.
*
* @author tjq
* @since 2023/3/5
*/
public interface IPowerJobClient {

    /* ************* Job API list ************* */

    /** Export a job as a SaveJobInfoRequest that can be re-imported via {@link #saveJob}. */
    ResultDTO<SaveJobInfoRequest> exportJob(Long jobId);

    /** Save a job: create when the request carries no id, update otherwise. Returns the jobId. */
    ResultDTO<Long> saveJob(SaveJobInfoRequest request);

    /** Copy an existing job; returns the id of the copy. */
    ResultDTO<Long> copyJob(Long jobId);

    /** Query job meta info by jobId. */
    ResultDTO<JobInfoDTO> fetchJob(Long jobId);

    /** Query all jobs of the current app. */
    ResultDTO<List<JobInfoDTO>> fetchAllJob();

    /** Query jobs by a structured query object. */
    ResultDTO<List<JobInfoDTO>> queryJob(JobInfoQuery powerQuery);

    /** Disable a job. */
    ResultDTO<Void> disableJob(Long jobId);

    /** Enable a job. */
    ResultDTO<Void> enableJob(Long jobId);

    /** Delete a job. */
    ResultDTO<Void> deleteJob(Long jobId);

    /** Run a job once after delayMS milliseconds; returns the instanceId. */
    ResultDTO<Long> runJob(Long jobId, String instanceParams, long delayMS);

    /* ************* Instance API list ************* */

    /** Stop a running job instance. */
    ResultDTO<Void> stopInstance(Long instanceId);

    /** Cancel a job instance that has not started yet. */
    ResultDTO<Void> cancelInstance(Long instanceId);

    /** Retry a finished (failed/stopped/cancelled) job instance. */
    ResultDTO<Void> retryInstance(Long instanceId);

    /** Query the status code of a job instance. */
    ResultDTO<Integer> fetchInstanceStatus(Long instanceId);

    /** Query the detail of a job instance. */
    ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId);

    /** Page-query job instances. */
    ResultDTO<PageResult<InstanceInfoDTO>> queryInstanceInfo(InstancePageQuery instancePageQuery);

    /* ************* Workflow API list ************* */

    /** Save a workflow: create when the request carries no id, update otherwise. Returns the workflowId. */
    ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request);

    /** Copy an existing workflow; returns the id of the copy. */
    ResultDTO<Long> copyWorkflow(Long workflowId);

    /** Save workflow nodes; returns the saved node info list. */
    ResultDTO<List<WorkflowNodeInfoDTO>> saveWorkflowNode(List<SaveWorkflowNodeRequest> requestList);

    /** Query workflow meta info by workflowId. */
    ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId);

    /** Disable a workflow. */
    ResultDTO<Void> disableWorkflow(Long workflowId);

    /** Enable a workflow. */
    ResultDTO<Void> enableWorkflow(Long workflowId);

    /** Delete a workflow. */
    ResultDTO<Void> deleteWorkflow(Long workflowId);

    /** Run a workflow once after delayMS milliseconds; returns the workflow instanceId. */
    ResultDTO<Long> runWorkflow(Long workflowId, String initParams, long delayMS);

    /* ************* Workflow Instance API list ************* */

    /** Stop a workflow instance. */
    ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId);

    /** Retry a workflow instance. */
    ResultDTO<Void> retryWorkflowInstance(Long wfInstanceId);

    /** Mark a node of a workflow instance as successful. */
    ResultDTO<Void> markWorkflowNodeAsSuccess(Long wfInstanceId, Long nodeId);

    /** Query the detail of a workflow instance. */
    ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId);
}

View File

@ -0,0 +1,563 @@
package tech.powerjob.client;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.client.module.AppAuthRequest;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.client.service.RequestService;
import tech.powerjob.client.service.impl.ClusterRequestServiceOkHttp3Impl;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.enums.EncryptType;
import tech.powerjob.common.enums.InstanceStatus;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.response.*;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.common.utils.DigestUtils;

import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.Map;

import static tech.powerjob.client.TypeStore.*;
/**
* PowerJobClient, the client for OpenAPI.
*
* @author tjq
* @since 2020/4/15
*/
@Slf4j
@Slf4j
public class PowerJobClient implements IPowerJobClient, Closeable {

    /**
     * Id of the authenticated application, fetched from the server during construction.
     */
    private Long appId;

    private final RequestService requestService;

    /**
     * Init PowerJobClient with the full client config.
     * Authenticates the app against the server immediately and fails fast when auth fails.
     *
     * @param config client config; addressList and appName are mandatory
     */
    public PowerJobClient(ClientConfig config) {
        List<String> addressList = config.getAddressList();
        String appName = config.getAppName();

        CommonUtils.requireNonNull(addressList, "addressList can't be null!");
        CommonUtils.requireNonNull(appName, "appName can't be null");

        this.requestService = new ClusterRequestServiceOkHttp3Impl(config);

        // authenticate: exchange appName + md5(password) for the appId
        AppAuthRequest appAuthRequest = new AppAuthRequest();
        appAuthRequest.setAppName(appName);
        appAuthRequest.setEncryptedPassword(DigestUtils.md5(config.getPassword()));
        appAuthRequest.setEncryptType(EncryptType.MD5.getCode());
        String assertResponse = requestService.request(OpenAPIConstant.AUTH_APP, PowerRequestBody.newJsonRequestBody(appAuthRequest));
        if (StringUtils.isNotEmpty(assertResponse)) {
            ResultDTO<AppAuthResult> resultDTO = JSON.parseObject(assertResponse, APP_AUTH_RESULT_TYPE);
            if (resultDTO.isSuccess()) {
                appId = resultDTO.getData().getAppId();
            } else {
                throw new PowerJobException(resultDTO.getMessage());
            }
        }
        if (appId == null) {
            throw new PowerJobException("appId is null, please check your config");
        }
        log.info("[PowerJobClient] [INIT] {}'s PowerJobClient bootstrap successfully", appName);
    }

    /**
     * Init PowerJobClient with domain, appName and password.
     *
     * @param domain   like powerjob-server.apple-inc.com (Intranet Domain)
     * @param appName  name of the application
     * @param password password of the application
     */
    public PowerJobClient(String domain, String appName, String password) {
        this(new ClientConfig().setAppName(appName).setPassword(password).setAddressList(Lists.newArrayList(domain)));
    }

    /**
     * Init PowerJobClient with server address, appName and password.
     *
     * @param addressList IP:Port address list, like 192.168.1.1:7700
     * @param appName     name of the application
     * @param password    password of the application
     */
    public PowerJobClient(List<String> addressList, String appName, String password) {
        this(new ClientConfig().setAppName(appName).setPassword(password).setAddressList(addressList));
    }

    /* ************* shared request helpers (all OpenAPI calls have one of two shapes) ************* */

    /**
     * Build the form-parameter map shared by most OpenAPI calls, pre-filled with the appId.
     */
    private Map<String, String> newAppScopedParam() {
        Map<String, String> param = Maps.newHashMap();
        param.put("appId", appId.toString());
        return param;
    }

    /**
     * POST a form-encoded request and parse the standard {@code ResultDTO} envelope.
     */
    private <T> ResultDTO<T> postForm(String path, Map<String, String> param, TypeReference<ResultDTO<T>> type) {
        String response = requestService.request(path, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(response, type);
    }

    /**
     * POST a JSON request and parse the standard {@code ResultDTO} envelope.
     */
    private <T> ResultDTO<T> postJson(String path, Object payload, TypeReference<ResultDTO<T>> type) {
        String response = requestService.request(path, PowerRequestBody.newJsonRequestBody(payload));
        return JSON.parseObject(response, type);
    }

    /* ************* Job API list ************* */

    /**
     * Save one Job.
     * When an ID exists in SaveJobInfoRequest, it is an update operation. Otherwise, it is a create operation.
     *
     * @param request Job meta info
     * @return jobId
     */
    @Override
    public ResultDTO<Long> saveJob(SaveJobInfoRequest request) {
        request.setAppId(appId);
        return postJson(OpenAPIConstant.SAVE_JOB, request, LONG_RESULT_TYPE);
    }

    /**
     * Copy one Job.
     *
     * @param jobId Job id
     * @return Id of job copy
     */
    @Override
    public ResultDTO<Long> copyJob(Long jobId) {
        Map<String, String> param = newAppScopedParam();
        param.put("jobId", jobId.toString());
        return postForm(OpenAPIConstant.COPY_JOB, param, LONG_RESULT_TYPE);
    }

    /**
     * Export one Job as a SaveJobInfoRequest that can be re-imported via {@link #saveJob}.
     *
     * @param jobId Job id
     * @return job meta info as a save request
     */
    @Override
    public ResultDTO<SaveJobInfoRequest> exportJob(Long jobId) {
        Map<String, String> param = newAppScopedParam();
        param.put("jobId", jobId.toString());
        return postForm(OpenAPIConstant.EXPORT_JOB, param, SAVE_JOB_INFO_REQUEST_RESULT_TYPE);
    }

    /**
     * Query JobInfo by jobId.
     *
     * @param jobId jobId
     * @return Job meta info
     */
    @Override
    public ResultDTO<JobInfoDTO> fetchJob(Long jobId) {
        Map<String, String> param = newAppScopedParam();
        param.put("jobId", jobId.toString());
        return postForm(OpenAPIConstant.FETCH_JOB, param, JOB_RESULT_TYPE);
    }

    /**
     * Query all JobInfo of the current app.
     *
     * @return All JobInfo
     */
    @Override
    public ResultDTO<List<JobInfoDTO>> fetchAllJob() {
        Map<String, String> param = newAppScopedParam();
        return postForm(OpenAPIConstant.FETCH_ALL_JOB, param, LIST_JOB_RESULT_TYPE);
    }

    /**
     * Query JobInfo by PowerQuery.
     *
     * @param powerQuery JobQuery
     * @return matching JobInfo list
     */
    @Override
    public ResultDTO<List<JobInfoDTO>> queryJob(JobInfoQuery powerQuery) {
        powerQuery.setAppIdEq(appId);
        return postJson(OpenAPIConstant.QUERY_JOB, powerQuery, LIST_JOB_RESULT_TYPE);
    }

    /**
     * Disable one Job by jobId.
     *
     * @param jobId jobId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> disableJob(Long jobId) {
        Map<String, String> param = newAppScopedParam();
        param.put("jobId", jobId.toString());
        return postForm(OpenAPIConstant.DISABLE_JOB, param, VOID_RESULT_TYPE);
    }

    /**
     * Enable one Job by jobId.
     *
     * @param jobId jobId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> enableJob(Long jobId) {
        Map<String, String> param = newAppScopedParam();
        param.put("jobId", jobId.toString());
        return postForm(OpenAPIConstant.ENABLE_JOB, param, VOID_RESULT_TYPE);
    }

    /**
     * Delete one Job by jobId.
     *
     * @param jobId jobId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> deleteJob(Long jobId) {
        Map<String, String> param = newAppScopedParam();
        param.put("jobId", jobId.toString());
        return postForm(OpenAPIConstant.DELETE_JOB, param, VOID_RESULT_TYPE);
    }

    /**
     * Run a job once.
     *
     * @param jobId          ID of the job to be run
     * @param instanceParams Runtime parameters of the job (TaskContext#instanceParams); skipped when empty
     * @param delayMS        Delay time (milliseconds)
     * @return instanceId
     */
    @Override
    public ResultDTO<Long> runJob(Long jobId, String instanceParams, long delayMS) {
        Map<String, String> param = newAppScopedParam();
        param.put("jobId", jobId.toString());
        param.put("delay", String.valueOf(delayMS));
        if (StringUtils.isNotEmpty(instanceParams)) {
            param.put("instanceParams", instanceParams);
        }
        return postForm(OpenAPIConstant.RUN_JOB, param, LONG_RESULT_TYPE);
    }

    /**
     * Run a job immediately with no instance parameters.
     *
     * @param jobId ID of the job to be run
     * @return instanceId
     */
    public ResultDTO<Long> runJob(Long jobId) {
        return runJob(jobId, null, 0);
    }

    /* ************* Instance API list ************* */

    /**
     * Stop one job instance.
     *
     * @param instanceId instanceId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> stopInstance(Long instanceId) {
        Map<String, String> param = newAppScopedParam();
        param.put("instanceId", instanceId.toString());
        return postForm(OpenAPIConstant.STOP_INSTANCE, param, VOID_RESULT_TYPE);
    }

    /**
     * Cancel a job instance that is not yet running.
     * Notice: there must be a time interval between the call and the instance's expected
     * execution time, otherwise reliability is not guaranteed.
     *
     * @param instanceId instanceId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> cancelInstance(Long instanceId) {
        Map<String, String> param = newAppScopedParam();
        param.put("instanceId", instanceId.toString());
        return postForm(OpenAPIConstant.CANCEL_INSTANCE, param, VOID_RESULT_TYPE);
    }

    /**
     * Retry a failed job instance.
     * Notice: only job instances with a completion status (success, failure, manually stopped,
     * cancelled) can be retried; retries of instances within workflows are not supported yet.
     *
     * @param instanceId instanceId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> retryInstance(Long instanceId) {
        Map<String, String> param = newAppScopedParam();
        param.put("instanceId", instanceId.toString());
        return postForm(OpenAPIConstant.RETRY_INSTANCE, param, VOID_RESULT_TYPE);
    }

    /**
     * Query the status of a job instance.
     *
     * @param instanceId instanceId
     * @return {@link InstanceStatus} code
     */
    @Override
    public ResultDTO<Integer> fetchInstanceStatus(Long instanceId) {
        Map<String, String> param = newAppScopedParam();
        param.put("instanceId", instanceId.toString());
        return postForm(OpenAPIConstant.FETCH_INSTANCE_STATUS, param, INTEGER_RESULT_TYPE);
    }

    /**
     * Query the detail of a job instance.
     *
     * @param instanceId instanceId
     * @return instance detail
     */
    @Override
    public ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId) {
        Map<String, String> param = newAppScopedParam();
        param.put("instanceId", instanceId.toString());
        return postForm(OpenAPIConstant.FETCH_INSTANCE_INFO, param, INSTANCE_RESULT_TYPE);
    }

    /**
     * Page-query job instances of the current app.
     *
     * @param instancePageQuery page query; appId is forced to the current app
     * @return one page of instance details
     */
    @Override
    public ResultDTO<PageResult<InstanceInfoDTO>> queryInstanceInfo(InstancePageQuery instancePageQuery) {
        instancePageQuery.setAppIdEq(appId);
        return postJson(OpenAPIConstant.QUERY_INSTANCE, instancePageQuery, PAGE_INSTANCE_RESULT_TYPE);
    }

    /* ************* Workflow API list ************* */

    /**
     * Save one workflow.
     * When an ID exists in SaveWorkflowRequest, it is an update operation. Otherwise, it is a create operation.
     *
     * @param request Workflow meta info
     * @return workflowId
     */
    @Override
    public ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request) {
        request.setAppId(appId);
        // pitfall: serializing with FastJSON made the server receive a null pEWorkflowDAG, so JsonUtils is used here
        String json = JsonUtils.toJSONStringUnsafe(request);
        return postJson(OpenAPIConstant.SAVE_WORKFLOW, json, LONG_RESULT_TYPE);
    }

    /**
     * Copy one workflow.
     *
     * @param workflowId Workflow id
     * @return Id of workflow copy
     */
    @Override
    public ResultDTO<Long> copyWorkflow(Long workflowId) {
        Map<String, String> param = newAppScopedParam();
        param.put("workflowId", workflowId.toString());
        return postForm(OpenAPIConstant.COPY_WORKFLOW, param, LONG_RESULT_TYPE);
    }

    /**
     * Save workflow nodes.
     *
     * @param requestList Node info list of Workflow
     * @return saved node info list
     */
    @Override
    public ResultDTO<List<WorkflowNodeInfoDTO>> saveWorkflowNode(List<SaveWorkflowNodeRequest> requestList) {
        for (SaveWorkflowNodeRequest saveWorkflowNodeRequest : requestList) {
            saveWorkflowNodeRequest.setAppId(appId);
        }
        // serialized via JsonUtils for the same reason as saveWorkflow
        String json = JsonUtils.toJSONStringUnsafe(requestList);
        return postJson(OpenAPIConstant.SAVE_WORKFLOW_NODE, json, WF_NODE_LIST_RESULT_TYPE);
    }

    /**
     * Query Workflow by workflowId.
     *
     * @param workflowId workflowId
     * @return Workflow meta info
     */
    @Override
    public ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId) {
        Map<String, String> param = newAppScopedParam();
        param.put("workflowId", workflowId.toString());
        return postForm(OpenAPIConstant.FETCH_WORKFLOW, param, WF_RESULT_TYPE);
    }

    /**
     * Disable Workflow by workflowId.
     *
     * @param workflowId workflowId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> disableWorkflow(Long workflowId) {
        Map<String, String> param = newAppScopedParam();
        param.put("workflowId", workflowId.toString());
        return postForm(OpenAPIConstant.DISABLE_WORKFLOW, param, VOID_RESULT_TYPE);
    }

    /**
     * Enable Workflow by workflowId.
     *
     * @param workflowId workflowId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> enableWorkflow(Long workflowId) {
        Map<String, String> param = newAppScopedParam();
        param.put("workflowId", workflowId.toString());
        return postForm(OpenAPIConstant.ENABLE_WORKFLOW, param, VOID_RESULT_TYPE);
    }

    /**
     * Delete Workflow by workflowId.
     *
     * @param workflowId workflowId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> deleteWorkflow(Long workflowId) {
        Map<String, String> param = newAppScopedParam();
        param.put("workflowId", workflowId.toString());
        return postForm(OpenAPIConstant.DELETE_WORKFLOW, param, VOID_RESULT_TYPE);
    }

    /**
     * Run a workflow once.
     *
     * @param workflowId workflowId
     * @param initParams workflow startup parameters; skipped when empty
     * @param delayMS    Delay time (milliseconds)
     * @return workflow instanceId
     */
    @Override
    public ResultDTO<Long> runWorkflow(Long workflowId, String initParams, long delayMS) {
        Map<String, String> param = newAppScopedParam();
        param.put("workflowId", workflowId.toString());
        param.put("delay", String.valueOf(delayMS));
        if (StringUtils.isNotEmpty(initParams)) {
            param.put("initParams", initParams);
        }
        return postForm(OpenAPIConstant.RUN_WORKFLOW, param, LONG_RESULT_TYPE);
    }

    /**
     * Run a workflow immediately with no startup parameters.
     *
     * @param workflowId workflowId
     * @return workflow instanceId
     */
    public ResultDTO<Long> runWorkflow(Long workflowId) {
        return runWorkflow(workflowId, null, 0);
    }

    /* ************* Workflow Instance API list ************* */

    /**
     * Stop one workflow instance.
     *
     * @param wfInstanceId workflow instanceId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId) {
        Map<String, String> param = newAppScopedParam();
        param.put("wfInstanceId", wfInstanceId.toString());
        return postForm(OpenAPIConstant.STOP_WORKFLOW_INSTANCE, param, VOID_RESULT_TYPE);
    }

    /**
     * Retry one workflow instance.
     *
     * @param wfInstanceId workflow instanceId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> retryWorkflowInstance(Long wfInstanceId) {
        Map<String, String> param = newAppScopedParam();
        param.put("wfInstanceId", wfInstanceId.toString());
        return postForm(OpenAPIConstant.RETRY_WORKFLOW_INSTANCE, param, VOID_RESULT_TYPE);
    }

    /**
     * Mark the workflow node as success.
     *
     * @param wfInstanceId workflow instanceId
     * @param nodeId       node id
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> markWorkflowNodeAsSuccess(Long wfInstanceId, Long nodeId) {
        Map<String, String> param = newAppScopedParam();
        param.put("wfInstanceId", wfInstanceId.toString());
        param.put("nodeId", nodeId.toString());
        return postForm(OpenAPIConstant.MARK_WORKFLOW_NODE_AS_SUCCESS, param, VOID_RESULT_TYPE);
    }

    /**
     * Query the detail of a workflow instance.
     *
     * @param wfInstanceId workflow instanceId
     * @return detail about the workflow instance
     */
    @Override
    public ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId) {
        Map<String, String> param = newAppScopedParam();
        param.put("wfInstanceId", wfInstanceId.toString());
        return postForm(OpenAPIConstant.FETCH_WORKFLOW_INSTANCE_INFO, param, WF_INSTANCE_RESULT_TYPE);
    }

    /**
     * Release the underlying request service resources (e.g. HTTP connection pool).
     */
    @Override
    public void close() throws IOException {
        requestService.close();
    }
}

View File

@ -0,0 +1,43 @@
package tech.powerjob.client;
import com.alibaba.fastjson.TypeReference;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.response.*;
import java.util.List;
/**
* TypeReference store.
*
* @author tjq
* @since 11/7/20
*/
public class TypeStore {

    // fastjson needs explicit TypeReference instances to deserialize the generic
    // ResultDTO<T> envelopes; they are cached here as shared constants so each
    // client call site can reuse them instead of allocating anonymous subclasses.

    public static final TypeReference<ResultDTO<AppAuthResult>> APP_AUTH_RESULT_TYPE = new TypeReference<ResultDTO<AppAuthResult>>(){};
    public static final TypeReference<ResultDTO<Void>> VOID_RESULT_TYPE = new TypeReference<ResultDTO<Void>>(){};
    public static final TypeReference<ResultDTO<Integer>> INTEGER_RESULT_TYPE = new TypeReference<ResultDTO<Integer>>(){};
    public static final TypeReference<ResultDTO<Long>> LONG_RESULT_TYPE = new TypeReference<ResultDTO<Long>>(){};
    public static final TypeReference<ResultDTO<JobInfoDTO>> JOB_RESULT_TYPE = new TypeReference<ResultDTO<JobInfoDTO>>(){};
    public static final TypeReference<ResultDTO<SaveJobInfoRequest>> SAVE_JOB_INFO_REQUEST_RESULT_TYPE = new TypeReference<ResultDTO<SaveJobInfoRequest>>(){};
    public static final TypeReference<ResultDTO<List<JobInfoDTO>>> LIST_JOB_RESULT_TYPE = new TypeReference<ResultDTO<List<JobInfoDTO>>>(){};
    public static final TypeReference<ResultDTO<InstanceInfoDTO>> INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<InstanceInfoDTO>>() {};
    public static final TypeReference<ResultDTO<List<InstanceInfoDTO>>> LIST_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<List<InstanceInfoDTO>>>(){};
    public static final TypeReference<ResultDTO<PageResult<InstanceInfoDTO>>> PAGE_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<PageResult<InstanceInfoDTO>>>(){};
    public static final TypeReference<ResultDTO<WorkflowInfoDTO>> WF_RESULT_TYPE = new TypeReference<ResultDTO<WorkflowInfoDTO>>() {};
    public static final TypeReference<ResultDTO<WorkflowInstanceInfoDTO>> WF_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<WorkflowInstanceInfoDTO>>() {};
    public static final TypeReference<ResultDTO<List<WorkflowNodeInfoDTO>>> WF_NODE_LIST_RESULT_TYPE = new TypeReference<ResultDTO<List<WorkflowNodeInfoDTO>>> () {};
}

View File

@ -0,0 +1,28 @@
package tech.powerjob.client.common;
import lombok.Getter;
/**
* Protocol
*
* @author tjq
* @since 2024/2/20
*/
@Getter
public enum Protocol {
HTTP("http"),
HTTPS("https");
private final String protocol;
Protocol(String protocol) {
this.protocol = protocol;
}
@Override
public String toString() {
return protocol;
}
}

View File

@ -0,0 +1,19 @@
package tech.powerjob.client.extension;
import java.util.List;
/**
* 扩展服务
*
* @author tjq
* @since 2024/8/11
*/
public interface ClientExtension {

    /**
     * Provide server addresses dynamically, for scenarios where the server is deployed on a dynamic cluster.
     *
     * @param context context of the invocation
     * @return address list; same format requirement as ClientConfig#addressList
     */
    List<String> addressProvider(ExtensionContext context);
}

View File

@ -0,0 +1,10 @@
package tech.powerjob.client.extension;
/**
* 扩展上下文
*
* @author tjq
* @since 2024/8/11
*/
public class ExtensionContext {
    // intentionally empty for now; fields can be added later without
    // changing the ClientExtension#addressProvider signature
}

View File

@ -0,0 +1,39 @@
package tech.powerjob.client.module;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
import java.util.Map;
/**
* App 鉴权请求
*
* @author tjq
* @since 2024/2/19
*/
@Getter
@Setter
@ToString
@Getter
@Setter
@ToString
public class AppAuthRequest implements Serializable {

    /**
     * Name of the application requesting authentication.
     */
    private String appName;

    /**
     * Encrypted password (algorithm named by {@link #encryptType}).
     */
    private String encryptedPassword;

    /**
     * Encryption type used for the password, e.g. md5.
     */
    private String encryptType;

    /**
     * Extra parameters, kept for developers to pass additional data.
     */
    private Map<String, Object> extra;
}

View File

@ -0,0 +1,30 @@
package tech.powerjob.client.module;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
import java.util.Map;
/**
* App 鉴权响应
*
* @author tjq
* @since 2024/2/21
*/
@Getter
@Setter
@ToString
@Getter
@Setter
@ToString
public class AppAuthResult implements Serializable {

    // id of the authenticated application
    private Long appId;

    // auth token issued by the server
    private String token;

    /**
     * Extra parameters;
     * developers with security requirements can extend via this map.
     */
    private Map<String, Object> extra;
}

View File

@ -0,0 +1,26 @@
package tech.powerjob.client.service;
import lombok.Data;
import lombok.experimental.Accessors;
import java.io.Serializable;
import java.util.Map;
/**
* HTTP 响应
*
* @author tjq
* @since 2024/8/10
*/
@Data
@Accessors(chain = true)
@Data
@Accessors(chain = true)
public class HttpResponse implements Serializable {

    // whether the call is considered successful by the request service
    private boolean success;

    // response code — presumably the HTTP status code; confirm in the request service impl
    private int code;

    // raw response body
    private String response;

    // response headers
    private Map<String, String> headers;
}

View File

@ -0,0 +1,47 @@
package tech.powerjob.client.service;
import com.google.common.collect.Maps;
import lombok.Getter;
import tech.powerjob.common.enums.MIME;
import java.util.Map;
/**
* 请求体
*
* @author tjq
* @since 2024/8/10
*/
@Getter
public class PowerRequestBody {
private MIME mime;
private Object payload;
private final Map<String, String> headers = Maps.newHashMap();
private PowerRequestBody() {
}
public static PowerRequestBody newJsonRequestBody(Object data) {
PowerRequestBody powerRequestBody = new PowerRequestBody();
powerRequestBody.mime = MIME.APPLICATION_JSON;
powerRequestBody.payload = data;
return powerRequestBody;
}
public static PowerRequestBody newFormRequestBody(Map<String, String> form) {
PowerRequestBody powerRequestBody = new PowerRequestBody();
powerRequestBody.mime = MIME.APPLICATION_FORM;
powerRequestBody.payload = form;
return powerRequestBody;
}
public void addHeaders(Map<String, String> hs) {
if (hs == null || hs.isEmpty()) {
return;
}
this.headers.putAll(hs);
}
}

View File

@ -0,0 +1,15 @@
package tech.powerjob.client.service;
import java.io.Closeable;
/**
 * Request service: sends an OpenAPI request to the server cluster.
 * Implementations own transport resources, hence {@link Closeable}.
 *
 * @author tjq
 * @since 2024/2/20
 */
public interface RequestService extends Closeable {

    /**
     * Executes a request against the given OpenAPI path.
     *
     * @param path             request path, relative to the OpenAPI root
     * @param powerRequestBody request body, including payload, MIME type and headers
     * @return raw response body as a string
     */
    String request(String path, PowerRequestBody powerRequestBody);
}

View File

@ -0,0 +1,107 @@
package tech.powerjob.client.service.impl;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Maps;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.TypeStore;
import tech.powerjob.client.module.AppAuthRequest;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.enums.EncryptType;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.utils.DigestUtils;
import tech.powerjob.common.utils.MapUtils;
import java.util.Map;
/**
 * Adds app-authentication on top of the cluster request service: lazily
 * authenticates on first use, attaches auth headers to every request, and
 * transparently re-authenticates once when the server reports a stale auth status.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Slf4j
abstract class AppAuthClusterRequestService extends ClusterRequestService {

    /**
     * Cached auth result (appId + token); refreshed when the server rejects it.
     * NOTE(review): access is not synchronized — concurrent callers may trigger
     * redundant refreshes; harmless (last write wins) but worth confirming.
     */
    protected AppAuthResult appAuthResult;

    public AppAuthClusterRequestService(ClientConfig config) {
        super(config);
    }

    @Override
    public String request(String path, PowerRequestBody powerRequestBody) {
        // Authenticate first if no auth info is cached yet.
        if (appAuthResult == null) {
            refreshAppAuthResult();
        }
        HttpResponse httpResponse = doRequest(path, powerRequestBody);
        // If the server marked the auth as valid, the request succeeded: return directly.
        String authStatus = MapUtils.getString(httpResponse.getHeaders(), OpenAPIConstant.RESPONSE_HEADER_AUTH_STATUS);
        if (Boolean.TRUE.toString().equalsIgnoreCase(authStatus)) {
            return httpResponse.getResponse();
        }
        // Otherwise the cached auth info is stale: refresh it and retry exactly once.
        log.warn("[PowerJobClient] auth failed[authStatus: {}], try to refresh the auth info", authStatus);
        refreshAppAuthResult();
        httpResponse = doRequest(path, powerRequestBody);
        // Return as long as the request itself did not fail; on auth failure the server
        // guarantees a non-null response body describing the auth error.
        return httpResponse.getResponse();
    }

    /**
     * Sends the request with auth headers attached and fails fast on HTTP-level errors.
     *
     * @throws PowerJobException when the HTTP response is not successful
     */
    private HttpResponse doRequest(String path, PowerRequestBody powerRequestBody) {
        // Attach authentication headers (appId + token).
        Map<String, String> authHeaders = buildAuthHeader();
        powerRequestBody.addHeaders(authHeaders);
        HttpResponse httpResponse = clusterHaRequest(path, powerRequestBody);
        // Any unsuccessful HTTP response is treated as a hard error.
        if (!httpResponse.isSuccess()) {
            throw new PowerJobException("REMOTE_SERVER_INNER_EXCEPTION");
        }
        return httpResponse;
    }

    private Map<String, String> buildAuthHeader() {
        Map<String, String> authHeader = Maps.newHashMap();
        authHeader.put(OpenAPIConstant.REQUEST_HEADER_APP_ID, String.valueOf(appAuthResult.getAppId()));
        authHeader.put(OpenAPIConstant.REQUEST_HEADER_ACCESS_TOKEN, appAuthResult.getToken());
        return authHeader;
    }

    /**
     * Performs the auth handshake against the server and caches the result.
     *
     * @throws PowerJobException when the HTTP call fails or the server rejects the credentials
     */
    @SneakyThrows
    private void refreshAppAuthResult() {
        AppAuthRequest appAuthRequest = buildAppAuthRequest();
        HttpResponse httpResponse = clusterHaRequest(OpenAPIConstant.AUTH_APP, PowerRequestBody.newJsonRequestBody(appAuthRequest));
        if (!httpResponse.isSuccess()) {
            throw new PowerJobException("AUTH_APP_EXCEPTION!");
        }
        ResultDTO<AppAuthResult> authResultDTO = JSONObject.parseObject(httpResponse.getResponse(), TypeStore.APP_AUTH_RESULT_TYPE);
        if (!authResultDTO.isSuccess()) {
            throw new PowerJobException("AUTH_FAILED_" + authResultDTO.getMessage());
        }
        // fix: a successful refresh is normal operation — log at INFO, not WARN
        log.info("[PowerJobClient] refresh auth info successfully!");
        this.appAuthResult = authResultDTO.getData();
    }

    /**
     * Builds the auth request from client config; currently digests the password with MD5.
     */
    protected AppAuthRequest buildAppAuthRequest() {
        AppAuthRequest appAuthRequest = new AppAuthRequest();
        appAuthRequest.setAppName(config.getAppName());
        appAuthRequest.setEncryptedPassword(DigestUtils.md5(config.getPassword()));
        appAuthRequest.setEncryptType(EncryptType.MD5.getCode());
        return appAuthRequest;
    }
}

View File

@ -0,0 +1,140 @@
package tech.powerjob.client.service.impl;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.extension.ClientExtension;
import tech.powerjob.client.extension.ExtensionContext;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.client.service.RequestService;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.utils.CollectionUtils;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.security.cert.X509Certificate;
import java.util.List;
import java.util.Objects;
/**
 * Cluster request service: encapsulates the common networking logic, in
 * particular HA failover across the configured server address list.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Slf4j
abstract class ClusterRequestService implements RequestService {

    protected final ClientConfig config;

    /**
     * Current address — the address of the last successful request.
     */
    protected String currentAddress;

    /**
     * URL format: protocol://host[:port]/openapi-root/sub-path
     */
    protected static final String URL_PATTERN = "%s://%s%s%s";

    /**
     * Default timeout in seconds, used when the config does not specify one.
     */
    protected static final Integer DEFAULT_TIMEOUT_SECONDS = 2;

    protected static final int HTTP_SUCCESS_CODE = 200;

    public ClusterRequestService(ClientConfig config) {
        this.config = config;
        // start with the first configured address; assumes the list is non-empty
        this.currentAddress = config.getAddressList().get(0);
    }

    /**
     * Performs one concrete HTTP request (implemented by the transport layer).
     *
     * @param url  full request URL
     * @param body request body
     * @return response
     * @throws IOException on transport failure
     */
    protected abstract HttpResponse sendHttpRequest(String url, PowerRequestBody body) throws IOException;

    /**
     * HA request: tries {@link #currentAddress} first, then fails over through
     * the remaining addresses; the first address that answers becomes the new
     * {@link #currentAddress}. Order matters — do not reorder the attempts.
     *
     * @param path             request PATH
     * @param powerRequestBody request body
     * @return response from the first reachable server
     * @throws PowerJobException when no server in the list is reachable
     */
    protected HttpResponse clusterHaRequest(String path, PowerRequestBody powerRequestBody) {
        // Try the last-known-good address first.
        String url = getUrl(path, currentAddress);
        try {
            return sendHttpRequest(url, powerRequestBody);
        } catch (IOException e) {
            log.warn("[ClusterRequestService] request url:{} failed, reason is {}.", url, e.toString());
        }
        List<String> addressList = fetchAddressList();
        // Failed — start retrying against the other addresses.
        for (String addr : addressList) {
            // skip the address that just failed
            if (Objects.equals(addr, currentAddress)) {
                continue;
            }
            url = getUrl(path, addr);
            try {
                HttpResponse res = sendHttpRequest(url, powerRequestBody);
                log.warn("[ClusterRequestService] server change: from({}) -> to({}).", currentAddress, addr);
                currentAddress = addr;
                return res;
            } catch (IOException e) {
                log.warn("[ClusterRequestService] request url:{} failed, reason is {}.", url, e.toString());
            }
        }
        log.error("[ClusterRequestService] do post for path: {} failed because of no server available in {}.", path, addressList);
        throw new PowerJobException("no server available when send post request");
    }

    /**
     * Resolves the server address list: a non-empty list from the client
     * extension (if any) takes precedence over the static config.
     */
    private List<String> fetchAddressList() {
        ClientExtension clientExtension = config.getClientExtension();
        if (clientExtension != null) {
            List<String> addressList = clientExtension.addressProvider(new ExtensionContext());
            if (!CollectionUtils.isEmpty(addressList)) {
                return addressList;
            }
        }
        return config.getAddressList();
    }

    /**
     * Trust manager that performs NO certificate verification.
     * SECURITY NOTE: this deliberately disables X.509 chain validation for
     * HTTPS connections — only acceptable for trusted internal networks or
     * self-signed test setups; it is vulnerable to man-in-the-middle attacks.
     */
    protected static class NoVerifyX509TrustManager implements X509TrustManager {
        @Override
        public void checkClientTrusted(X509Certificate[] arg0, String arg1) {
        }
        @Override
        public void checkServerTrusted(X509Certificate[] arg0, String arg1) {
            // intentionally no verification
        }
        @Override
        public X509Certificate[] getAcceptedIssuers() {
            return new X509Certificate[0];
        }
    }

    /**
     * Builds the full request URL for the given path and server address.
     */
    private String getUrl(String path, String address) {
        String protocol = config.getProtocol().getProtocol();
        return String.format(URL_PATTERN, protocol, address, OpenAPIConstant.WEB_PATH, path);
    }
}

View File

@ -0,0 +1,148 @@
package tech.powerjob.client.service.impl;
import com.google.common.collect.Maps;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import okhttp3.*;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.common.Protocol;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.common.OmsConstant;
import tech.powerjob.common.serialize.JsonUtils;
import javax.net.ssl.*;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
 * OkHttp3-based implementation of the cluster request service.
 * Chooses a plain or no-verify-TLS client depending on the configured protocol.
 *
 * @author tjq
 * @since 2024/2/20
 */
@Slf4j
public class ClusterRequestServiceOkHttp3Impl extends AppAuthClusterRequestService {

    private final OkHttpClient okHttpClient;

    public ClusterRequestServiceOkHttp3Impl(ClientConfig config) {
        super(config);
        // Initialize the HTTP client; HTTPS uses a client that skips certificate checks.
        if (Protocol.HTTPS.equals(config.getProtocol())) {
            okHttpClient = initHttpsNoVerifyClient();
        } else {
            okHttpClient = initHttpClient();
        }
    }

    @Override
    protected HttpResponse sendHttpRequest(String url, PowerRequestBody powerRequestBody) throws IOException {
        // Attach the common headers from config.
        powerRequestBody.addHeaders(config.getDefaultHeaders());
        Object obj = powerRequestBody.getPayload();
        RequestBody requestBody;
        switch (powerRequestBody.getMime()) {
            case APPLICATION_JSON:
                MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
                String body = obj instanceof String ? (String) obj : JsonUtils.toJSONStringUnsafe(obj);
                requestBody = RequestBody.create(jsonType, body);
                break;
            case APPLICATION_FORM:
                FormBody.Builder formBuilder = new FormBody.Builder();
                // form payloads are always built via PowerRequestBody.newFormRequestBody(Map<String,String>)
                @SuppressWarnings("unchecked")
                Map<String, String> formObj = (Map<String, String>) obj;
                formObj.forEach(formBuilder::add);
                requestBody = formBuilder.build();
                break;  // fix: explicit break so a future case cannot silently fall through
            default:
                // fix: previously an unknown MIME left requestBody null and NPE'd inside OkHttp
                throw new IllegalArgumentException("unsupported mime type: " + powerRequestBody.getMime());
        }
        Request request = new Request.Builder()
                .post(requestBody)
                .headers(Headers.of(powerRequestBody.getHeaders()))
                .url(url)
                .build();
        // try-with-resources closes the response (and its body) even on exceptions
        try (Response response = okHttpClient.newCall(request).execute()) {
            int code = response.code();
            HttpResponse httpResponse = new HttpResponse()
                    .setCode(code)
                    .setSuccess(code == HTTP_SUCCESS_CODE);
            ResponseBody body = response.body();
            if (body != null) {
                httpResponse.setResponse(body.string());
            }
            Headers respHeaders = response.headers();
            Set<String> headerNames = respHeaders.names();
            Map<String, String> respHeaderMap = Maps.newHashMap();
            headerNames.forEach(hdKey -> respHeaderMap.put(hdKey, respHeaders.get(hdKey)));
            httpResponse.setHeaders(respHeaderMap);
            return httpResponse;
        }
    }

    @SneakyThrows
    private OkHttpClient initHttpClient() {
        OkHttpClient.Builder okHttpBuilder = commonOkHttpBuilder();
        return okHttpBuilder.build();
    }

    /**
     * Builds an HTTPS client that skips both certificate and hostname verification.
     * SECURITY NOTE: only suitable for trusted networks / self-signed certs.
     */
    @SneakyThrows
    private OkHttpClient initHttpsNoVerifyClient() {
        X509TrustManager trustManager = new NoVerifyX509TrustManager();
        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(null, new TrustManager[]{trustManager}, new SecureRandom());
        SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
        OkHttpClient.Builder okHttpBuilder = commonOkHttpBuilder();
        // No certificate verification.
        okHttpBuilder.sslSocketFactory(sslSocketFactory, trustManager);
        // No hostname verification either.
        okHttpBuilder.hostnameVerifier((String hostname, SSLSession session) -> true);
        return okHttpBuilder.build();
    }

    /**
     * Common builder with read/write/connect/call timeouts taken from config,
     * falling back to {@link #DEFAULT_TIMEOUT_SECONDS}.
     */
    private OkHttpClient.Builder commonOkHttpBuilder() {
        return new OkHttpClient.Builder()
                // read timeout
                .readTimeout(Optional.ofNullable(config.getReadTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // write timeout
                .writeTimeout(Optional.ofNullable(config.getWriteTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // connect timeout
                .connectTimeout(Optional.ofNullable(config.getConnectionTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // NOTE(review): call timeout reuses the connection timeout — confirm this is intended
                .callTimeout(Optional.ofNullable(config.getConnectionTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS);
    }

    @Override
    public void close() throws IOException {
        // Shut down the dispatcher's executor.
        okHttpClient.dispatcher().executorService().shutdown();
        // Evict all pooled connections.
        okHttpClient.connectionPool().evictAll();
        // Close the cache, if one was configured.
        Cache cache = okHttpClient.cache();
        if (cache != null) {
            cache.close();
        }
    }
}

View File

@ -1,125 +0,0 @@
import com.alibaba.fastjson.JSONObject;
import com.github.kfcfans.powerjob.common.ExecuteType;
import com.github.kfcfans.powerjob.common.ProcessorType;
import com.github.kfcfans.powerjob.common.TimeExpressionType;
import com.github.kfcfans.powerjob.common.request.http.SaveJobInfoRequest;
import com.github.kfcfans.powerjob.common.response.JobInfoDTO;
import com.github.kfcfans.powerjob.common.response.ResultDTO;
import com.github.kfcfans.powerjob.client.OhMyClient;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.util.concurrent.TimeUnit;
/**
 * Test cases for the legacy {@code OhMyClient} OpenAPI client.
 * NOTE: these are integration tests — they require a live server at 127.0.0.1:7700
 * and use hard-coded job/instance IDs.
 *
 * @author tjq
 * @since 2020/4/15
 */
public class TestClient {
    private static OhMyClient ohMyClient;
    public static final long JOB_ID = 4L;
    @BeforeAll
    public static void initClient() throws Exception {
        ohMyClient = new OhMyClient("127.0.0.1:7700", "powerjob-agent-test", "123");
    }
    @Test
    public void testSaveJob() throws Exception {
        SaveJobInfoRequest newJobInfo = new SaveJobInfoRequest();
        newJobInfo.setId(JOB_ID);
        newJobInfo.setJobName("omsOpenAPIJobccccc");
        newJobInfo.setJobDescription("tes OpenAPI");
        newJobInfo.setJobParams("{'aa':'bb'}");
        newJobInfo.setTimeExpressionType(TimeExpressionType.CRON);
        newJobInfo.setTimeExpression("0 0 * * * ? ");
        newJobInfo.setExecuteType(ExecuteType.STANDALONE);
        newJobInfo.setProcessorType(ProcessorType.EMBEDDED_JAVA);
        newJobInfo.setProcessorInfo("com.github.kfcfans.powerjob.samples.processors.StandaloneProcessorDemo");
        newJobInfo.setDesignatedWorkers("");
        newJobInfo.setMinCpuCores(1.1);
        newJobInfo.setMinMemorySpace(1.2);
        newJobInfo.setMinDiskSpace(1.3);
        ResultDTO<Long> resultDTO = ohMyClient.saveJob(newJobInfo);
        System.out.println(JSONObject.toJSONString(resultDTO));
    }
    @Test
    public void testFetchJob() throws Exception {
        ResultDTO<JobInfoDTO> fetchJob = ohMyClient.fetchJob(JOB_ID);
        System.out.println(JSONObject.toJSONString(fetchJob));
    }
    @Test
    public void testDisableJob() throws Exception {
        System.out.println(ohMyClient.disableJob(JOB_ID));
    }
    @Test
    public void testEnableJob() throws Exception {
        System.out.println(ohMyClient.enableJob(JOB_ID));
    }
    @Test
    public void testDeleteJob() throws Exception {
        System.out.println(ohMyClient.deleteJob(JOB_ID));
    }
    @Test
    public void testRun() {
        System.out.println(ohMyClient.runJob(JOB_ID));
    }
    @Test
    public void testRunJobDelay() throws Exception {
        System.out.println(ohMyClient.runJob(JOB_ID, "this is instanceParams", 60000));
    }
    @Test
    public void testFetchInstanceInfo() throws Exception {
        System.out.println(ohMyClient.fetchInstanceInfo(205436386851946560L));
    }
    @Test
    public void testStopInstance() throws Exception {
        ResultDTO<Void> res = ohMyClient.stopInstance(205436995885858880L);
        System.out.println(res.toString());
    }
    @Test
    public void testFetchInstanceStatus() throws Exception {
        System.out.println(ohMyClient.fetchInstanceStatus(205436995885858880L));
    }
    @Test
    public void testCancelInstanceInTimeWheel() throws Exception {
        ResultDTO<Long> startRes = ohMyClient.runJob(JOB_ID, "start by OhMyClient", 20000);
        System.out.println("runJob result: " + JSONObject.toJSONString(startRes));
        ResultDTO<Void> cancelRes = ohMyClient.cancelInstance(startRes.getData());
        System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes));
    }
    @Test
    public void testCancelInstanceInDatabase() throws Exception {
        ResultDTO<Long> startRes = ohMyClient.runJob(15L, "start by OhMyClient", 2000000);
        System.out.println("runJob result: " + JSONObject.toJSONString(startRes));
        // Manually restart the server here, which drops the scheduling data held in the time wheel.
        TimeUnit.MINUTES.sleep(1);
        ResultDTO<Void> cancelRes = ohMyClient.cancelInstance(startRes.getData());
        System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes));
    }
    @Test
    public void testRetryInstance() throws Exception {
        ResultDTO<Void> res = ohMyClient.retryInstance(169557545206153344L);
        System.out.println(res);
    }
}

View File

@ -1,112 +0,0 @@
import com.alibaba.fastjson.JSONObject;
import com.github.kfcfans.powerjob.client.OhMyClient;
import com.github.kfcfans.powerjob.common.ExecuteType;
import com.github.kfcfans.powerjob.common.ProcessorType;
import com.github.kfcfans.powerjob.common.TimeExpressionType;
import com.github.kfcfans.powerjob.common.model.PEWorkflowDAG;
import com.github.kfcfans.powerjob.common.request.http.SaveJobInfoRequest;
import com.github.kfcfans.powerjob.common.request.http.SaveWorkflowRequest;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.util.List;
/**
 * Test cases for the legacy {@code OhMyClient} (workflow part).
 * NOTE: integration tests — they require a live server at 127.0.0.1:7700
 * and use hard-coded workflow/instance IDs.
 *
 * @author tjq
 * @since 2020/6/2
 */
public class TestWorkflow {
    private static OhMyClient ohMyClient;
    private static final long WF_ID = 1;
    @BeforeAll
    public static void initClient() throws Exception {
        ohMyClient = new OhMyClient("127.0.0.1:7700", "powerjob-agent-test", "123");
    }
    @Test
    public void initTestData() throws Exception {
        SaveJobInfoRequest base = new SaveJobInfoRequest();
        base.setJobName("DAG-Node-");
        base.setTimeExpressionType(TimeExpressionType.WORKFLOW);
        base.setExecuteType(ExecuteType.STANDALONE);
        base.setProcessorType(ProcessorType.EMBEDDED_JAVA);
        base.setProcessorInfo("com.github.kfcfans.powerjob.samples.workflow.WorkflowStandaloneProcessor");
        for (int i = 0; i < 5; i++) {
            // deep-copy the template via JSON round-trip, then customize the name
            SaveJobInfoRequest request = JSONObject.parseObject(JSONObject.toJSONBytes(base), SaveJobInfoRequest.class);
            request.setJobName(request.getJobName() + i);
            System.out.println(ohMyClient.saveJob(request));
        }
    }
    @Test
    public void testSaveWorkflow() throws Exception {
        // DAG
        List<PEWorkflowDAG.Node> nodes = Lists.newLinkedList();
        List<PEWorkflowDAG.Edge> edges = Lists.newLinkedList();
        nodes.add(new PEWorkflowDAG.Node(1L, "DAG-Node-1"));
        nodes.add(new PEWorkflowDAG.Node(2L, "DAG-Node-2"));
        edges.add(new PEWorkflowDAG.Edge(1L, 2L));
        PEWorkflowDAG peWorkflowDAG = new PEWorkflowDAG(nodes, edges);
        SaveWorkflowRequest req = new SaveWorkflowRequest();
        req.setWfName("workflow-by-client");
        req.setWfDescription("created by client");
        req.setPEWorkflowDAG(peWorkflowDAG);
        req.setEnable(true);
        req.setTimeExpressionType(TimeExpressionType.API);
        System.out.println("req ->" + JSONObject.toJSON(req));
        System.out.println(ohMyClient.saveWorkflow(req));
    }
    @Test
    public void testDisableWorkflow() throws Exception {
        System.out.println(ohMyClient.disableWorkflow(WF_ID));
    }
    @Test
    public void testDeleteWorkflow() throws Exception {
        System.out.println(ohMyClient.deleteWorkflow(WF_ID));
    }
    @Test
    public void testEnableWorkflow() throws Exception {
        System.out.println(ohMyClient.enableWorkflow(WF_ID));
    }
    @Test
    public void testFetchWorkflowInfo() throws Exception {
        System.out.println(ohMyClient.fetchWorkflow(WF_ID));
    }
    @Test
    public void testRunWorkflow() throws Exception {
        System.out.println(ohMyClient.runWorkflow(WF_ID));
    }
    @Test
    public void testStopWorkflowInstance() throws Exception {
        System.out.println(ohMyClient.stopWorkflowInstance(149962433421639744L));
    }
    @Test
    public void testFetchWfInstanceInfo() throws Exception {
        System.out.println(ohMyClient.fetchWorkflowInstanceInfo(149962433421639744L));
    }
    @Test
    public void testRunWorkflowPlus() throws Exception {
        System.out.println(ohMyClient.runWorkflow(WF_ID, "this is init Params 2", 90000));
    }
}

View File

@ -0,0 +1,22 @@
package tech.powerjob.client.test;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.BeforeAll;
import tech.powerjob.client.IPowerJobClient;
import tech.powerjob.client.PowerJobClient;
/**
 * Shared base class for client tests: initializes a {@link PowerJobClient}
 * once per test class.
 * NOTE: requires live servers at 127.0.0.1:7700 / 127.0.0.1:7701.
 *
 * @author tjq
 * @since 1/16/21
 */
public class ClientInitializer {
    // shared client instance used by all subclass test cases
    protected static IPowerJobClient powerJobClient;
    @BeforeAll
    public static void initClient() throws Exception {
        powerJobClient = new PowerJobClient(Lists.newArrayList("127.0.0.1:7700", "127.0.0.1:7701"), "powerjob-worker-samples", "powerjob123");
    }
}

View File

@ -0,0 +1,172 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.response.InstanceInfoDTO;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
/**
 * Test cases for {@link PowerJobClient}.
 * NOTE: integration tests — they require a live server (see {@link ClientInitializer})
 * and rely on hard-coded job/instance IDs existing on that server.
 *
 * @author tjq
 * @author Echo009
 * @since 2020/4/15
 */
@Slf4j
class TestClient extends ClientInitializer {
    public static final long JOB_ID = 1L;
    @Test
    void testSaveJob() {
        SaveJobInfoRequest newJobInfo = new SaveJobInfoRequest();
        newJobInfo.setId(JOB_ID);
        newJobInfo.setJobName("omsOpenAPIJobccccc" + System.currentTimeMillis());
        newJobInfo.setJobDescription("test OpenAPI" + System.currentTimeMillis());
        newJobInfo.setJobParams("{'aa':'bb'}");
        newJobInfo.setTimeExpressionType(TimeExpressionType.CRON);
        newJobInfo.setTimeExpression("0 0 * * * ? ");
        newJobInfo.setExecuteType(ExecuteType.STANDALONE);
        newJobInfo.setProcessorType(ProcessorType.BUILT_IN);
        newJobInfo.setProcessorInfo("tech.powerjob.samples.processors.StandaloneProcessorDemo");
        newJobInfo.setDesignatedWorkers("");
        newJobInfo.setMinCpuCores(1.1);
        newJobInfo.setMinMemorySpace(1.2);
        newJobInfo.setMinDiskSpace(1.3);
        log.info("[TestClient] [testSaveJob] SaveJobInfoRequest: {}", JSONObject.toJSONString(newJobInfo));
        ResultDTO<Long> resultDTO = powerJobClient.saveJob(newJobInfo);
        log.info("[TestClient] [testSaveJob] result: {}", JSONObject.toJSONString(resultDTO));
        Assertions.assertNotNull(resultDTO);
    }
    @Test
    void testCopyJob() {
        ResultDTO<Long> copyJobRes = powerJobClient.copyJob(JOB_ID);
        System.out.println(JSONObject.toJSONString(copyJobRes));
        Assertions.assertNotNull(copyJobRes);
    }
    @Test
    void testExportJob() {
        ResultDTO<SaveJobInfoRequest> exportJobRes = powerJobClient.exportJob(JOB_ID);
        System.out.println(JSONObject.toJSONString(exportJobRes));
    }
    @Test
    void testFetchJob() {
        ResultDTO<JobInfoDTO> fetchJob = powerJobClient.fetchJob(JOB_ID);
        System.out.println(JSONObject.toJSONString(fetchJob));
        Assertions.assertNotNull(fetchJob);
    }
    @Test
    void testDisableJob() {
        ResultDTO<Void> res = powerJobClient.disableJob(JOB_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testEnableJob() {
        ResultDTO<Void> res = powerJobClient.enableJob(JOB_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testDeleteJob() {
        ResultDTO<Void> res = powerJobClient.deleteJob(JOB_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testRun() {
        ResultDTO<Long> res = powerJobClient.runJob(JOB_ID, null, 0);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testRunJobDelay() {
        ResultDTO<Long> res = powerJobClient.runJob(JOB_ID, "this is instanceParams", 60000);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testFetchInstanceInfo() {
        // hard-coded instance ID — must exist on the target server
        ResultDTO<InstanceInfoDTO> res = powerJobClient.fetchInstanceInfo(702482902331424832L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testQueryInstanceInfo() {
        InstancePageQuery instancePageQuery = new InstancePageQuery();
        instancePageQuery.setJobIdEq(11L);
        instancePageQuery.setSortBy("actualTriggerTime");
        instancePageQuery.setAsc(true);
        instancePageQuery.setPageSize(3);
        instancePageQuery.setStatusIn(Lists.newArrayList(1,2,5));
        TestUtils.output(powerJobClient.queryInstanceInfo(instancePageQuery));
    }
    @Test
    void testStopInstance() {
        ResultDTO<Void> res = powerJobClient.stopInstance(702482902331424832L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testFetchInstanceStatus() {
        ResultDTO<Integer> res = powerJobClient.fetchInstanceStatus(702482902331424832L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testCancelInstanceInTimeWheel() {
        ResultDTO<Long> startRes = powerJobClient.runJob(JOB_ID, "start by OhMyClient", 20000);
        System.out.println("runJob result: " + JSONObject.toJSONString(startRes));
        ResultDTO<Void> cancelRes = powerJobClient.cancelInstance(startRes.getData());
        System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes));
        Assertions.assertTrue(cancelRes.isSuccess());
    }
    // @Test
    // @SneakyThrows
    // void testCancelInstanceInDatabase() {
    //     ResultDTO<Long> startRes = powerJobClient.runJob(15L, "start by OhMyClient", 2000000);
    //     System.out.println("runJob result: " + JSONObject.toJSONString(startRes));
    //
    //     // Restart server manually and clear all the data in time wheeler.
    //     TimeUnit.MINUTES.sleep(1);
    //
    //     ResultDTO<Void> cancelRes = powerJobClient.cancelInstance(startRes.getData());
    //     System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes));
    //     Assertions.assertTrue(cancelRes.isSuccess());
    // }
    @Test
    void testRetryInstance() {
        ResultDTO<Void> res = powerJobClient.retryInstance(169557545206153344L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
}

View File

@ -0,0 +1,35 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.utils.CommonUtils;
/**
 * Disaster-recovery / HA test: keeps polling the server while an operator
 * manually starts and stops server nodes; fails as soon as any request fails.
 *
 * @author tjq
 * @since 2024/8/11
 */
@Slf4j
public class TestClusterHA extends ClientInitializer {

    @Test
    void testHa() {
        // Manually start/stop the servers while this loop is running.
        int remaining = 1000000;
        while (remaining-- > 0) {
            CommonUtils.easySleep(100);
            ResultDTO<JobInfoDTO> jobInfoDTOResultDTO = powerJobClient.fetchJob(1L);
            log.info("[TestClusterHA] response: {}", JSONObject.toJSONString(jobInfoDTOResultDTO));
            if (jobInfoDTOResultDTO.isSuccess()) {
                continue;
            }
            throw new RuntimeException("request failed!");
        }
    }
}

View File

@ -0,0 +1,45 @@
package tech.powerjob.client.test;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.response.ResultDTO;
import org.junit.jupiter.api.Test;
import java.util.concurrent.ForkJoinPool;
/**
 * TestConcurrencyControl: saves a job with maxInstanceNum=1 and fires 100
 * concurrent run requests to exercise server-side concurrency control.
 * NOTE: integration test — requires a live server (see {@link ClientInitializer}).
 *
 * @author tjq
 * @since 1/16/21
 */
class TestConcurrencyControl extends ClientInitializer {

    @Test
    void testRunJobConcurrencyControl() {
        SaveJobInfoRequest saveJobInfoRequest = new SaveJobInfoRequest();
        saveJobInfoRequest.setJobName("test concurrency control job");
        saveJobInfoRequest.setProcessorType(ProcessorType.SHELL);
        saveJobInfoRequest.setProcessorInfo("pwd");
        saveJobInfoRequest.setExecuteType(ExecuteType.STANDALONE);
        saveJobInfoRequest.setTimeExpressionType(TimeExpressionType.API);
        // only one instance of this job may run at a time
        saveJobInfoRequest.setMaxInstanceNum(1);
        Long jobId = powerJobClient.saveJob(saveJobInfoRequest).getData();
        System.out.println("jobId: " + jobId);
        ForkJoinPool pool = new ForkJoinPool(32);
        try {
            for (int i = 0; i < 100; i++) {
                String params = "index-" + i;
                pool.execute(() -> {
                    ResultDTO<Long> res = powerJobClient.runJob(jobId, params, 0);
                    System.out.println(params + ": " + res);
                });
            }
            // fix: previously the test returned without waiting, so the JVM could
            // exit before the submitted tasks ever ran — wait for them to finish
            pool.awaitQuiescence(1, java.util.concurrent.TimeUnit.MINUTES);
        } finally {
            pool.shutdown();
        }
    }
}

View File

@ -0,0 +1,48 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSON;
import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.time.DateUtils;
import org.junit.jupiter.api.Test;
import java.util.Date;
import java.util.List;
/**
 * Tests for the query methods (fetch-all and conditional job queries).
 * NOTE: integration tests — they require a live server (see {@link ClientInitializer}).
 *
 * @author tjq
 * @since 1/16/21
 */
@Slf4j
class TestQuery extends ClientInitializer {
    @Test
    void testFetchAllJob() {
        ResultDTO<List<JobInfoDTO>> allJobRes = powerJobClient.fetchAllJob();
        System.out.println(JSON.toJSONString(allJobRes));
    }
    @Test
    void testQueryJob() {
        // exercise every supported condition type: range, LIKE, date bounds, IN lists
        JobInfoQuery jobInfoQuery = new JobInfoQuery()
                .setIdGt(-1L)
                .setIdLt(10086L)
                .setJobNameLike("DAG")
                .setGmtModifiedGt(DateUtils.addYears(new Date(), -10))
                .setGmtCreateLt(DateUtils.addDays(new Date(), 10))
                .setExecuteTypeIn(Lists.newArrayList(ExecuteType.STANDALONE.getV(), ExecuteType.BROADCAST.getV(), ExecuteType.MAP_REDUCE.getV()))
                .setProcessorTypeIn(Lists.newArrayList(ProcessorType.BUILT_IN.getV(), ProcessorType.SHELL.getV(), ProcessorType.EXTERNAL.getV()))
                .setProcessorInfoLike("tech.powerjob");
        ResultDTO<List<JobInfoDTO>> jobQueryResult = powerJobClient.queryJob(jobInfoQuery);
        System.out.println(JSON.toJSONString(jobQueryResult));
        System.out.println(jobQueryResult.getData().size());
    }
}

View File

@ -0,0 +1,17 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
/**
 * Test helpers: pretty-printing of arbitrary objects as JSON.
 *
 * @author tjq
 * @since 2024/11/21
 */
public class TestUtils {

    /**
     * Serializes the given value to JSON and prints it to stdout.
     *
     * @param v any serializable value
     */
    public static void output(Object v) {
        System.out.println(JSONObject.toJSONString(v));
    }
}

View File

@ -0,0 +1,191 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.enums.WorkflowNodeType;
import tech.powerjob.common.model.PEWorkflowDAG;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.response.WorkflowInfoDTO;
import tech.powerjob.common.response.WorkflowInstanceInfoDTO;
import tech.powerjob.common.response.WorkflowNodeInfoDTO;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.List;
/**
 * Test cases for {@link PowerJobClient} workflow operations.
 * NOTE: integration tests — they require a live server (see {@link ClientInitializer})
 * and rely on hard-coded workflow/instance IDs existing on that server.
 *
 * @author tjq
 * @author Echo009
 * @since 2020/6/2
 */
class TestWorkflow extends ClientInitializer {
    private static final long WF_ID = 2;
    @Test
    void initTestData() {
        SaveJobInfoRequest base = new SaveJobInfoRequest();
        base.setJobName("DAG-Node-");
        base.setTimeExpressionType(TimeExpressionType.WORKFLOW);
        base.setExecuteType(ExecuteType.STANDALONE);
        base.setProcessorType(ProcessorType.BUILT_IN);
        base.setProcessorInfo("tech.powerjob.samples.workflow.WorkflowStandaloneProcessor");
        for (int i = 0; i < 5; i++) {
            // deep-copy the template via JSON round-trip, then customize the name
            SaveJobInfoRequest request = JSONObject.parseObject(JSONObject.toJSONBytes(base), SaveJobInfoRequest.class);
            request.setJobName(request.getJobName() + i);
            ResultDTO<Long> res = powerJobClient.saveJob(request);
            System.out.println(res);
            Assertions.assertNotNull(res);
        }
    }
    @Test
    void testSaveWorkflow() {
        SaveWorkflowRequest req = new SaveWorkflowRequest();
        req.setWfName("workflow-by-client");
        req.setWfDescription("created by client");
        req.setEnable(true);
        req.setTimeExpressionType(TimeExpressionType.API);
        System.out.println("req ->" + JSONObject.toJSON(req));
        ResultDTO<Long> res = powerJobClient.saveWorkflow(req);
        System.out.println(res);
        Assertions.assertNotNull(res);
        req.setId(res.getData());
        // Create the workflow nodes.
        SaveWorkflowNodeRequest saveWorkflowNodeRequest1 = new SaveWorkflowNodeRequest();
        saveWorkflowNodeRequest1.setJobId(1L);
        saveWorkflowNodeRequest1.setNodeName("DAG-Node-1");
        saveWorkflowNodeRequest1.setType(WorkflowNodeType.JOB.getCode());
        SaveWorkflowNodeRequest saveWorkflowNodeRequest2 = new SaveWorkflowNodeRequest();
        saveWorkflowNodeRequest2.setJobId(1L);
        saveWorkflowNodeRequest2.setNodeName("DAG-Node-2");
        saveWorkflowNodeRequest2.setType(WorkflowNodeType.JOB.getCode());
        SaveWorkflowNodeRequest saveWorkflowNodeRequest3 = new SaveWorkflowNodeRequest();
        saveWorkflowNodeRequest3.setJobId(1L);
        saveWorkflowNodeRequest3.setNodeName("DAG-Node-3");
        saveWorkflowNodeRequest3.setType(WorkflowNodeType.JOB.getCode());
        List<WorkflowNodeInfoDTO> nodeList = powerJobClient.saveWorkflowNode(Lists.newArrayList(saveWorkflowNodeRequest1,saveWorkflowNodeRequest2,saveWorkflowNodeRequest3)).getData();
        System.out.println(nodeList);
        Assertions.assertNotNull(nodeList);
        // Build the DAG (node1 -> node2 -> node3) from the returned node IDs.
        List<PEWorkflowDAG.Node> nodes = Lists.newLinkedList();
        List<PEWorkflowDAG.Edge> edges = Lists.newLinkedList();
        nodes.add(new PEWorkflowDAG.Node(nodeList.get(0).getId()));
        nodes.add(new PEWorkflowDAG.Node(nodeList.get(1).getId()));
        nodes.add(new PEWorkflowDAG.Node(nodeList.get(2).getId()));
        edges.add(new PEWorkflowDAG.Edge(nodeList.get(0).getId(), nodeList.get(1).getId()));
        edges.add(new PEWorkflowDAG.Edge(nodeList.get(1).getId(), nodeList.get(2).getId()));
        PEWorkflowDAG peWorkflowDAG = new PEWorkflowDAG(nodes, edges);
        // Save the complete workflow definition (now including the DAG).
        req.setDag(peWorkflowDAG);
        res = powerJobClient.saveWorkflow(req);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testCopyWorkflow() {
        ResultDTO<Long> res = powerJobClient.copyWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testDisableWorkflow() {
        ResultDTO<Void> res = powerJobClient.disableWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testDeleteWorkflow() {
        ResultDTO<Void> res = powerJobClient.deleteWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testEnableWorkflow() {
        ResultDTO<Void> res = powerJobClient.enableWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testFetchWorkflowInfo() {
        ResultDTO<WorkflowInfoDTO> res = powerJobClient.fetchWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testRunWorkflow() {
        ResultDTO<Long> res = powerJobClient.runWorkflow(WF_ID, null, 0);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testStopWorkflowInstance() {
        ResultDTO<Void> res = powerJobClient.stopWorkflowInstance(149962433421639744L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testRetryWorkflowInstance() {
        ResultDTO<Void> res = powerJobClient.retryWorkflowInstance(149962433421639744L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testMarkWorkflowNodeAsSuccess() {
        ResultDTO<Void> res = powerJobClient.markWorkflowNodeAsSuccess(149962433421639744L, 1L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testFetchWfInstanceInfo() {
        ResultDTO<WorkflowInstanceInfoDTO> res = powerJobClient.fetchWorkflowInstanceInfo(149962433421639744L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
    @Test
    void testRunWorkflowPlus() {
        ResultDTO<Long> res = powerJobClient.runWorkflow(WF_ID, "this is init Params 2", 90000);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
}

View File

@ -4,23 +4,24 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>com.github.kfcfans</groupId> <groupId>tech.powerjob</groupId>
<version>2.0.0</version> <version>5.1.1</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-common</artifactId> <artifactId>powerjob-common</artifactId>
<version>3.4.0</version> <version>5.1.1</version>
<packaging>jar</packaging> <packaging>jar</packaging>
<properties> <properties>
<slf4j.version>1.7.30</slf4j.version> <slf4j.version>1.7.36</slf4j.version>
<commons.lang.version>3.10</commons.lang.version> <commons.lang.version>3.12.0</commons.lang.version>
<commons.io.version>2.6</commons.io.version> <commons.io.version>2.11.0</commons.io.version>
<guava.version>29.0-jre</guava.version> <guava.version>31.1-jre</guava.version>
<okhttp.version>3.14.9</okhttp.version> <okhttp.version>3.14.9</okhttp.version>
<akka.version>2.6.4</akka.version> <kryo.version>5.3.0</kryo.version>
<junit.version>5.6.1</junit.version> <jackson.version>2.14.3</jackson.version>
<junit.version>5.9.0</junit.version>
</properties> </properties>
<dependencies> <dependencies>
@ -52,23 +53,6 @@
<version>${okhttp.version}</version> <version>${okhttp.version}</version>
</dependency> </dependency>
<!-- akka remote -->
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_2.13</artifactId>
<version>${akka.version}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-serialization-jackson_2.13</artifactId>
<version>${akka.version}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-slf4j_2.13</artifactId>
<version>${akka.version}</version>
</dependency>
<!-- commons-io --> <!-- commons-io -->
<dependency> <dependency>
<groupId>commons-io</groupId> <groupId>commons-io</groupId>
@ -76,7 +60,32 @@
<version>${commons.io.version}</version> <version>${commons.io.version}</version>
</dependency> </dependency>
<!-- Junit 测试 --> <!-- kryo 超超超高性能序列化框架 -->
<dependency>
<groupId>com.esotericsoftware.kryo</groupId>
<artifactId>kryo5</artifactId>
<version>${kryo.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- 解决 Java8 data/time 类型处理问题 #869 -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- Junit tests -->
<dependency> <dependency>
<groupId>org.junit.jupiter</groupId> <groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId> <artifactId>junit-jupiter-api</artifactId>

View File

@ -1,13 +0,0 @@
package com.github.kfcfans.powerjob.common;
/**
* 部署环境
*
* @author tjq
* @since 2020/5/3
*/
public enum Env {
DAILY,
PRE,
PRODUCT
}

View File

@ -1,20 +0,0 @@
package com.github.kfcfans.powerjob.common;
/**
* 公共常量
*
* @author tjq
* @since 2020/5/31
*/
public class OmsConstant {
public static final String TIME_PATTERN = "yyyy-MM-dd HH:mm:ss";
public static final String TIME_PATTERN_PLUS = "yyyy-MM-dd HH:mm:ss.SSS";
public static final String NONE = "N/A";
public static final String COMMA = ",";
public static final String LINE_SEPARATOR = "\r\n";
public static final String JSON_MEDIA_TYPE = "application/json; charset=utf-8";
}

View File

@ -1,12 +0,0 @@
package com.github.kfcfans.powerjob.common;
import java.io.Serializable;
/**
* OMS 序列化标记接口
*
* @author tjq
* @since 2020/4/16
*/
public interface OmsSerializable extends Serializable {
}

View File

@ -1,24 +0,0 @@
package com.github.kfcfans.powerjob.common;
import java.net.NetworkInterface;
/**
* 通过 JVM 启动参数传入的配置信息
*
*
* @author tjq
* @since 2020/8/8
*/
public class PowerJobDKey {
/**
* The property name for {@link NetworkInterface#getDisplayName() the name of network interface} that the PowerJob application prefers
*/
public static final String PREFERRED_NETWORK_INTERFACE = "powerjob.network.interface.preferred";
/**
* Java regular expressions for network interfaces that will be ignored.
*/
public static final String IGNORED_NETWORK_INTERFACE_REGEX = "powerjob.network.interface.ignored";
}

View File

@ -1,37 +0,0 @@
package com.github.kfcfans.powerjob.common;
/**
* RemoteConstant
*
* @author tjq
* @since 2020/3/17
*/
public class RemoteConstant {
/* ************************ AKKA WORKER ************************ */
public static final int DEFAULT_WORKER_PORT = 27777;
public static final String WORKER_ACTOR_SYSTEM_NAME = "oms";
public static final String Task_TRACKER_ACTOR_NAME = "task_tracker";
public static final String PROCESSOR_TRACKER_ACTOR_NAME = "processor_tracker";
public static final String WORKER_ACTOR_NAME = "worker";
public static final String TROUBLESHOOTING_ACTOR_NAME = "troubleshooting";
public static final String WORKER_AKKA_CONFIG_NAME = "oms-worker.akka.conf";
/* ************************ AKKA SERVER ************************ */
public static final String SERVER_ACTOR_SYSTEM_NAME = "oms-server";
public static final String SERVER_ACTOR_NAME = "server_actor";
public static final String SERVER_FRIEND_ACTOR_NAME = "friend_actor";
public static final String SERVER_TROUBLESHOOTING_ACTOR_NAME = "server_troubleshooting_actor";
public static final String SERVER_AKKA_CONFIG_NAME = "oms-server.akka.conf";
/* ************************ OTHERS ************************ */
public static final String EMPTY_ADDRESS = "N/A";
public static final long DEFAULT_TIMEOUT_MS = 5000;
}

View File

@ -1,36 +0,0 @@
package com.github.kfcfans.powerjob.common;
/**
* 系统生成的任务实例运行结果
*
* @author tjq
* @since 2020/4/11
*/
public class SystemInstanceResult {
/* *********** 普通instance 专用 *********** */
// 同时运行的任务实例数过多
public static final String TOO_MANY_INSTANCES = "too many instances(%d>%d)";
// 无可用worker
public static final String NO_WORKER_AVAILABLE = "no worker available";
// 任务执行超时
public static final String INSTANCE_EXECUTE_TIMEOUT = "instance execute timeout";
// 创建根任务失败
public static final String TASK_INIT_FAILED = "create root task failed";
// 未知错误
public static final String UNKNOWN_BUG = "unknown bug";
// TaskTracker 长时间未上报
public static final String REPORT_TIMEOUT = "worker report timeout, maybe TaskTracker down";
/* *********** workflow 专用 *********** */
public static final String MIDDLE_JOB_FAILED = "middle job failed";
public static final String MIDDLE_JOB_STOPPED = "middle job stopped by user";
public static final String CAN_NOT_FIND_JOB = "can't find some job";
// 被用户手动停止
public static final String STOPPED_BY_USER = "stopped by user";
public static final String CANCELED_BY_USER = "canceled by user";
}

View File

@ -1,37 +0,0 @@
package com.github.kfcfans.powerjob.common;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.List;
/**
* 时间表达式类型
*
* @author tjq
* @since 2020/3/30
*/
@Getter
@AllArgsConstructor
public enum TimeExpressionType {
API(1),
CRON(2),
FIX_RATE(3),
FIX_DELAY(4),
WORKFLOW(5);
int v;
public static final List<Integer> frequentTypes = Lists.newArrayList(FIX_RATE.v, FIX_DELAY.v);
public static TimeExpressionType of(int v) {
for (TimeExpressionType type : values()) {
if (type.v == v) {
return type;
}
}
throw new IllegalArgumentException("unknown TimeExpressionType of " + v);
}
}

View File

@ -1,27 +0,0 @@
package com.github.kfcfans.powerjob.common.model;
import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* 已部署的容器信息
*
* @author tjq
* @since 2020/5/18
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class DeployedContainerInfo implements OmsSerializable {
// 容器ID
private Long containerId;
// 版本
private String version;
// 部署时间
private long deployedTime;
// 机器地址无需上报
private String workerAddress;
}

View File

@ -1,21 +0,0 @@
package com.github.kfcfans.powerjob.common.model;
import lombok.Data;
/**
* Git代码库信息
*
* @author tjq
* @since 2020/5/17
*/
@Data
public class GitRepoInfo {
// 仓库地址
private String repo;
// 分支名称
private String branch;
// 用户名
private String username;
// 密码
private String password;
}

View File

@ -1,64 +0,0 @@
package com.github.kfcfans.powerjob.common.model;
import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
* 任务实例的运行详细信息
*
* @author tjq
* @since 2020/4/11
*/
@Data
@NoArgsConstructor
public class InstanceDetail implements OmsSerializable {
// 任务预计执行时间
private Long expectedTriggerTime;
// 任务整体开始时间
private Long actualTriggerTime;
// 任务整体结束时间可能不存在
private Long finishedTime;
// 任务状态
private Integer status;
// 任务执行结果可能不存在
private String result;
// TaskTracker地址
private String taskTrackerAddress;
// 启动参数
private String instanceParams;
// MR或BD任务专用
private TaskDetail taskDetail;
// 秒级任务专用
private List<SubInstanceDetail> subInstanceDetails;
// 重试次数
private Long runningTimes;
// 扩展字段中间件升级不易最好不要再改 common 包了...否则 server worker 版本不兼容
private String extra;
// 秒级任务的 extra -> List<SubInstanceDetail>
@Data
@NoArgsConstructor
public static class SubInstanceDetail implements OmsSerializable {
private long subInstanceId;
private Long startTime;
private Long finishedTime;
private String result;
private int status;
}
// MapReduce Broadcast 任务的 extra ->
@Data
@NoArgsConstructor
public static class TaskDetail implements OmsSerializable {
private long totalTaskNum;
private long succeedTaskNum;
private long failedTaskNum;
}
}

View File

@ -1,26 +0,0 @@
package com.github.kfcfans.powerjob.common.model;
import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* 任务实例日志对象
*
* @author tjq
* @since 2020/4/21
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class InstanceLogContent implements OmsSerializable {
// 实例ID
private long instanceId;
// 日志提交时间
private long logTime;
// 日志内容
private String logContent;
}

View File

@ -1,63 +0,0 @@
package com.github.kfcfans.powerjob.common.model;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.Serializable;
import java.util.List;
/**
* Point & Edge DAG 表示法
* + 线易于表达和传播
*
* @author tjq
* @since 2020/5/26
*/
@Data
@NoArgsConstructor
public class PEWorkflowDAG implements Serializable {
// DAG 点线表示法
private List<Node> nodes;
private List<Edge> edges;
//
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class Node implements Serializable {
private Long jobId;
private String jobName;
// 运行时参数图定义不需要
@JsonSerialize(using= ToStringSerializer.class)
private Long instanceId;
private Integer status;
private String result;
public Node(Long jobId, String jobName) {
this.jobId = jobId;
this.jobName = jobName;
}
}
// jobId -> jobId
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class Edge implements Serializable {
private Long from;
private Long to;
}
public PEWorkflowDAG(@Nonnull List<Node> nodes, @Nullable List<Edge> edges) {
this.nodes = nodes;
this.edges = edges == null ? Lists.newLinkedList() : edges;
}
}

View File

@ -1,86 +0,0 @@
package com.github.kfcfans.powerjob.common.model;
import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.Data;
/**
* 系统指标
*
* @author tjq
* @since 2020/3/25
*/
@Data
public class SystemMetrics implements OmsSerializable, Comparable<SystemMetrics> {
// CPU核心数量
private int cpuProcessors;
// CPU负载负载 使用率 是两个完全不同的概念Java 无法获取 CPU 使用率只能获取负载
private double cpuLoad;
// 内存单位 GB
private double jvmUsedMemory;
private double jvmMaxMemory;
// 内存占用0.X非百分比
private double jvmMemoryUsage;
// 磁盘单位 GB
private double diskUsed;
private double diskTotal;
// 磁盘占用0.X非百分比
private double diskUsage;
// 缓存分数
private int score;
@Override
public int compareTo(SystemMetrics that) {
// 降序排列
return that.calculateScore() - this.calculateScore();
}
/**
* 计算得分情况内存 & CPU (磁盘不参与计算)
* @return 得分情况
*/
public int calculateScore() {
if (score > 0) {
return score;
}
// 对于 TaskTracker 来说内存是任务顺利完成的关键因此内存 2 块钱 1GB
double memScore = (jvmMaxMemory - jvmUsedMemory) * 2;
// CPU 剩余负载1 块钱 1
double cpuScore = cpuProcessors - cpuLoad;
// Indian Windows 无法获取 CpuLoad -1固定为 1
if (cpuScore > cpuProcessors) {
cpuScore = 1;
}
score = (int) (memScore + cpuScore);
return score;
}
/**
* 该机器是否可用
* @param minCPUCores 判断标准之最低可用CPU核心数量
* @param minMemorySpace 判断标准之最低可用内存
* @param minDiskSpace 判断标准之最低可用磁盘空间
* @return 是否可用
*/
public boolean available(double minCPUCores, double minMemorySpace, double minDiskSpace) {
double availableMemory = jvmMaxMemory - jvmUsedMemory;
double availableDisk = diskTotal - diskUsed;
if (availableMemory < minMemorySpace || availableDisk < minDiskSpace) {
return false;
}
// cpuLoad 为负数代表无法获取不判断等于 0 为最理想情况CPU 空载不需要判断
if (cpuLoad <= 0 || minCPUCores <= 0) {
return true;
}
return minCPUCores < (cpuProcessors - cpuLoad);
}
}

View File

@ -1,19 +0,0 @@
package com.github.kfcfans.powerjob.common.request;
import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* 服务器查询实例运行状态需要返回详细的运行数据
*
* @author tjq
* @since 2020/4/10
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ServerQueryInstanceStatusReq implements OmsSerializable {
private Long instanceId;
}

View File

@ -1,74 +0,0 @@
package com.github.kfcfans.powerjob.common.request;
import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.Data;
import java.util.List;
/**
* 服务端调度任务请求一次任务处理的入口
*
* @author tjq
* @since 2020/3/17
*/
@Data
public class ServerScheduleJobReq implements OmsSerializable {
// 可用处理器地址可能多值逗号分隔
private List<String> allWorkerAddress;
/* *********************** 任务相关属性 *********************** */
/**
* 任务ID当更换Server后需要根据 JobId 重新查询任务元数据
*/
private Long jobId;
private Long wfInstanceId;
/**
* 基础信息
*/
private Long instanceId;
/**
* 任务执行处理器信息
*/
// 任务执行类型单机广播MR
private String executeType;
// 处理器类型JavaBeanJar脚本等
private String processorType;
// 处理器信息
private String processorInfo;
/**
* 超时时间
*/
// 整个任务的总体超时时间
private long instanceTimeoutMS;
/**
* 任务运行参数
*/
// 任务级别的参数相当于类的static变量
private String jobParams;
// 实例级别的参数相当于类的普通变量API触发专用从API触发处带入
private String instanceParams;
// 每台机器的处理线程数上限
private int threadConcurrency;
// 子任务重试次数任务本身的重试机制由server控制
private int taskRetryNum;
/**
* 定时执行信息
*/
// 时间表达式类型CRON/API/FIX_RATE/FIX_DELAY
private String timeExpressionType;
// 时间表达式CRON/NULL/LONG/LONG单位MS
private String timeExpression;
// 最大同时运行任务数默认 1
private Integer maxInstanceNum;
}

View File

@ -1,32 +0,0 @@
package com.github.kfcfans.powerjob.common.request;
import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.Data;
/**
* TaskTracker 将状态上报给服务器
*
* @author tjq
* @since 2020/3/17
*/
@Data
public class TaskTrackerReportInstanceStatusReq implements OmsSerializable {
private Long jobId;
private Long instanceId;
private Long wfInstanceId;
private int instanceStatus;
private String result;
/* ********* 统计信息 ********* */
private long totalTaskNum;
private long succeedTaskNum;
private long failedTaskNum;
private long startTime;
private long reportTime;
private String sourceAddress;
}

View File

@ -1,36 +0,0 @@
package com.github.kfcfans.powerjob.common.request;
import com.github.kfcfans.powerjob.common.OmsSerializable;
import com.github.kfcfans.powerjob.common.model.DeployedContainerInfo;
import com.github.kfcfans.powerjob.common.model.SystemMetrics;
import lombok.Data;
import java.util.List;
/**
* Worker 上报健康信息worker定时发送的heartbeat
*
* @author tjq
* @since 2020/3/25
*/
@Data
public class WorkerHeartbeat implements OmsSerializable {
// 本机地址 -> IP:port
private String workerAddress;
// 当前 appName
private String appName;
// 当前 appId
private Long appId;
// 当前时间
private long heartbeatTime;
// 当前加载的容器容器名称 -> 容器版本
private List<DeployedContainerInfo> containerInfos;
// worker 版本信息
private String version;
// 扩展字段
private String extra;
private SystemMetrics systemMetrics;
}

View File

@ -1,90 +0,0 @@
package com.github.kfcfans.powerjob.common.request.http;
import com.github.kfcfans.powerjob.common.ExecuteType;
import com.github.kfcfans.powerjob.common.ProcessorType;
import com.github.kfcfans.powerjob.common.TimeExpressionType;
import com.github.kfcfans.powerjob.common.utils.CommonUtils;
import lombok.Data;
import java.util.List;
/**
* 创建/修改 JobInfo 请求
*
* @author tjq
* @since 2020/3/30
*/
@Data
public class SaveJobInfoRequest {
// 任务IDjobIdnull -> 插入否则为更新
private Long id;
/* ************************** 任务基本信息 ************************** */
// 任务名称
private String jobName;
// 任务描述
private String jobDescription;
// 任务所属的应用IDClient无需填写该参数自动填充
private Long appId;
// 任务自带的参数
private String jobParams;
/* ************************** 定时参数 ************************** */
// 时间表达式类型CRON/API/FIX_RATE/FIX_DELAY
private TimeExpressionType timeExpressionType;
// 时间表达式CRON/NULL/LONG/LONG
private String timeExpression;
/* ************************** 执行方式 ************************** */
// 执行类型单机/广播/MR
private ExecuteType executeType;
// 执行器类型Java/Shell
private ProcessorType processorType;
// 执行器信息
private String processorInfo;
/* ************************** 运行时配置 ************************** */
// 最大同时运行任务数0 代表不限
private Integer maxInstanceNum = 0;
// 并发度同时执行的线程数量
private Integer concurrency = 5;
// 任务整体超时时间
private Long instanceTimeLimit = 0L;
/* ************************** 重试配置 ************************** */
private Integer instanceRetryNum = 0;
private Integer taskRetryNum = 0;
/* ************************** 繁忙机器配置 ************************** */
// 最低CPU核心数量0代表不限
private double minCpuCores = 0;
// 最低内存空间单位 GB0代表不限
private double minMemorySpace = 0;
// 最低磁盘空间单位 GB0代表不限
private double minDiskSpace = 0;
// 1 正常运行2 停止不再调度
private boolean enable = true;
/* ************************** 集群配置 ************************** */
// 指定机器运行空代表不限非空则只会使用其中的机器运行多值逗号分割
private String designatedWorkers;
// 最大机器数量
private Integer maxWorkerCount = 0;
// 报警用户ID列表
private List<Long> notifyUserIds;
public void valid() {
CommonUtils.requireNonNull(jobName, "jobName can't be empty");
CommonUtils.requireNonNull(appId, "appId can't be empty");
CommonUtils.requireNonNull(processorInfo, "processorInfo can't be empty");
CommonUtils.requireNonNull(executeType, "executeType can't be empty");
CommonUtils.requireNonNull(processorType, "processorType can't be empty");
CommonUtils.requireNonNull(timeExpressionType, "timeExpressionType can't be empty");
}
}

View File

@ -1,49 +0,0 @@
package com.github.kfcfans.powerjob.common.response;
import com.github.kfcfans.powerjob.common.InstanceStatus;
import lombok.Data;
import java.util.Date;
/**
* instanceInfo 对外输出对象
*
* @author tjq
* @since 2020/5/14
*/
@Data
public class InstanceInfoDTO {
// 任务ID
private Long jobId;
// 任务所属应用的ID冗余提高查询效率
private Long appId;
// 任务实例ID
private Long instanceId;
// 工作流实例ID
private Long wfInstanceId;
// 任务实例参数
private String instanceParams;
/**
* 任务状态 {@link InstanceStatus}
*/
private int status;
// 该任务实例的类型普通/工作流InstanceType
private Integer type;
// 执行结果
private String result;
// 预计触发时间
private Long expectedTriggerTime;
// 实际触发时间
private Long actualTriggerTime;
// 结束时间
private Long finishedTime;
// TaskTracker地址
private String taskTrackerAddress;
// 总共执行的次数用于重试判断
private Long runningTimes;
private Date gmtCreate;
private Date gmtModified;
}

View File

@ -1,78 +0,0 @@
package com.github.kfcfans.powerjob.common.response;
import lombok.Data;
import java.util.Date;
/**
* jobInfo 对外输出对象
*
* @author tjq
* @since 2020/5/14
*/
@Data
public class JobInfoDTO {
private Long id;
/* ************************** 任务基本信息 ************************** */
// 任务名称
private String jobName;
// 任务描述
private String jobDescription;
// 任务所属的应用ID
private Long appId;
// 任务自带的参数
private String jobParams;
/* ************************** 定时参数 ************************** */
// 时间表达式类型CRON/API/FIX_RATE/FIX_DELAY
private Integer timeExpressionType;
// 时间表达式CRON/NULL/LONG/LONG
private String timeExpression;
/* ************************** 执行方式 ************************** */
// 执行类型单机/广播/MR
private Integer executeType;
// 执行器类型Java/Shell
private Integer processorType;
// 执行器信息
private String processorInfo;
/* ************************** 运行时配置 ************************** */
// 最大同时运行任务数默认 1
private Integer maxInstanceNum;
// 并发度同时执行某个任务的最大线程数量
private Integer concurrency;
// 任务整体超时时间
private Long instanceTimeLimit;
/* ************************** 重试配置 ************************** */
private Integer instanceRetryNum;
private Integer taskRetryNum;
// 1 正常运行2 停止不再调度
private Integer status;
// 下一次调度时间
private Long nextTriggerTime;
/* ************************** 繁忙机器配置 ************************** */
// 最低CPU核心数量0代表不限
private double minCpuCores;
// 最低内存空间单位 GB0代表不限
private double minMemorySpace;
// 最低磁盘空间单位 GB0代表不限
private double minDiskSpace;
/* ************************** 集群配置 ************************** */
// 指定机器运行空代表不限非空则只会使用其中的机器运行多值逗号分割
private String designatedWorkers;
// 最大机器数量
private Integer maxWorkerCount;
// 报警用户ID列表多值逗号分隔
private String notifyUserIds;
private Date gmtCreate;
private Date gmtModified;
}

View File

@ -1,44 +0,0 @@
package com.github.kfcfans.powerjob.common.response;
import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.apache.commons.lang3.exception.ExceptionUtils;
/**
* 请求返回的结果对象
*
* @author tjq
* @since 2020/3/30
*/
@Getter
@Setter
@ToString
public class ResultDTO<T> implements OmsSerializable {
private boolean success;
// 数据success为 true 时存在
private T data;
// 错误信息success为 false 时存在
private String message;
public static <T> ResultDTO<T> success(T data) {
ResultDTO<T> r = new ResultDTO<>();
r.success = true;
r.data = data;
return r;
}
public static <T> ResultDTO<T> failed(String message) {
ResultDTO<T> r = new ResultDTO<>();
r.success = false;
r.message = message;
return r;
}
public static <T> ResultDTO<T> failed(Throwable t) {
return failed(ExceptionUtils.getStackTrace(t));
}
}

View File

@ -1,45 +0,0 @@
package com.github.kfcfans.powerjob.common.response;
import lombok.Data;
import java.util.Date;
/**
* workflowInfo 对外输出对象
*
* @author tjq
* @since 2020/6/2
*/
@Data
public class WorkflowInfoDTO {
private Long id;
private String wfName;
private String wfDescription;
// 所属应用ID
private Long appId;
// 工作流的DAG图信息点线式DAG的json
private String peDAG;
/* ************************** 定时参数 ************************** */
// 时间表达式类型CRON/API/FIX_RATE/FIX_DELAY
private Integer timeExpressionType;
// 时间表达式CRON/NULL/LONG/LONG
private String timeExpression;
// 最大同时运行的工作流个数默认 1
private Integer maxWfInstanceNum;
// 1 正常运行2 停止不再调度
private Integer status;
// 下一次调度时间
private Long nextTriggerTime;
// 工作流整体失败的报警
private String notifyUserIds;
private Date gmtCreate;
private Date gmtModified;
}

View File

@ -1,68 +0,0 @@
package com.github.kfcfans.powerjob.common.utils;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.kfcfans.powerjob.common.PowerJobException;
import org.apache.commons.lang3.exception.ExceptionUtils;
/**
* JSON工具类
*
* @author tjq
* @since 2020/4/16
*/
public class JsonUtils {
private static final ObjectMapper objectMapper = new ObjectMapper();
static {
objectMapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
}
public static String toJSONString(Object obj) {
try {
return objectMapper.writeValueAsString(obj);
}catch (Exception ignore) {
}
return null;
}
public static String toJSONStringUnsafe(Object obj) {
try {
return objectMapper.writeValueAsString(obj);
}catch (Exception e) {
throw new PowerJobException(e);
}
}
public static byte[] toBytes(Object obj) {
try {
return objectMapper.writeValueAsBytes(obj);
}catch (Exception ignore) {
}
return null;
}
public static <T> T parseObject(String json, Class<T> clz) throws JsonProcessingException {
return objectMapper.readValue(json, clz);
}
public static <T> T parseObject(byte[] b, Class<T> clz) throws Exception {
return objectMapper.readValue(b, clz);
}
public static <T> T parseObject(byte[] b, TypeReference<T> typeReference) throws Exception {
return objectMapper.readValue(b, typeReference);
}
public static <T> T parseObjectUnsafe(String json, Class<T> clz) {
try {
return objectMapper.readValue(json, clz);
}catch (Exception e) {
ExceptionUtils.rethrow(e);
}
throw new PowerJobException("impossible");
}
}

View File

@ -1,7 +1,7 @@
package com.github.kfcfans.powerjob.common; package tech.powerjob.common;
/** /**
* 容器常量 * Container constants.
* *
* @author tjq * @author tjq
* @since 2020/5/15 * @since 2020/5/15
@ -9,13 +9,16 @@ package com.github.kfcfans.powerjob.common;
public class ContainerConstant { public class ContainerConstant {
/** /**
* spring-context 配置文件名称 * Spring-context configuration file name of the container.
*/ */
public static final String SPRING_CONTEXT_FILE_NAME = "oms-worker-container-spring-context.xml"; public static final String SPRING_CONTEXT_FILE_NAME = "oms-worker-container-spring-context.xml";
/** /**
* container 属性文件名称 * Property file name of the container.
*/ */
public static final String CONTAINER_PROPERTIES_FILE_NAME = "oms-worker-container.properties"; public static final String CONTAINER_PROPERTIES_FILE_NAME = "oms-worker-container.properties";
/**
* Package name of the container.
*/
public static final String CONTAINER_PACKAGE_NAME_KEY = "PACKAGE_NAME"; public static final String CONTAINER_PACKAGE_NAME_KEY = "PACKAGE_NAME";
} }

View File

@ -0,0 +1,35 @@
package tech.powerjob.common;
/**
* Common constants.
*
* @author tjq
* @since 2020/5/31
*/
public class OmsConstant {
/**
* package name
*/
public static final String PACKAGE = "tech.powerjob";
public static final int SERVER_DEFAULT_AKKA_PORT = 10086;
public static final int SERVER_DEFAULT_HTTP_PORT = 10010;
public static final String TIME_PATTERN = "yyyy-MM-dd HH:mm:ss";
public static final String TIME_PATTERN_PLUS = "yyyy-MM-dd HH:mm:ss.SSS";
public static final String NONE = "N/A";
public static final String COMMA = ",";
public static final String AND = "&";
public static final String EQUAL = "=";
public static final String LINE_SEPARATOR = "\r\n";
public static final String HTTP_HEADER_CONTENT_TYPE = "Content-Type";
public static final String JSON_MEDIA_TYPE = "application/json; charset=utf-8";
public static final String NULL = "null";
}

View File

@ -1,4 +1,4 @@
package com.github.kfcfans.powerjob.common; package tech.powerjob.common;
/** /**
* OpenAPI 常量 * OpenAPI 常量
@ -8,34 +8,62 @@ package com.github.kfcfans.powerjob.common;
*/ */
public class OpenAPIConstant { public class OpenAPIConstant {
private OpenAPIConstant(){
}
public static final String WEB_PATH = "/openApi"; public static final String WEB_PATH = "/openApi";
public static final String ASSERT = "/assert"; public static final String ASSERT = "/assert";
public static final String AUTH_APP = "/authApp";
/* ************* JOB 区 ************* */ /* ************* JOB 区 ************* */
public static final String SAVE_JOB = "/saveJob"; public static final String SAVE_JOB = "/saveJob";
public static final String COPY_JOB = "/copyJob";
public static final String EXPORT_JOB = "/exportJob";
public static final String FETCH_JOB = "/fetchJob"; public static final String FETCH_JOB = "/fetchJob";
public static final String FETCH_ALL_JOB = "/fetchAllJob";
public static final String QUERY_JOB = "/queryJob";
public static final String DISABLE_JOB = "/disableJob"; public static final String DISABLE_JOB = "/disableJob";
public static final String ENABLE_JOB = "/enableJob"; public static final String ENABLE_JOB = "/enableJob";
public static final String DELETE_JOB = "/deleteJob"; public static final String DELETE_JOB = "/deleteJob";
public static final String RUN_JOB = "/runJob"; public static final String RUN_JOB = "/runJob";
/* ************* Instance 区 ************* */ /* ************* Instance 区 ************* */
public static final String STOP_INSTANCE = "/stopInstance"; public static final String STOP_INSTANCE = "/stopInstance";
public static final String CANCEL_INSTANCE = "/cancelInstance"; public static final String CANCEL_INSTANCE = "/cancelInstance";
public static final String RETRY_INSTANCE = "/retryInstance"; public static final String RETRY_INSTANCE = "/retryInstance";
public static final String FETCH_INSTANCE_STATUS = "/fetchInstanceStatus"; public static final String FETCH_INSTANCE_STATUS = "/fetchInstanceStatus";
public static final String FETCH_INSTANCE_INFO = "/fetchInstanceInfo"; public static final String FETCH_INSTANCE_INFO = "/fetchInstanceInfo";
public static final String QUERY_INSTANCE = "/queryInstance";
/* ************* Workflow 区 ************* */ /* ************* Workflow 区 ************* */
public static final String SAVE_WORKFLOW = "/saveWorkflow"; public static final String SAVE_WORKFLOW = "/saveWorkflow";
public static final String COPY_WORKFLOW = "/copyWorkflow";
public static final String FETCH_WORKFLOW = "/fetchWorkflow"; public static final String FETCH_WORKFLOW = "/fetchWorkflow";
public static final String DISABLE_WORKFLOW = "/disableWorkflow"; public static final String DISABLE_WORKFLOW = "/disableWorkflow";
public static final String ENABLE_WORKFLOW = "/enableWorkflow"; public static final String ENABLE_WORKFLOW = "/enableWorkflow";
public static final String DELETE_WORKFLOW = "/deleteWorkflow"; public static final String DELETE_WORKFLOW = "/deleteWorkflow";
public static final String RUN_WORKFLOW = "/runWorkflow"; public static final String RUN_WORKFLOW = "/runWorkflow";
public static final String SAVE_WORKFLOW_NODE = "/addWorkflowNode";
/* ************* WorkflowInstance 区 ************* */ /* ************* WorkflowInstance 区 ************* */
public static final String STOP_WORKFLOW_INSTANCE = "/stopWfInstance"; public static final String STOP_WORKFLOW_INSTANCE = "/stopWfInstance";
public static final String RETRY_WORKFLOW_INSTANCE = "/retryWfInstance";
public static final String FETCH_WORKFLOW_INSTANCE_INFO = "/fetchWfInstanceInfo"; public static final String FETCH_WORKFLOW_INSTANCE_INFO = "/fetchWfInstanceInfo";
public static final String MARK_WORKFLOW_NODE_AS_SUCCESS = "/markWorkflowNodeAsSuccess";
/* ************* 鉴权 ************* */
public static final String REQUEST_HEADER_ACCESS_TOKEN = "X-POWERJOB-ACCESS-TOKEN";
public static final String REQUEST_HEADER_APP_ID = "X-POWERJOB-APP-ID";
public static final String RESPONSE_HEADER_AUTH_STATUS = "X-POWERJOB-AUTH-PASSED";
} }

View File

@ -0,0 +1,69 @@
package tech.powerjob.common;
import java.net.NetworkInterface;
/**
* 通过 JVM 启动参数传入的配置信息
*
* @author tjq
* @since 2020/8/8
*/
public class PowerJobDKey {

    /**
     * The property name for {@link NetworkInterface#getDisplayName() the name of network interface} that the PowerJob application prefers
     */
    public static final String PREFERRED_NETWORK_INTERFACE = "powerjob.network.interface.preferred";

    /**
     * Bind address, normally the address of a local network interface.
     */
    public static final String BIND_LOCAL_ADDRESS = "powerjob.network.local.address";

    /**
     * External address (optional, defaults to the bind address). When NAT or similar is in play,
     * communication can be enabled by supplying the external address separately.
     */
    public static final String NT_EXTERNAL_ADDRESS = "powerjob.network.external.address";

    public static final String NT_EXTERNAL_PORT = "powerjob.network.external.port";

    /**
     * Java regular expressions for network interfaces that will be ignored.
     */
    public static final String IGNORED_NETWORK_INTERFACE_REGEX = "powerjob.network.interface.ignored";

    /**
     * Enables compression during data transfer, such as gzip under the HTTP protocol. default value is 'false'
     * Note that enabling compression reduces network usage, but increases CPU consumption
     */
    public static final String TRANSPORTER_USE_COMPRESSING = "powerjob.transporter.compression.enabled";

    /**
     * keep-alive connection timeout(in seconds), value <= 0 means disable keepalive. default value is 75
     */
    public static final String TRANSPORTER_KEEP_ALIVE_TIMEOUT = "powerjob.transporter.keepalive.timeout";

    public static final String WORKER_STATUS_CHECK_PERIOD = "powerjob.worker.status-check.normal.period";

    /**
     * allowed PowerJob to invoke Thread#stop to kill a thread when PowerJob can't interrupt the thread
     * <a href="https://stackoverflow.com/questions/16504140/thread-stop-deprecated">It's VERY dangerous</a>
     */
    public static final String WORKER_ALLOWED_FORCE_STOP_THREAD = "powerjob.worker.allowed-force-stop-thread";

    public static final String WORKER_WORK_SPACE = "powerjob.worker.workspace";

    /**
     * Max interval for frequency jobs, in milliseconds.
     */
    public static final String FREQUENCY_JOB_MAX_INTERVAL = "powerjob.server.frequency-job.max-interval";

    /* ******************* knobs unlikely to be used by end users, mainly for internal testing ******************* */

    /**
     * Max number of active tasks; the surplus is swapped to disk to improve performance.
     */
    public static final String WORKER_RUNTIME_SWAP_MAX_ACTIVE_TASK_NUM = "powerjob.worker.swap.max-active-task-num";

    public static final String WORKER_RUNTIME_SWAP_TASK_SCHEDULE_INTERVAL_MS = "powerjob.worker.swap.scan-interval";

    public static final String SERVER_TEST_ACCOUNT_USERNAME = "powerjob.server.test-accounts";

    /**
     * Constants holder, never instantiated (consistent with {@code SystemInstanceResult}).
     */
    private PowerJobDKey() {
    }
}

View File

@ -0,0 +1,41 @@
package tech.powerjob.common;
import lombok.Getter;
import lombok.Setter;
/**
* PowerJob Query interface
*
* @author tjq
* @since 2021/1/15
*/
@Getter
@Setter
public abstract class PowerQuery {

    /*
     * Operator suffixes appended to a queried field name, e.g. the {@code appIdEq}
     * field below means "appId equals". Declared final: they are constants and
     * must not be reassigned at runtime.
     */
    public static final String EQUAL = "Eq";
    public static final String NOT_EQUAL = "NotEq";
    public static final String LIKE = "Like";
    public static final String NOT_LIKE = "NotLike";
    public static final String LESS_THAN = "Lt";
    public static final String LESS_THAN_EQUAL = "LtEq";
    public static final String GREATER_THAN = "Gt";
    public static final String GREATER_THAN_EQUAL = "GtEq";
    public static final String IN = "In";
    public static final String NOT_IN = "NotIn";
    public static final String IS_NULL = "IsNull";
    public static final String IS_NOT_NULL = "IsNotNull";

    // Restricts the query to a single app id ("appId equals"); accessors generated by Lombok.
    private Long appIdEq;
}

View File

@ -0,0 +1,12 @@
package tech.powerjob.common;
import java.io.Serializable;
/**
 * PowerJob serializable marker interface.
 *
 * <p>Declares no members of its own; it only extends {@link Serializable}.
 * Presumably implemented by objects exchanged between PowerJob server and
 * workers — confirm at the implementing types.
 *
 * @author tjq
 * @since 2020/4/16
 */
public interface PowerSerializable extends Serializable {
}

View File

@ -0,0 +1,104 @@
package tech.powerjob.common;
/**
* RemoteConstant
*
* @author tjq
* @since 2020/3/17
*/
public class RemoteConstant {

    /* ************************ AKKA WORKER ************************ */
    public static final int DEFAULT_WORKER_PORT = 27777;

    /* ************************ OTHERS ************************ */
    public static final String EMPTY_ADDRESS = "N/A";
    public static final long DEFAULT_TIMEOUT_MS = 5000;

    /* ************************ SERVER-self_side (s4s == server for server side) ************************ */
    public static final String S4S_PATH = "friend";

    /**
     * Heartbeat between servers inside the server cluster.
     */
    public static final String S4S_HANDLER_PING = "ping";

    /**
     * Handles execution requests coming from other servers.
     */
    public static final String S4S_HANDLER_PROCESS = "process";

    /* ************************ SERVER-worker_side (s4w == server for worker side) ************************ */
    public static final String S4W_PATH = "server";

    /**
     * Server handler for online logs reported by workers.
     */
    public static final String S4W_HANDLER_REPORT_LOG = "reportLog";

    /**
     * Server handler for worker heartbeats.
     */
    public static final String S4W_HANDLER_WORKER_HEARTBEAT = "workerHeartbeat";

    /**
     * Server handler for instance status reported by a TaskTracker.
     */
    public static final String S4W_HANDLER_REPORT_INSTANCE_STATUS = "reportInstanceStatus";

    /**
     * Server handler answering which cluster nodes may run a job.
     */
    public static final String S4W_HANDLER_QUERY_JOB_CLUSTER = "queryJobCluster";

    /**
     * Server handler for a worker's container-deployment request.
     */
    public static final String S4W_HANDLER_WORKER_NEED_DEPLOY_CONTAINER = "queryContainer";

    /* ************************ Worker-TaskTracker ************************ */
    public static final String WTT_PATH = "taskTracker";

    /**
     * Run-job command issued by the server.
     */
    public static final String WTT_HANDLER_RUN_JOB = "runJob";

    /**
     * Stop-instance command issued by the server.
     */
    public static final String WTT_HANDLER_STOP_INSTANCE = "stopInstance";

    /**
     * Instance-status query issued by the server.
     */
    public static final String WTT_HANDLER_QUERY_INSTANCE_STATUS = "queryInstanceStatus";

    /**
     * ProcessorTracker reports task status (including the execution result).
     */
    public static final String WTT_HANDLER_REPORT_TASK_STATUS = "reportTaskStatus";

    /**
     * ProcessorTracker reports its own status.
     */
    public static final String WTT_HANDLER_REPORT_PROCESSOR_TRACKER_STATUS = "reportProcessorTrackerStatus";

    /**
     * Map task (fan-out of subtasks).
     */
    public static final String WTT_HANDLER_MAP_TASK = "mapTask";

    /* ************************ Worker-ProcessorTracker ************************ */
    public static final String WPT_PATH = "processorTracker";
    public static final String WPT_HANDLER_START_TASK = "startTask";
    public static final String WPT_HANDLER_STOP_INSTANCE = "stopInstance";

    /* ************************ Worker-NORMAL ************************ */
    public static final String WORKER_PATH = "worker";
    public static final String WORKER_HANDLER_DEPLOY_CONTAINER = "deployContainer";
    public static final String WORKER_HANDLER_DESTROY_CONTAINER = "destroyContainer";

    /**
     * Constants holder, never instantiated (consistent with {@code SystemInstanceResult}).
     */
    private RemoteConstant() {
    }
}

View File

@ -0,0 +1,91 @@
package tech.powerjob.common;
/**
* 系统生成的任务实例运行结果
*
* @author tjq
* @since 2020/4/11
*/
public class SystemInstanceResult {

    private SystemInstanceResult() {
    }

    /* *********** for normal instances *********** */

    /**
     * Too many instances running at the same time (format args: actual, limit).
     */
    public static final String TOO_MANY_INSTANCES = "too many instances(%d>%d)";
    /**
     * No worker available.
     */
    public static final String NO_WORKER_AVAILABLE = "no worker available";
    /**
     * Instance execution timed out.
     */
    public static final String INSTANCE_EXECUTE_TIMEOUT = "instance execute timeout";
    /**
     * Execution timed out and the task was successfully interrupted.
     */
    public static final String INSTANCE_EXECUTE_TIMEOUT_INTERRUPTED = "instance execute timeout,interrupted success";
    /**
     * Execution timed out and the task was forcibly stopped.
     */
    public static final String INSTANCE_EXECUTE_TIMEOUT_FORCE_STOP = "instance execute timeout,force stop success";
    /**
     * User manually stopped the instance; task interrupted successfully.
     */
    public static final String USER_STOP_INSTANCE_INTERRUPTED = "user stop instance,interrupted success";
    /**
     * User manually stopped the instance; task forcibly terminated by the system.
     */
    public static final String USER_STOP_INSTANCE_FORCE_STOP = "user stop instance,force stop success";
    /**
     * Failed to create the root task.
     */
    public static final String TASK_INIT_FAILED = "create root task failed";
    /**
     * Unknown error.
     */
    public static final String UNKNOWN_BUG = "unknown bug";
    /**
     * TaskTracker has not reported for a long time.
     */
    public static final String REPORT_TIMEOUT = "worker report timeout, maybe TaskTracker down";

    public static final String CAN_NOT_FIND_JOB_INFO = "can't find job info";

    /* *********** workflow only *********** */

    public static final String MIDDLE_JOB_FAILED = "middle job failed";
    public static final String MIDDLE_JOB_STOPPED = "middle job stopped by user";
    public static final String CAN_NOT_FIND_JOB = "can't find some job";
    public static final String CAN_NOT_FIND_NODE = "can't find some node";
    public static final String ILLEGAL_NODE = "illegal node info";

    /**
     * No enabled nodes.
     */
    public static final String NO_ENABLED_NODES = "no enabled nodes";
    /**
     * Manually stopped by the user.
     */
    public static final String STOPPED_BY_USER = "stopped by user";
    public static final String CANCELED_BY_USER = "canceled by user";
    /**
     * Invalid DAG.
     */
    public static final String INVALID_DAG = "invalid dag";
    /**
     * Disabled node.
     */
    public static final String DISABLE_NODE = "disable node";
    /**
     * Node marked as successful.
     */
    public static final String MARK_AS_SUCCESSFUL_NODE = "mark as successful node";
}

View File

@ -0,0 +1,18 @@
package tech.powerjob.common;
/**
* 工作流上下文相关常量
*
* @author Echo009
* @since 2021/2/3
*/
public final class WorkflowContextConstant {

    /**
     * Key under which the workflow's initial parameters are stored in the workflow context.
     */
    public static final String CONTEXT_INIT_PARAMS_KEY = "initParams";

    /**
     * Constants holder, never instantiated (consistent with {@code SystemInstanceResult}).
     */
    private WorkflowContextConstant() {
    }
}

View File

@ -0,0 +1,26 @@
package tech.powerjob.common.enhance;
import lombok.extern.slf4j.Slf4j;
import java.util.concurrent.ScheduledExecutorService;
/**
* 安全的 runnable可防止因抛出异常导致周期性任务终止
* 使用 {@link ScheduledExecutorService} 执行任务时推荐继承此类捕获并打印异常避免因为抛出异常导致周期性任务终止
*
* @author songyinyin
* @since 2023/9/20 15:52
*/
@Slf4j
public abstract class SafeRunnable implements Runnable {

    /**
     * Template method: subclasses put the actual (periodic) work here.
     */
    protected abstract void run0();

    /**
     * Delegates to {@link #run0()}, logging rather than propagating any exception,
     * so a {@code ScheduledExecutorService} keeps firing subsequent runs.
     */
    @Override
    public void run() {
        try {
            run0();
        } catch (Exception e) {
            log.error("[SafeRunnable] run failed", e);
        }
    }
}

View File

@ -0,0 +1,30 @@
package tech.powerjob.common.enhance;
import lombok.extern.slf4j.Slf4j;
import java.util.concurrent.ScheduledExecutorService;
/**
* 使用 {@link ScheduledExecutorService} 执行任务时推荐使用此对象包装一层避免因为抛出异常导致周期性任务终止
*
* @author songyinyin
* @since 2023/9/20 16:04
*/
@Slf4j
public class SafeRunnableWrapper implements Runnable {

    /**
     * Wrapped task; any exception it throws is logged instead of propagated.
     */
    private final Runnable delegate;

    public SafeRunnableWrapper(Runnable runnable) {
        this.delegate = runnable;
    }

    /**
     * Runs the wrapped task, swallowing (and logging) exceptions so a
     * {@code ScheduledExecutorService} does not cancel the periodic schedule.
     */
    @Override
    public void run() {
        try {
            delegate.run();
        } catch (Exception e) {
            log.error("[SafeRunnableWrapper] run failed", e);
        }
    }
}

View File

@ -0,0 +1,43 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* DispatchStrategy
*
* @author tjq
* @since 2021/2/22
*/
@Getter
@AllArgsConstructor
public enum DispatchStrategy {

    /**
     * Prefer the healthiest worker.
     */
    HEALTH_FIRST(1),
    /**
     * Pick a worker at random.
     */
    RANDOM(2),
    /**
     * Dispatch to an explicitly designated worker.
     */
    SPECIFY(11);

    private final int v;

    /**
     * Resolves a strategy from its numeric code.
     *
     * @param v numeric code; {@code null} falls back to {@link #HEALTH_FIRST}
     * @return the matching strategy
     * @throws IllegalArgumentException if the code matches no strategy
     */
    public static DispatchStrategy of(Integer v) {
        if (v == null) {
            return HEALTH_FIRST;
        }
        for (DispatchStrategy candidate : values()) {
            if (candidate.v == v) {
                return candidate;
            }
        }
        throw new IllegalArgumentException("unknown DispatchStrategy of " + v);
    }
}

View File

@ -0,0 +1,22 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Encrypt type: how a stored value is encoded.
 *
 * @author tjq
 * @since 2024/8/10
 */
@Getter
@AllArgsConstructor
public enum EncryptType {
    /** Plain text, no digest applied. */
    NONE("none"),
    /** MD5 digest — presumably for stored credentials; confirm at usage sites. */
    MD5("md5")
    ;
    // Serialized identifier for this type; getter generated by Lombok @Getter.
    private final String code;
}

View File

@ -0,0 +1,22 @@
package tech.powerjob.common.enums;
/**
 * Runtime environment of a PowerJob deployment.
 *
 * @author tjq
 * @since 2020/5/3
 */
public enum Env {

    /** Development or daily-test environment. */
    DAILY,

    /** Pre-release (staging) environment. */
    PRE,

    /** Production environment. */
    PRODUCT
}

Some files were not shown because too many files have changed in this diff Show More