Compare commits

...

527 Commits

Author SHA1 Message Date
tjq
5d82b7cc5c chore: github workflows 2024-12-07 21:09:09 +08:00
tjq
f506ba8956 chore: github workflows 2024-12-07 21:03:29 +08:00
tjq
8166b78e68 chore: github workflows 2024-12-07 20:57:00 +08:00
tjq
730982b085 chore: github workflows 2024-12-07 20:52:39 +08:00
tjq
f6a8666031 chore: github workflows 2024-12-07 20:49:14 +08:00
tjq
e6264fc9a4 Merge branch '5.1.1_v2' 2024-12-07 20:43:22 +08:00
tjq
dc62c1b992 refactor: optimize worker skip round log 2024-12-07 20:42:45 +08:00
tjq
c627776764 chore: upgrade project version to 5.1.1 2024-12-07 20:38:31 +08:00
tjq
aefa9290c9 chore: github workflows 2024-12-07 20:36:16 +08:00
tjq
fdd80f6cf9 feat: opt log 2024-12-07 17:32:20 +08:00
tjq
7333ee3951 Merge branch '5.1.0-bugfix2' into 5.1.1_v2 2024-12-07 17:00:45 +08:00
tjq
92ddc6af4d feat: support create app with namespace_code #976 2024-11-22 22:03:06 +08:00
tjq
508127426f feat: appname and namespace duplicate check #1009 2024-11-22 21:45:54 +08:00
tjq
57627305fa fix: Repeated execution after broadcast worker node down #1003 2024-11-22 21:32:13 +08:00
tjq
4e84bc60d7 feat: support method job direct return ProcessResult #798 2024-11-22 21:05:23 +08:00
tjq
4fe2d7fdf1 feat: Add PowerjobClient api /queryInstance #1034 2024-11-21 22:53:57 +08:00
tjq
f44bd43d13 fix: reduce Probabilistic non-execution #1033 2024-11-21 22:11:44 +08:00
tjq
e912e2c31d feat: NOT_ALLOWED_CHANGE_PASSWORD_ACCOUNTS 2024-11-09 18:05:26 +08:00
tjq
f9dd8d7713 fix: PADDLING not work 2024-11-06 23:23:52 +08:00
tjq
0bb069fa5b Merge branch '5.1.0-bugfix' 2024-10-31 00:23:52 +08:00
tjq
f0b2fbb5b7 chore: change main version to 5.1.0-bugfix 2024-10-31 00:03:54 +08:00
tjq
7443edf735 Merge branch 'pr-panyyf-master' into 5.1.0-bugfix 2024-10-30 23:53:25 +08:00
panyy_f
1383e48fec fix: worker-agent cannot specify multiple server issues when registering with the server #1012 2024-10-28 14:14:15 +08:00
tjq
243f7bb179 fix: PostgresqlSeriesDfsService can't restartup #974 2024-10-26 01:16:25 +08:00
tjq
827bcd2502 Merge branch 'pr-HeZhanfeng-fix-multi-thread-safety-vulnerabilities' into 5.1.0-bugfix 2024-10-26 00:44:59 +08:00
tjq
8f3981dd09 fix: OpenAPi Save task generates duplicate records #1018 2024-10-26 00:37:14 +08:00
hezhanfeng
9bab361618 fix:multi-thread safety vulnerabilities(修复格式化对象非线程安全的漏洞) 2024-10-17 11:11:28 +08:00
hezhanfeng
01e15bda39 fix:multi-thread safety vulnerabilities(修复格式化对象非线程安全的漏洞) 2024-09-30 11:02:25 +08:00
tjq
0bf95cf419 Merge branch '5.1.0' 2024-08-12 00:39:33 +08:00
tjq
f2bed56544 chore: add database schema file 2024-08-11 23:35:27 +08:00
tjq
85f5faaaac fix: PowerJobClient refresh token failed when jwt expired 2024-08-11 23:00:37 +08:00
tjq
44ef76328b refactor: optimize SwitchableStatus's package 2024-08-11 22:25:49 +08:00
tjq
5c49b8d8dd chore: upgrade project version and fe resource 2024-08-11 20:01:18 +08:00
tjq
944b06ee82 fix: list all app when user doesn't have any app permission 2024-08-11 19:34:46 +08:00
tjq
a35573544c fix: NetworkInterfaceChecker can't work 2024-08-11 18:22:48 +08:00
tjq
fea1974014 feat: worker use random server address #953 2024-08-11 11:32:28 +08:00
tjq
4527454a7c feat: AdpPostgreSQLDialect #750 2024-08-11 01:51:03 +08:00
tjq
a261b864ca fix: Single worker use padding mode made deadlock #918 2024-08-11 01:20:04 +08:00
tjq
4f5ea6f897 Merge branch 'pr-LittleCadet-master' into 5.1.0 2024-08-11 01:13:58 +08:00
tjq
0a1191572e refactor: optimize worker log 2024-08-11 01:12:56 +08:00
tjq
605497b36d feat: PowerJobClient support shutdown #895 2024-08-11 00:50:25 +08:00
tjq
3e0088870a feat: PowerJob Client support ClientExtension(current for dynamic server ip) #895 2024-08-11 00:43:18 +08:00
tjq
a1dad6c39e fix: AKKA NAT BUG #929 2024-08-11 00:20:34 +08:00
tjq
6426424401 fix: SqlProcessorConfiguration can't work due to Conditional bean config error #946 2024-08-10 23:41:29 +08:00
tjq
1774680792 fix: Correct protocol not used for inter-server communication #949 2024-08-10 23:32:44 +08:00
tjq
29e0b2deb0 feat: app password use AES GCM #935 2024-08-10 23:26:49 +08:00
tjq
eb4d7ab8eb feat: app password use ciphertext 2024-08-10 14:42:09 +08:00
tjq
e711ed7251 feat: PowerJobClient Support Authentication 2024-08-10 11:41:07 +08:00
tjq
53be566173 feat: OpenApiInterceptor 2024-08-10 00:04:12 +08:00
tjq
bee4795027 feat: OpenApiInterceptor 2024-08-09 21:47:47 +08:00
tjq
84b90a366c feat: open-api support auth 2024-08-09 20:55:08 +08:00
tjq
c04cb08390 fix: use new jwt header name to fix nginx remove header bug #910 2024-07-08 23:44:58 +08:00
shenkang
4507a6a883 fix: 当该appId的worker是单机运行 且 padding时, 导致Dispatcher分发任务处于死循环中, 致使无法分发任务,状态一直为运行中 2024-05-30 11:11:46 +08:00
tjq
3edaae67dd chore: Synchronising version 5.0.1 resources 2024-03-16 22:09:39 +08:00
tjq
bb99ec9d04 Merge branch 'dev' into 5.0.1_beta 2024-03-16 22:02:30 +08:00
tjq
54fadf6368 feat: extend dfs support PostgresqlSeriesDfsService 2024-03-16 22:01:12 +08:00
tjq
1e092bb866 Merge branch 'pr-Jetol-storage-dfs-pg-jetol' into 5.0.1_beta 2024-03-16 21:57:21 +08:00
tjq
6a59f50b96 fix: can't upload container #843 2024-03-16 21:53:26 +08:00
tjq
9b5916daf3 feat: support user manager #860 2024-03-16 18:41:33 +08:00
tjq
5e7751f092 fix: Java 8 date/time type java.time.LocalDateTime not supported by default #869 2024-03-16 13:56:03 +08:00
tjq
e21b171b98 fix: Java 8 date/time type java.time.LocalDateTime not supported by default #869 2024-03-16 13:37:22 +08:00
tjq
a9d8a680dc feat: optimize TaskTracker log #858 2024-03-16 13:15:41 +08:00
tjq
b822a685f4 feat: CSInitializerFactory support graalvm #868 2024-03-16 13:08:47 +08:00
tjq
dd3a17275f fix: openapi can't work #854 2024-03-16 11:47:48 +08:00
tjq
89e7ef8b40 fix: duplicate authorisation #854 2024-03-15 23:32:05 +08:00
tjq
32cecc59e9 fix: Some attributes of namespace cannot be shown back #854 2024-03-15 23:18:06 +08:00
tjq
5be7b24f4b feat: support delete app #855 2024-03-15 22:58:07 +08:00
tjq
1a2df5e616 fix: Authorized users do not display the app password #870 2024-03-15 22:39:25 +08:00
tjq
075ecd8a84 chore: merge 5.0.0 and 4.3.9 to 5.0.1 2024-03-09 15:55:19 +08:00
tjq
02304fe921 Merge branch '5.0.0_v2' into 5.0.1_beta
# Conflicts:
#	others/powerjob-mysql.sql
#	pom.xml
#	powerjob-client/pom.xml
#	powerjob-common/pom.xml
#	powerjob-official-processors/pom.xml
#	powerjob-remote/pom.xml
#	powerjob-remote/powerjob-remote-benchmark/pom.xml
#	powerjob-remote/powerjob-remote-framework/pom.xml
#	powerjob-remote/powerjob-remote-impl-akka/pom.xml
#	powerjob-remote/powerjob-remote-impl-http/pom.xml
#	powerjob-server/pom.xml
#	powerjob-server/powerjob-server-common/pom.xml
#	powerjob-server/powerjob-server-core/pom.xml
#	powerjob-server/powerjob-server-extension/pom.xml
#	powerjob-server/powerjob-server-migrate/pom.xml
#	powerjob-server/powerjob-server-monitor/pom.xml
#	powerjob-server/powerjob-server-persistence/pom.xml
#	powerjob-server/powerjob-server-remote/pom.xml
#	powerjob-server/powerjob-server-starter/pom.xml
#	powerjob-server/powerjob-server-starter/src/main/resources/static/index.html
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/1.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/10.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/11.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/2.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/3.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/4.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/5.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/6.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/7.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/8.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/9.js
#	powerjob-server/powerjob-server-starter/src/main/resources/static/js/app.js
#	powerjob-worker-agent/pom.xml
#	powerjob-worker-samples/pom.xml
#	powerjob-worker-spring-boot-starter/pom.xml
#	powerjob-worker/pom.xml
2024-03-09 15:40:54 +08:00
Jetol
b3abb461db 增加Postgresql存储扩展,配置项参考:tech.powerjob.server.persistence.storage.impl.PostgresqlSeriesDfsService 2024-03-08 17:23:00 +08:00
tjq
d44131128e Merge branch '4.3.9' 2024-03-02 20:11:56 +08:00
tjq
f4c0a0309f chore: Main version upgraded to 4.3.9 2024-03-02 19:44:54 +08:00
tjq
9046a8bfcf fix: Server scheduling exception when worker is overloaded #853 2024-03-01 22:06:51 +08:00
tjq
9d95d4ce04 feat: Optimising container deployment logs #850 2024-03-01 21:52:10 +08:00
tjq
86ec85331a fix: Processor not changed after container redeployment #850 2024-03-01 21:21:26 +08:00
tjq
fb1159e1b5 fix: MinioOssService is not working properly #844 2024-02-26 23:39:44 +08:00
tjq
9c0731f20d Merge branch '4.3.8_zjlab_v2' 2024-02-26 01:18:09 +08:00
tjq
bd725aac15 chore: optimize docker publish script 2024-02-26 01:17:51 +08:00
tjq
c35ae19ba8 fix: Some issues found by codereview 2024-02-25 21:58:25 +08:00
tjq
c11d544afe chore: upgrade project version to 4.3.8 2024-02-25 19:47:38 +08:00
tjq
e64ad0f74d fix: Loss of subtask data when mapreduce enters swap mode 2024-02-25 19:13:03 +08:00
tjq
07e0e17ec0 feat: [ops] enhance Map/MapReduce's dev ops 2024-02-25 12:23:58 +08:00
tjq
37ef35bd80 feat: [ops] enhance Map/MapReduce's dev ops 2024-02-25 02:12:19 +08:00
tjq
4046ea39b5 feat: support TaskTrackerBehavior(PADDLING) 2024-02-24 20:58:48 +08:00
tjq
815d44ef7e feat: Supports selection of TaskTracker nodes for execution 2024-02-24 19:59:50 +08:00
tjq
c717fd3fb8 feat: [SuperMR] Map/MapReduce job can use swap to support ∞ subtask 2024-02-24 13:46:51 +08:00
tjq
cf4ed93812 feat: [SuperMR] YuGong 2024-02-24 11:50:16 +08:00
tjq
dda79439ca feat: [SuperMR] ExternalTaskPersistenceService 2024-02-23 23:07:16 +08:00
songyinyin
7437ad6067 fix: error accessing homepage when not logged in 2024-02-20 16:50:23 +08:00
tjq
12e2f83764 chore: fix some problem then upgrade project version to 5.0.0-beta2 2024-02-18 20:06:29 +08:00
tjq
a750d0c55c feat: support user become app admin by username and password 2024-02-18 19:56:51 +08:00
tjq
d1d0407046 fix: Jackson compatibility issue preventing 5.x server from scheduling 4.x worker 2024-02-18 18:08:49 +08:00
tjq
9dbd470c5a feat: upgrade sql for 5.0.0 2024-02-17 22:24:44 +08:00
tjq
5e1f7e2d11 chore: upgrade project version to 5.0.0-beta 2024-02-17 21:59:13 +08:00
tjq
3ea089eaee feat: optimize app manager list 2024-02-16 21:45:27 +08:00
tjq
7b7582dd91 feat: [auth] global admins 2024-02-16 13:28:58 +08:00
tjq
686189e6ca feat: NewSystemInitializer and token verify 2024-02-16 12:38:42 +08:00
tjq
9419340829 fix: [auth] Bugs in user login module 2024-02-13 20:34:45 +08:00
tjq
6539c66226 fix: [auth] Extracting PwjbUserInfo to resolve user contamination 2024-02-13 18:23:29 +08:00
tjq
c350607762 feat: support user related query 2024-02-13 16:22:56 +08:00
tjq
31a7690844 feat: [auth] The web interface adds ApiPermission for authentication. 2024-02-13 11:21:14 +08:00
tjq
05c22a5dc5 fix: [auth] grant and check permission's bug 2024-02-12 23:42:21 +08:00
tjq
919a5c3b35 feat: support namespace 2024-02-12 22:12:03 +08:00
tjq
841c7891c8 feat: [auth] grant permission 2024-02-12 11:11:06 +08:00
tjq
3fdcc1e599 feat: [auth] use CachingRequestBodyFilter fix multi read problem 2024-02-11 23:52:35 +08:00
tjq
e18b9a8962 feat: [auth] finished login part 2024-02-11 17:14:21 +08:00
tjq
cf8153ae39 feat: [auth] AuthController 2024-02-11 11:04:20 +08:00
tjq
a1c12bf1c7 feat: [auth] PowerJobPermissionService 2024-02-11 10:32:13 +08:00
tjq
0caa854409 feat: [auth] PowerJobLoginService 2024-02-11 10:14:47 +08:00
tjq
cda55c918b feat: [auth] design ThirdPartyLoginService 2024-02-10 14:11:14 +08:00
tjq
4793c19af6 chore: docker publish script add support for Apple Silicon device 2024-02-10 12:44:44 +08:00
tjq
78b58d02e8 Merge branch '4.3.7_v2' 2024-02-09 13:36:21 +08:00
tjq
ea919b102f docs: Happy New Year 2024-02-09 11:13:05 +08:00
tjq
599d710e27 refactor: rename RunnableAndCatch to SafeRunnable 2024-02-08 20:14:35 +08:00
tjq
ab7a398f61 Merge branch 'dev' into 4.3.7_v2 2024-02-08 20:06:27 +08:00
tjq
01d7247efa chore: Upgrade project version to 4.3.7 2024-02-08 19:45:56 +08:00
tjq
b29e265e42 feat: Optimizing IP acquisition logic with PingPongSocketServer #762 2024-02-08 19:41:40 +08:00
tjq
61aecc6354 refactor: optimize NetUtils 2024-02-08 16:08:39 +08:00
tjq
6de5e83a2f chore: upgrade logback version to fix logback serialization vulnerability #80 2024-02-08 15:12:55 +08:00
tjq
88f7a06596 Merge branch 'pr-yuhan0501-4.3.6_bugfix' into 4.3.7_v2 2024-02-08 15:04:11 +08:00
tjq
98fc4d3320 Merge branch 'lvhjean-4.3.6-bugfix-email-empty' into 4.3.7_v2 2024-02-08 15:01:17 +08:00
tjq
6842fb6a7b perf: add cost log for TaskPersistenceService 2024-02-08 14:25:55 +08:00
tjq
debc2e0abb fix: instanceInfo cannot display details on non-scheduled server nodes 2024-02-08 13:14:59 +08:00
tjq
f3dd56bf54 chore: upgrade h2 and spring version 2024-02-08 12:26:22 +08:00
tjq
e63dc91643 fix: @PowerJobHandler does not work in cglib proxy #770 2024-02-08 11:14:19 +08:00
tjq
4be6a139dd fix: Cyclic Logging on Exception #769 2024-02-08 10:42:47 +08:00
tjq
1ba74bf0af test: performance test for h2 2024-02-05 00:12:37 +08:00
tjq
ff84d46713 perf: Discarding the results of the map task to improve performance 2024-02-04 22:17:53 +08:00
liwh
d61d85abd4 fix: 修复 email未填写导致告警报异常, #808 2024-01-04 14:04:32 +08:00
yuhan
23d94ed46f 修复使用mysql存储日志的情况下文件流未关闭造成的文件文件句柄不释放的bug。
(cherry picked from commit 2a9444770d227ffe46d6c700a7e8570ef3e1bc17)
2024-01-03 14:15:46 +08:00
songyinyin
8f3803bda6 fix: 周期性任务出现异常时,导致任务停止 2023-09-20 17:17:36 +08:00
songyinyin
592dff8d75 chore: When the TaskTracker is successfully executed normally, the log level changes to Info #657 2023-09-20 17:16:51 +08:00
tjq
9b7c237cf0 Merge branch '4.3.6' 2023-09-03 14:05:03 +08:00
tjq
9b3a3cd586 chore: change main version to 4.3.6 2023-09-03 13:44:10 +08:00
tjq
966f09c034 feat: optimize NetUtils 2023-09-03 13:30:26 +08:00
tjq
4a9f38c760 Merge branch 'pr-disk-overflow' into 4.3.6 2023-09-02 13:49:25 +08:00
tjq
b96768208b Merge branch 'dev-pr' of https://github.com/zhihui1999/PowerJob into pr-disk-overflow 2023-09-02 13:48:09 +08:00
tjq
1545733012 Merge branch 'pr-Minio' into 4.3.6 2023-09-02 13:47:20 +08:00
tjq
fa8b4f2b62 Merge branch 'xinyi' of https://github.com/Yusanku/PowerJob into pr-Minio 2023-09-02 13:46:57 +08:00
tjq
882392a5e5 feat: support lazy init#725 2023-09-02 13:35:54 +08:00
tjq
c875ba3d37 feat: support lazy init#725 2023-09-02 13:19:15 +08:00
tjq
a138f9c8cc feat: support lazy init#725 2023-09-02 11:53:01 +08:00
tjq
360f105c01 docs: add second qq group 2023-08-31 23:51:50 +08:00
赵志辉
3544f76aaa fix: Memory overflow caused by mounting cloud disks 2023-08-24 11:06:16 +08:00
yw
8de1c47971 日志存储扩展-Minio 2023-08-23 09:37:14 +08:00
tjq
5dbceb7ce4 chore: upgrade dependency version 2023-08-22 22:52:20 +08:00
songyinyin
93dadab832 fix: When debugging, ShellProcessor appears Java.io.IOException: Stream closed #682 2023-08-17 14:37:17 +08:00
tjq
9ab2e92934 Merge branch '4.3.5' 2023-08-14 23:57:14 +08:00
tjq
0d359b8a8e docs: optimize README 2023-08-14 23:56:50 +08:00
tjq
c91240c1b8 chore: change main version to 4.3.5 2023-08-14 23:50:17 +08:00
tjq
b14b49f75e chore: optimize test env script 2023-08-14 23:41:06 +08:00
tjq
74a92622a3 Merge branch 'pr-gitee-worker-cluster-bugfix' into 4.3.5 2023-08-14 23:21:28 +08:00
tjq
b5085e09f1 Merge branch 'patch-redeploy-contationer-in-multi-server' of https://gitee.com/diligents/PowerJob into pr-gitee-worker-cluster-bugfix 2023-08-14 23:20:55 +08:00
tjq
91abbc03d9 fix: rollback NetUtils's interface index sort 2023-08-14 23:14:44 +08:00
tjq
bdcc9b131d refactor: optimize DfsService's log 2023-08-14 23:14:07 +08:00
tjq
58e542c69a Merge branch '4.3.4' 2023-08-13 23:05:55 +08:00
tjq
15fa1abd91 feat: Complete all testing and ready for release 2023-08-13 22:29:31 +08:00
tjq
c08b4f1858 fix: timeout bug #678 2023-08-13 21:30:58 +08:00
tjq
89b35c8495 feat: [officialProcessor] add VerificationProcessor 2023-08-13 18:00:20 +08:00
tjq
73ebe83c05 feat: code review problem modification, ready to release 4.3.4 2023-08-13 16:31:12 +08:00
tjq
ad08406d0b feat: [storageExt] finished MySqlSeriesDfsService 2023-08-13 16:11:10 +08:00
tjq
37a62549db feat: [storageExt] MySqlSeriesDfsService 2023-08-10 23:59:07 +08:00
tjq
c50a3edebf feat: [storageExt] MySqlSeriesDfsService 2023-08-10 00:16:40 +08:00
tjq
09b15dfbc1 feat: optimize online log cache time(60 -> 10) 2023-08-06 21:03:41 +08:00
tjq
6bcc275a70 Merge branch 'pr-archibald-nice-empty-job-notifyuserids' into 4.3.4 2023-08-06 20:42:48 +08:00
tjq
88ebd5e042 Merge branch 'pr-#669' into 4.3.4 2023-08-06 20:41:37 +08:00
tjq
df5e259e54 chore: change main version to 4.3.4 2023-08-06 20:17:40 +08:00
tjq
570ea0487b feat: output vertx request error 2023-08-06 19:25:47 +08:00
tjq
dd32916637 docs: add SECURITY.md #698 2023-08-04 22:20:01 +08:00
tjq
c3ce46aee9 refactor: optimize alarm code 2023-07-31 23:45:52 +08:00
tjq
d03247ea03 test: [storageExt] finished gridfs service's test 2023-07-30 21:59:35 +08:00
tjq
1c70bbc670 feat: [storageExt] use PropertyAndOneBeanCondition to control multi impl 2023-07-30 21:14:18 +08:00
tjq
b251df4c35 feat: [storageExt] support alicloud oss and add some test code 2023-07-30 15:35:18 +08:00
tjq
f0514ac65f feat: [storageExt] support alicloud oss 2023-07-30 14:39:57 +08:00
tjq
236d0a7f3b feat: [storageExt] Unified File Storage Solution with DFsService 2023-07-30 12:00:45 +08:00
tjq
fc57226d3a refactor: optimize extension package 2023-07-16 18:14:20 +08:00
tjq
5e9935fed4 feat: [storageExt] define DFsService 2023-07-16 17:55:01 +08:00
tjq
d3140d0501 feat: support non-LAN communication(server side) 2023-07-15 22:22:38 +08:00
tjq
7318fed73a feat: support non-LAN communication(worker side) 2023-07-15 21:38:56 +08:00
tjq
67a22e8b7e feat: add log for ContainerService 2023-07-12 20:45:38 +08:00
archieself
8aaa602082 fix-[#676] Set notifyUserIds to null when empty the notify info of a job. 2023-07-05 10:49:37 +08:00
张家伟
1adc25308f 获取主机网络连接信息时按索引从小到大排序,优先使用索引值小的网络接口。 2023-06-30 15:22:32 +08:00
tjq
00228f3b3e chore: update dependency maven version(3.6.3 to 3.9.2) 2023-06-11 17:51:42 +08:00
tjq
c3c735e5f1 chore: change main version to 4.3.3 2023-06-11 17:44:48 +08:00
tjq
df7ceb7ba5 Merge branch '4.3.3' 2023-06-11 17:24:05 +08:00
tjq
5da0c694c3 chore: change main version to 4.3.3 2023-06-11 17:18:51 +08:00
tjq
5cfd0f8e74 chore: change main version to 4.3.3 2023-06-11 16:45:44 +08:00
tjq
e2887a12f6 fix: SpringMethodProcessor can't throw correct exception 2023-06-11 16:44:28 +08:00
tjq
388581c321 chore: add <classifier>exec</classifier> for server 2023-06-11 15:51:12 +08:00
tjq
8953ecc74f feat: support powerjob method handler 2023-06-11 15:40:50 +08:00
tjq
8ecc5768c7 Merge branch 'pr-vannewang-master' into 4.3.3 2023-05-07 22:29:36 +08:00
tjq
a7394e518c fix: server can't record zero cost processor's status #620 2023-05-07 22:08:14 +08:00
tjq
174696066d feat: add log to check resource release status #627 2023-05-07 21:41:00 +08:00
wangxiaopeng
47b050aba2 抽取Spring API为公共抽象父类 2023-04-18 15:59:45 +08:00
wangxiaopeng
46165ccd97 获取任务执行结果,并将结果返回给控制台 2023-04-18 14:43:38 +08:00
wangxiaopeng
b9bd8079f2 调度方式(方法形式)新增 2023-04-06 18:08:56 +08:00
tjq
1a40447b23 Merge branch '4.3.2-main' 2023-03-19 23:33:16 +08:00
tjq
e2f5ca440f chore: upgrade main version to 4.3.2 2023-03-19 23:25:44 +08:00
tjq
b71edc1f26 fix: netty compatibility issues #591 2023-03-19 20:34:26 +08:00
tjq
2a87a24980 fix: workflow append string will have quotes #307 2023-03-17 23:06:08 +08:00
tjq
93b01191d0 fix: oms.table-prefix can't work #584 2023-03-16 23:53:58 +08:00
tjq
aa65884b3f test: test chinese result #581 2023-03-16 23:47:33 +08:00
tjq
4b79bd73bb fix: test mode can't startup when server not available #580 2023-03-16 23:35:10 +08:00
tjq
0a76d057ac feat: samples start to use http as protocol 2023-03-16 23:23:05 +08:00
tjq
7b003ed895 Merge branch '4.3.2-findbug' 2023-03-07 23:37:34 +08:00
tjq
70a696aaa8 chore: upgrade project version 2023-03-07 23:37:03 +08:00
tjq
5011ea983b chore: rollback akka version 2023-03-07 23:20:26 +08:00
tjq
a93602f845 test: add test code for repetitions 2023-03-07 23:19:57 +08:00
tjq
805046dccb Merge branch '4.3.1-main' 2023-03-07 00:36:47 +08:00
tjq
0772b41fac feat: sync web resource 2023-03-07 00:36:27 +08:00
tjq
1fc240ab27 Merge branch '4.3.1-main' 2023-03-07 00:12:56 +08:00
tjq
36b439603c docs: optimize README.md 2023-03-07 00:12:11 +08:00
tjq
0bb46be9d0 chore: upgrade project version 2023-03-07 00:09:31 +08:00
tjq
4eeda2c662 chore: skip deploy testing package 2023-03-05 21:49:59 +08:00
tjq
fcca0c0c94 feat: powerjob client support export job 2023-03-05 16:42:56 +08:00
tjq
06c4e085cb feat: sync console code from powerjob-console 2023-03-05 16:12:17 +08:00
tjq
81752dd26f test: TestFindByBeanNameProcessor 2023-03-05 15:58:54 +08:00
tjq
93d44ea07d chore: unified official processor's version 2023-03-05 15:55:16 +08:00
tjq
a90cf82974 fix: SQL script GITEE#I6DXY3 2023-03-05 15:47:22 +08:00
tjq
e36ac8bc59 chore: change main version to 4.3.1 2023-03-05 15:45:28 +08:00
tjq
3fcd99e364 feat: optimize pr code 2023-03-05 15:43:55 +08:00
tjq
74ef8f1d23 Merge branch '4.3.1-pr-AZI-D-patch-1' into 4.3.1-main 2023-03-05 15:41:35 +08:00
tjq
40192486c5 feat: change jobname when export job 2023-03-05 12:57:15 +08:00
tjq
d45cb0712c fix: openapi can't work in springboot 2.7.4 #559 2023-03-05 12:15:19 +08:00
tjq
5985c04997 feat: remove all circular-references by aware 2023-03-04 21:03:22 +08:00
tjq
5ddaa33f47 feat: support job export #571 2023-03-04 15:03:22 +08:00
AZI-D
d78d139276
修复精确查询没有限制的bug 2023-03-02 15:41:44 +08:00
tjq
43dfc9a265 feat: finished DAILY_TIME_INTERVAL processor 2023-02-25 23:07:45 +08:00
tjq
3aa42819e4 fix: NPE of DailyTimeIntervalStrategyHandler 2023-02-25 18:24:25 +08:00
songyinyin
8ea4a5b260 feat: powerjob-worker-spring-boot-starter support SpringBoot 3 and jdk 17 2023-02-19 15:07:14 +08:00
tjq
34352a1eea chore: optimize build test env script 2023-02-18 22:29:15 +08:00
tjq
2d0dcf6c7b chore: support mongodb in testenv 2023-02-18 21:37:50 +08:00
songyinyin
cdf416d693 chore: powerjob-remote http set httpRequest head "application/json" 2023-02-18 19:59:56 +08:00
raylua
8c32c775da fix: In multi-servers node , no workers found prompt without @DesignateServer in not conform server node 2023-02-17 19:29:01 +08:00
tjq
369ebdab0b test: DailyTimeIntervalStrategyHandlerTest 2023-02-13 00:26:13 +08:00
tjq
e01770adc7 perf: optimize DailyTimeIntervalStrategyHandler 2023-02-11 23:02:26 +08:00
tjq
42823b8bdd feat: support DailyTimeInterval #558 2023-02-11 22:57:43 +08:00
tjq
9f2f68344c feat: support DailyTimeInterval #558 2023-02-11 15:22:40 +08:00
tjq
3f7d4328e9 feat: powerjob-worker-agent support use custom protocol 2023-02-09 23:32:18 +08:00
tjq
421705e1bc feat: optimize exception log when load failed in spring #550 2023-02-08 22:39:29 +08:00
tjq
69dc1c50aa feat: support OmsServerAndLocalLogger #553 2023-02-08 22:34:05 +08:00
tjq
b89ac389fd feat: optimize exception log in server elect #551 2023-02-08 22:26:36 +08:00
tjq
a5e3e829b5 chore: test env 2023-02-08 01:07:05 +08:00
tjq
7d947038eb feat: try to load by bean name 2023-02-07 23:42:33 +08:00
songyinyin
c8a456f56d chore: docker-compose powerjob-worker-samples runs after powerjob-server 2023-02-03 22:22:50 +08:00
tjq
afa54e7958 chore: limit memory to ensure test env alive 2023-02-01 22:42:48 +08:00
tjq
39893b1e92 feat: optimize PowerJobSpringWorker 2023-02-01 22:13:52 +08:00
tjq
95a1f43994 feat: use async log to optimize agent performance 2023-01-31 00:02:41 +08:00
tjq
d7c494e463 chore: add gclog for test env 2023-01-30 22:53:06 +08:00
tjq
5ea57eebcc chore: optimize test env docker-compose 2023-01-30 20:52:35 +08:00
tjq
ae36ccf75a Merge branch '4.3.0' 2023-01-28 12:17:12 +08:00
tjq
d5b4faa49c chore: update docker build script 2023-01-28 12:16:47 +08:00
tjq
b0fae5edf8 docs: optimize comment 2023-01-28 10:51:19 +08:00
tjq
1c60f17b1b feat: optimize server info 2023-01-27 19:16:54 +08:00
tjq
d9b1272802 chore: optimize test env script 2023-01-27 19:08:23 +08:00
tjq
31d9b5b7e6 chore: optimize test env script 2023-01-27 16:10:40 +08:00
tjq
19a3f2fbed docs: optimize readme 2023-01-27 15:58:53 +08:00
tjq
c9f5fb3f51 feat: optimize container log 2023-01-27 15:22:19 +08:00
tjq
a25eac67c7 fix: create TaskTracker failed causes HashMap to deadlock 2023-01-27 13:13:56 +08:00
tjq
fb2046649e feat: optimize code 2023-01-27 13:05:35 +08:00
tjq
54beb3b2d1 feat: optimize container service 2023-01-27 11:22:16 +08:00
tjq
2bd2ceca8e chore: optimize test env 2023-01-24 16:12:54 +08:00
tjq
8df74b9670 chore: optimize test env 2023-01-24 16:12:15 +08:00
tjq
6921cfdcf5 chore: optimize test env 2023-01-24 16:10:53 +08:00
tjq
da4aa8a9fe chore: optimize test env 2023-01-24 15:54:34 +08:00
tjq
a9f81d260c feat: add script for build test env 2023-01-24 15:22:53 +08:00
tjq
7b56393aee feat: use softValues to optimize memory usage 2023-01-24 13:23:34 +08:00
tjq
1b1efe6b80 feat: optimize vertx config 2023-01-24 12:56:56 +08:00
tjq
3bfe58abd2 feat: process empty return in vertx 2023-01-24 12:46:35 +08:00
tjq
55e259bcf7 chore: upgrade project version to 4.3.0 2023-01-24 10:39:30 +08:00
tjq
bc08b76d23 chore: fix compile error in official-processors 2023-01-24 10:29:34 +08:00
tjq
5f75dbe9fc feat: replace Deprecated method 2023-01-23 13:03:32 +08:00
tjq
e73675ce09 feat: add comment for ProcessorFactory 2023-01-23 10:18:59 +08:00
tjq
8e94976cdd feat: allowed user to customize the storage path of the h2 database #521 2023-01-22 17:50:29 +08:00
tjq
b8199bf036 feat: optimize demo project 2023-01-22 17:37:03 +08:00
tjq
6c21c7864a feat: add jaxb-api to samples project to support JDK 2023-01-22 17:22:24 +08:00
tjq
dc61bb4648 fix: JavaUtils cache failed in windows env 2023-01-22 17:11:56 +08:00
tjq
afdf4a7dc2 feat: worker starter use new port config name 2023-01-22 16:38:05 +08:00
tjq
789bcb5d10 chore: worker http support 2023-01-22 11:15:11 +08:00
tjq
5b78204beb chore: optimize pom config 2023-01-22 11:04:28 +08:00
tjq
dca97010c7 fix: server return Optional to worker 2023-01-22 10:52:53 +08:00
tjq
63a5e2b458 feat: optimize ServerElectionService 2023-01-22 10:36:22 +08:00
tjq
17b842a2a2 fix: server elect bug 2023-01-22 00:40:14 +08:00
tjq
4a41e322ab fix: server elect bug 2023-01-22 00:37:09 +08:00
tjq
e26f2df2d0 fix: server elect bug 2023-01-22 00:33:11 +08:00
tjq
571b7cf3f2 feat: optimize remote framework log output 2023-01-21 23:15:45 +08:00
tjq
4fece7be40 feat: optimize remote framework log output 2023-01-21 23:13:28 +08:00
tjq
bfb9c68590 feat: close remoteEngine when jvm exit 2023-01-21 22:37:18 +08:00
tjq
25c6a9a6d6 feat: remove PowerSerializable's path method 2023-01-21 11:10:51 +08:00
tjq
b746aa1859 feat: redefine PowerAkkaSerializer 2023-01-21 11:05:24 +08:00
tjq
e74fc2d138 chore: remove akka in common package 2023-01-21 10:34:37 +08:00
tjq
dedefd5a6d feat: replace akka by PowerJobRemoteEngine in server side 2023-01-21 10:31:12 +08:00
tjq
b013fbfefd feat: replace akka by PowerJobRemoteEngine in server side 2023-01-21 10:28:11 +08:00
tjq
5a14b300f9 feat: suit PowerJobAutoConfiguration for new properties 2023-01-20 17:15:49 +08:00
tjq
3892c38785 feat: remove all spring dependencies in powerjob-worker 2023-01-20 17:08:40 +08:00
tjq
8e96fdacc6 feat: remove springUtils 2023-01-20 16:39:53 +08:00
tjq
503e9db5c2 feat: remove spring in PowerJobWorker 2023-01-20 16:34:03 +08:00
tjq
f6a6914f91 feat: allow user to extend ProcessorFactory 2023-01-20 16:06:03 +08:00
tjq
847cf23738 feat: allow user to extend ProcessorFactory 2023-01-20 15:54:48 +08:00
tjq
16f5e67cf0 feat: use PowerJobRemoteEngine to replace akka 2023-01-20 15:09:21 +08:00
tjq
74358bca8d fix: determinePackageVersion throw exception in ide env 2023-01-20 14:51:56 +08:00
tjq
7eea92bfc7 fix: determinePackageVersion throw exception in ide env 2023-01-20 14:51:26 +08:00
tjq
5b94247daf feat: use PowerJobRemoteEngine to replace akka 2023-01-20 14:40:18 +08:00
tjq
2020f72905 feat: use PowerJobRemoteEngine to replace akka 2023-01-20 14:19:09 +08:00
tjq
f0da89503e feat: use PowerJobRemoteEngine to replace akka 2023-01-20 13:41:28 +08:00
tjq
d46a6de26e feat: change to use PowerJobRemoteEngine to replace akka 2023-01-20 13:18:58 +08:00
tjq
43df09bb38 feat: worker use PowerJobRemoteFramework 2023-01-20 12:05:18 +08:00
tjq
0400eceab1 chore: optimize package version 2023-01-20 09:05:06 +08:00
tjq
38d6b16c74 feat: HandlerLocation use serverType 2023-01-20 09:00:52 +08:00
tjq
3d5a5ac342 feat: optimize code of PowerJobProcessorLoader 2023-01-17 23:18:14 +08:00
tjq
4d2e037107 feat: optimize code of BuiltInSpringProcessorFactory 2023-01-17 22:57:36 +08:00
tjq
cc7a63c69f feat: JarContainerProcessorFactory 2023-01-17 22:53:32 +08:00
tjq
57450a98ad feat: PowerJobProcessorLoader 2023-01-17 22:47:05 +08:00
tjq
44e6ea2373 feat: two default impl for ProcessorFactory 2023-01-17 22:39:50 +08:00
tjq
1ca5fed9cf feat: define ProcessorFactory 2023-01-17 22:08:24 +08:00
tjq
2982410d80 feat: define ProcessorFactory 2023-01-17 22:05:48 +08:00
tjq
fbd75a6ec7 feat: define ProcessorFactory 2023-01-17 22:00:35 +08:00
tjq
d6f3ae6c44 feat: define insideCluster in HandlerLocation 2023-01-17 21:37:41 +08:00
tjq
59121684a8 chore: merge master 2023-01-16 00:14:30 +08:00
tjq
c47fd69859 test: use gatling to test remote framework performance 2023-01-15 22:58:44 +08:00
Echo009
ccbe11ed0e
Merge pull request #531 from PowerJob/4.2.1-main
release 4.2.1
2023-01-15 21:37:26 +08:00
Echo009
30abf08703 feat: add some powerjob worker sample processores 2023-01-15 21:06:59 +08:00
Echo009
1b3134291c feat: optimize the code of LightTaskTracker 2023-01-15 21:04:32 +08:00
Echo009
2c51e0601d feat: optimize the code of persistence layer entity class 2023-01-15 16:25:01 +08:00
tjq
cd7a743097 feat: use gatling to have a pressure test for remote framework 2023-01-08 21:41:15 +08:00
tjq
2afb20df0b feat: use gatling to have a pressure test for remote framework 2023-01-08 21:22:56 +08:00
tjq
0d29b6369a feat: use gatling to have a pressure test for remote framework 2023-01-08 20:48:42 +08:00
tjq
24b4cc4eb5 feat: use gatling to have a pressure test for remote framework 2023-01-08 19:34:44 +08:00
tjq
50b4ca3cca feat: optimize resource clean 2023-01-08 18:30:44 +08:00
tjq
7b9ee74c21 feat: benchmark remote framework 2023-01-07 16:58:33 +08:00
tjq
8b9d6df172 feat: akka remote impl 2023-01-07 14:53:58 +08:00
tjq
676388a988 feat: akka remote impl 2023-01-07 14:38:17 +08:00
tjq
b0b2c24571 feat: optimize remote framework 2023-01-07 14:14:32 +08:00
tjq
5d3bfedf5d feat: akka proxy actor 2023-01-06 23:34:36 +08:00
tjq
d73b8e21e6 feat: suit path 2023-01-06 22:58:34 +08:00
tjq
94a0e2fa42 feat: optimize code for PowerJobActor 2023-01-06 22:54:52 +08:00
tjq
79cde85256 feat: define PowerJobActor 2023-01-06 22:53:11 +08:00
Echo009
fe03b8faab feat: optimize the code of TaskTracker 2023-01-04 22:43:23 +08:00
tjq
9f6d421ed2 feat: optimize Thread#stop usage 2023-01-04 00:39:31 +08:00
tjq
da04e4b048 feat: replace deprecated method 2023-01-03 23:22:33 +08:00
Echo009
a1beb44ccf pref: 支持轻量级任务模型,优化任务派发以及实例状态检查的处理逻辑 2023-01-02 23:57:09 +08:00
tjq
4b2d9d4d74 feat: optimize remote http impl 2023-01-02 12:47:28 +08:00
tjq
432adeb00f feat: optimize akka remote impl 2023-01-02 11:24:44 +08:00
tjq
f2b9ae222a feat: optimize akka remote impl 2023-01-02 11:21:49 +08:00
tjq
af8fbb0167 fix: throw exception when http server startup failed 2023-01-02 11:14:16 +08:00
tjq
d12ac4d6cd feat: optimize performance 2023-01-02 10:16:16 +08:00
tjq
e6a171d775 feat: add comment 2023-01-02 09:59:06 +08:00
tjq
2606440f44 feat: optimize HttpVertxCSInitializer 2023-01-02 00:22:48 +08:00
tjq
d3bd22302f feat: finished config 2023-01-02 00:11:31 +08:00
tjq
2c31e81c5f feat: framework api 2023-01-01 20:25:11 +08:00
tjq
87a1a1d7c1 feat: vertx http framework 2023-01-01 20:12:00 +08:00
tjq
268f5dd5c7 feat: add BenchmarkActor for performance test 2023-01-01 10:25:58 +08:00
tjq
eb6b0c35a5 feat: add BenchmarkActor for performance test 2023-01-01 09:41:13 +08:00
tjq
0c8e339140 feat: HttpCSInitializer 2023-01-01 09:34:05 +08:00
tjq
68a9cc52e2 refactor: change remote framework api 2022-12-31 16:56:00 +08:00
tjq
31d2283f99 feat: add remote akka impl 2022-12-31 16:40:33 +08:00
tjq
c6d90be839 feat: add remote akka impl 2022-12-31 16:34:13 +08:00
tjq
4356c5566d feat: finished remote engine 2022-12-31 15:07:27 +08:00
tjq
84ef2fd120 feat: define powerjob remote framework 2022-12-31 12:52:43 +08:00
tjq
d3b8c4e353 feat: define powerjob remote 2022-12-31 12:42:57 +08:00
ZhangJun
3f95ee8a33 部分代码优化
1.邮件通知服务,优雅注入发件人
2.雪花算法,对时钟回拨情况做优化,避免服务直接不可用
3.扫描数据库task,部分代码调整减少重复计算性能消耗
4.部分枚举类,增强代码安全性
5.其它,规范部分代码
2022-12-29 20:55:22 +08:00
Echo009
5ba4ce5457 fix: issue with nested workflow node state updates,#465 2022-10-31 14:01:12 +08:00
Echo009
39eb79de54 refactor: optimize the code of the server module
1. use constructor based dependency injection to replace field injection and solve the problem of circular dependencies
2. replace deprecated API calls
2022-10-30 12:59:48 +08:00
tjq
5189634b60 chore: change project version to 4.2.1 2022-10-23 14:27:15 +08:00
tjq
eb195cf891 feat: change client name to Pantheon 2022-10-23 14:25:24 +08:00
tjq
b9222b8594 test: add test code for h2 2022-10-23 14:08:01 +08:00
tjq
987aa966a0 test: add test code for h2 2022-10-23 13:27:18 +08:00
tjq
75e5c7049f feat: print h2 database version 2022-10-23 12:03:57 +08:00
tjq
33539857f4 feat: extract package util 2022-10-23 11:58:22 +08:00
tjq
a9936b8dba feat: optimize mongodb config 2022-10-23 11:40:48 +08:00
tjq
757b994176 feat: merge h2 upgrade from fjf 2022-10-23 11:30:49 +08:00
tjq
e6c94af599 feat: change StringUtils(from spring to apache) 2022-10-11 23:00:54 +08:00
tjq
3bcfbd8e9f feat: upgrade powerjob-server's version 2022-10-11 22:54:31 +08:00
tjq
0541216944 feat: upgrade HikariCP to latestversion for Java8 2022-10-11 22:43:24 +08:00
tjq
50b68e82bd feat: upgrade junit-jupiter-api to latest version 2022-10-11 22:19:38 +08:00
tjq
daaaa15b94 feat: upgrade kryo5 to latest version 2022-10-11 22:13:06 +08:00
tjq
195984bb95 feat: updrage jackson's version 2022-10-11 22:12:31 +08:00
tjq
7867b07d9c feat:upgrade slf4j to latest version 2022-10-11 22:11:00 +08:00
tjq
223fac9828 feat: upgrade commons-io and logback version 2022-10-11 22:06:45 +08:00
ocean
1b710c1332 refactor: 升级h2的版本 2022-10-06 17:03:26 +08:00
tjq
dc98f5f37a Merge branch 'v4.2.0' 2022-10-03 15:55:27 +08:00
tjq
c6009c8b5e feat: upgrade pom version 2022-10-03 15:50:03 +08:00
tjq
ce0290ea03 fix: front-end NPE #455 2022-10-03 15:47:47 +08:00
tjq
1301da0d7d feat: optimize code 2022-10-03 14:58:42 +08:00
tjq
6eb5966e96 feat: define LogType 2022-10-03 14:53:51 +08:00
tjq
db7f5855e1 feat: upgrade front-end to support more log type 2022-10-03 14:50:02 +08:00
tjq
fe1fad6a7b feat: tempory skip cycle reference check 2022-10-03 14:49:02 +08:00
tjq
7feb25cf8a feat: support OmsStdOutLogger 2022-10-03 14:36:55 +08:00
tjq
cded964bcd feat: support OmsNullLogger 2022-10-03 14:23:25 +08:00
tjq
5d5b1e3854 feat: optimzie comment 2022-10-03 14:16:17 +08:00
tjq
5b68b4dc75 feat: add 4.2.x upgrade SQL 2022-10-03 14:13:00 +08:00
tjq
2f62f448a8 feat: upgrade pom version to 4.2.0 2022-10-03 14:07:23 +08:00
tjq
cb72fcb08a feat: upgrade samples's springboot version 2022-10-03 14:01:05 +08:00
tjq
60209ebbc1 Merge branch '4.2.0-main' into v4.2.0 2022-10-03 13:59:14 +08:00
tjq
050190ba89 Merge branch '4.2.0-main-upgrade-spring' into v4.2.0 2022-10-03 13:57:24 +08:00
tjq
54db609d32 feat: support random DispatchStrategy #461 2022-10-03 13:54:56 +08:00
tjq
806747d88c fix: change inner class scope to fix JDK17 exception #457 2022-10-03 13:49:38 +08:00
ocean
6de2be72ef 完成spring的升级 2022-09-24 21:39:38 +08:00
songyinyin
4bc94dd465 refactor: update powerjob-server banner color 2022-09-24 20:02:13 +08:00
songyinyin
a5b46f6a47 refactor: change docker-compose mysql port to 3307 2022-09-24 20:00:23 +08:00
tjq
5a73e6ad91 fix: froent-ent NPE 2022-09-18 23:25:48 +08:00
tjq
91b48c0a5e fix: auto build script 2022-09-18 23:08:56 +08:00
tjq
78d793d28e fix: auto build script 2022-09-18 22:39:07 +08:00
tjq
653dcb4a92 feat: update front-end 2022-09-18 22:06:29 +08:00
tjq
ce555ad18f test: add test log processor in samples 2022-09-18 21:56:03 +08:00
tjq
e5d3139990 feat: use worker-samples as try demo 2022-09-18 21:18:56 +08:00
tjq
b2b8241295 feat: use worker-samples as try demo 2022-09-18 18:30:42 +08:00
tjq
f20a849a93 feat: support shutdown log by OFF level 2022-09-18 15:13:17 +08:00
tjq
f3c7ed8baf feat: add max queue size for log handler in worker to prevent OOM 2022-09-18 14:02:05 +08:00
tjq
483227f840 feat: script processor support cmd and powershel by fddc 2022-09-18 00:48:02 +08:00
tjq
45f7b17e14 feat: script processor support cmd and powershel by fddc 2022-09-18 00:40:35 +08:00
tjq
3823b3bc56 Merge branch '4.2.0-win-support' into 4.2.0-main 2022-09-18 00:28:52 +08:00
tjq
a39751818f Merge branch 'master' of https://github.com/fddc/PowerJob into 4.2.0-win-support 2022-09-18 00:28:27 +08:00
tjq
ec47f5a8c5 feat: add ConfigProcessor in official-processor 2022-09-18 00:22:03 +08:00
tjq
74b6acc927 feat: add ConfigProcessor in official-processor 2022-09-18 00:12:04 +08:00
tjq
dc90f272c7 feat: optimize worker log 2022-09-17 23:47:10 +08:00
tjq
e501cb9dfa feat: support LogConfig 2022-09-17 00:24:26 +08:00
tjq
a4a41c4ab7 feat: define JobLogConfig 2022-09-16 23:28:42 +08:00
tjq
3842acf952 feat: remove useless code 2022-09-16 23:10:25 +08:00
tjq
3ffaf382c7 chore: upgrade project version to 4.1.1 and ready to release 2022-09-13 01:32:54 +08:00
tjq
ca063803db release: v4.1.1 2022-09-13 00:23:33 +08:00
tjq
be2c5ea20e chore: upgrade project version to 4.1.1 and ready to release 2022-09-12 23:43:57 +08:00
tjq
2a3b9323a6 chore: upgrade project version to 4.1.1 and ready to release 2022-09-12 23:41:44 +08:00
tjq
12ff1335f2 test: fix unit test by ocean-fujfu 2022-09-12 23:24:49 +08:00
tjq
112628f386 Merge branch '4.1.1-docker-compose' into v4.1.1 2022-09-12 23:15:51 +08:00
tjq
42fa628a61 Merge branch '4.1.1-worker-enabled' into v4.1.1 2022-09-12 23:09:10 +08:00
tjq
65f2a58d2f Merge branch '4.1.1-monitor' into v4.1.1 2022-09-12 23:03:38 +08:00
tjq
5acb8f82e7 feat: optimize OmsLocalDbPool config 2022-09-12 22:59:00 +08:00
tjq
a32d7cebb5 feat: optimize server monitor event 2022-09-12 22:10:07 +08:00
tjq
5b223d23ad feat: optimize server monitor event 2022-09-12 21:52:26 +08:00
tjq
fd562d8ea0 fix: worker heartbeat use wrong thread pool 2022-09-12 21:26:01 +08:00
tjq
e6d32c9a05 fix: worker heartbeat use wrong thread pool 2022-09-12 21:18:40 +08:00
tjq
fadf2ce14e feat: optimize monitor logger config 2022-09-12 21:16:00 +08:00
songyinyin
510b5ab546 feat: add docker-compose sample, fix volumes path 2022-09-12 21:15:01 +08:00
tjq
a77ba8084e feat: config logback for monitor 2022-09-12 21:09:47 +08:00
tjq
1b9d8331a1 feat: optimize DatabaseMonitorAspect 2022-09-12 21:00:01 +08:00
ocean
ac2b28bb5f 升级spring boot后同步修改unit test 2022-09-12 15:41:32 +08:00
tjq
74f70cd58b feat: support serverInfoAware 2022-09-12 12:56:12 +08:00
tjq
5450ac00db feat: optimize WorkerLogReportEvent 2022-09-12 11:58:56 +08:00
tjq
2db0f05feb feat: optimize thread pool config 2022-09-12 11:33:13 +08:00
ocean
3466ff3f05 修复unit test 2022-09-12 11:31:18 +08:00
tjq
d531bf3a22 feat: optimize threadpool config 2022-09-12 11:07:05 +08:00
tjq
3b73a750e6 feat: server async process log report to prevent timeout #432 2022-09-12 10:45:56 +08:00
tjq
3869b115ce feat: optimize RejectedExecutionHandlerFactory 2022-09-12 09:36:29 +08:00
tjq
614349370a feat: refactor worker request handler and add monitor 2022-09-11 17:14:00 +08:00
tjq
48ac446014 feat: define MonitorService to inject monitor context 2022-09-10 23:41:48 +08:00
tjq
ac1b1fe0c8 fix: limit worker num failed in map/mapreduce job #450 2022-09-10 09:55:26 +08:00
tjq
dfd1fd069b fix: invalid random when JobInfo's maxWorkerCount is a small value #449 2022-09-10 09:27:57 +08:00
tjq
22db37cad9 feat: optimize instanceMetadataCacheSize 2022-09-10 08:40:29 +08:00
tjq
22522c099d feat: status report append appId info 2022-09-10 08:32:32 +08:00
tjq
eaf6dcad4f feat: add appId in WorkerHeartbeatEvent 2022-09-10 08:30:18 +08:00
tjq
b6de5aa563 feat: add WorkerHeartbeatEvent 2022-09-09 22:45:05 +08:00
tjq
519213ad4a feat: add SlowLockEvent 2022-09-09 00:41:50 +08:00
tjq
ce369b3e30 feat: optimize parseEffectRows 2022-09-09 00:31:54 +08:00
tjq
29a50ed89a feat: optimize parseEffectRows 2022-09-09 00:27:13 +08:00
tjq
2a76e7d043 feat: monitor db effect rows 2022-09-09 00:22:27 +08:00
tjq
cca9c5421e feat: finished jpa base monitor 2022-09-09 00:09:22 +08:00
tjq
e23825c399 feat: use package aop 2022-09-08 23:59:17 +08:00
songyinyin
a7e3c05f6c feat: add docker-compose sample, docker build tag latest 2022-09-08 22:53:48 +08:00
songyinyin
5b865fe49b feat: add docker-compose sample 2022-09-08 22:14:21 +08:00
songyinyin
88bc28140f feat: powerjob worker add property: powerjob.worker.enabled 2022-09-07 23:44:36 +08:00
tjq
a0cc5670d4 feat: define DatabaseEvent 2022-09-06 01:54:10 +08:00
tjq
5080796c6f feat: add monitor module 2022-09-06 00:26:04 +08:00
Echo009
ad1a7227d6 chore: update console resources 2022-09-04 10:36:29 +08:00
Echo009
d0e95c2129 style: rename lifecycle to lifeCycle 2022-09-04 10:36:24 +08:00
Echo009
03165bf5e4 chore: upgrade mysql connector version (8.0.19 -> 8.0.28) 2022-09-03 00:02:15 +08:00
Echo009
bdd9b978f9 feat: support inject workflow context directly 2022-09-02 20:12:58 +08:00
Echo009
54524553c1 chore: update powerjob console resources and upgrade project version to 4.1.0 2022-09-01 08:21:24 +08:00
Echo009
0b5a404cf4 chore: upgrade fastjson version (1.2.68 -> 1.2.83) 2022-09-01 07:29:17 +08:00
Echo009
e1c4946a73 chore: update sql script 2022-09-01 07:14:30 +08:00
Echo009
3566569dc5 feat: add InjectWorkflowContextProcessor 2022-09-01 07:06:21 +08:00
Echo009
08711f93d0 perf: optimize akka config 2022-09-01 07:06:21 +08:00
Echo009
5ed6eac38a perf: use cached lock replace SegmentLock 2022-09-01 07:06:21 +08:00
Echo009
0c4eb3834a fix: task status transfer anomaly, #404 2022-09-01 07:06:21 +08:00
Echo009
a9a0422de1 fix: the problem of saving frequent job 2022-09-01 07:06:21 +08:00
Echo009
812d71f090 fix: NPE in FrequentScheduler 2022-09-01 07:06:21 +08:00
Echo009
7539faffff feat: use Groovy Engine replace Nashorn Engine. 2022-09-01 07:06:21 +08:00
Echo009
88b92e2994 chore: add stop task demo 2022-09-01 07:06:21 +08:00
Echo009
2e1c585b5e fix: Illegal nested workflow node 2022-09-01 07:06:21 +08:00
Echo009
3923937f6c fix: the problem of failed to save workflow and job 2022-09-01 07:06:21 +08:00
Echo009
8909584976 feat: replace cron implementation and support job lifecycle #382 #208 2022-09-01 07:06:21 +08:00
Echo009
abf266b7f8 feat: support passing instance parameters. #381 2022-09-01 07:06:21 +08:00
Echo009
1d34547f45 chore: update config 2022-09-01 07:06:21 +08:00
Echo009
56447596f7 fix: Repetitive execution of frequency tasks #375 2022-09-01 07:06:21 +08:00
Echo009
8488a10465 fix: stop nested workflow 2022-09-01 07:06:21 +08:00
Echo009
b60c236824 feat: workflow's DAG must be not empty 2022-09-01 07:06:21 +08:00
Echo009
d87c358743 fix: workflow cron bug #316 2022-09-01 07:06:21 +08:00
Echo009
5791b43ac6 fix: the problem of retrying nested workflow node 2022-09-01 07:06:21 +08:00
Echo009
d4eb8e3303 fix: the problem of incorrect sub-workflow state 2022-09-01 07:06:21 +08:00
Echo009
9c30e5ee83 fix: update data structure of JobInfoVO 2022-09-01 07:06:21 +08:00
Echo009
8aa5140265 feat: support frequent task alarm #370 2022-09-01 07:06:21 +08:00
Echo009
d7c0d12a30 fix: decision node missing job param 2022-09-01 07:06:21 +08:00
Echo009
9194641c6f fix: workflow node validator 2022-09-01 07:06:21 +08:00
Echo009
4b14be8321 feat: update the workflow maintenance interface 2022-09-01 07:06:21 +08:00
Echo009
d996b34a54 feat: support nested workflow #266 2022-09-01 07:06:21 +08:00
Echo009
c15cefc447 feat: support decision node #188 2022-09-01 07:06:21 +08:00
Echo009
8663f3b79f fix: problem of task process in case of task slice exception. #355 2022-09-01 07:06:21 +08:00
Echo009
ac8e96508c feat: use CompatibleFieldSerializer as default kyro serializer, providing both forward and backward compatibility 2022-09-01 07:06:21 +08:00
Echo009
d799586ce9 fix: NetUtils, use the first valid network interface 2022-09-01 07:06:21 +08:00
脏兮兮
e585ba5a19
feat: 添加 worker tag 配置 2022-09-01 07:05:16 +08:00
读钓
62d682fbd5
fix: When you append a string to the workflow context, the value has multiple double quotes.(#307) 2022-08-31 23:15:04 +08:00
Ryan
fb6e57a75c
1.基于作者4.0.1版本SQL修改 (#263)
2.增加表注释、字段注释
3.调整排序规则 utf8mb4_0900_ai_ci -> utf8mb4_general_ci
2022-08-31 23:13:03 +08:00
Justin Zhang
75c88c32ed
fix: bug #324
* fix: bug #324

Fail to cancel delay job instance by id via API

* style: correct log

* style: update log

Co-authored-by: Echo009 <ech0.extreme@foxmail.com>
2022-08-31 23:12:10 +08:00
fddc
0aa06d1ae6 解决win平台bat脚本中文路径执行乱码问题 2021-08-06 20:41:24 +08:00
fddc
973322370a agent新增tag启动参数 2021-05-31 13:06:09 +08:00
fddc
49c7d18c00 增加powershell,以支持windws平台 2021-05-14 18:23:27 +08:00
fddc
4fccc81697 非windows系统才需要chmod 2021-05-13 11:02:42 +08:00
tjq
e094c22952 [release] v4.0.1 2021-04-05 16:47:31 +08:00
tjq
6ae809617b docs: update user.png and readme 2021-04-05 16:47:10 +08:00
tjq
cbcd5dcca7 refactor: update guava version for security #CVE-2020-8908 2021-04-05 15:08:59 +08:00
tjq
7a471a3917 chore: change version to 4.0.1 2021-04-05 15:01:00 +08:00
tjq
584b20ae9b chore: use project.parent.version to manage powerjob-server's version 2021-04-05 14:47:01 +08:00
tjq
f955ae2f61 fix: incorrect worker list display #245 2021-04-05 14:18:24 +08:00
tjq
c8a1f536c3 feat: output more server info 2021-04-05 13:54:16 +08:00
tjq
7527b31ece fix: update powerjob-server's sql to fix the bug of can't save workflow #247 2021-04-05 13:24:39 +08:00
tjq
4e9c1f98b7 refactor: optimize dialect properties 2021-04-05 12:57:50 +08:00
tjq
12d0d4dbb0 fix: compatibility issues for PostgreSQL 2021-04-05 12:52:48 +08:00
tjq
17439536f0 fix: server election bug 2021-04-05 12:51:10 +08:00
tjq
9949e23bc4 fix: NPE when some app has on worker connected 2021-04-05 12:49:37 +08:00
Echo009
4d236153de chore: add PostgreSQL dialect config hint to pre and product env config file 2021-04-03 17:12:57 +08:00
Echo009
a0f76f7ba9 refactor: optimize code for pull request 249,fix issue #153 2021-04-03 17:08:16 +08:00
Chang Kung Yao
5f0865129e Add Postgresql Support 2021-03-30 08:27:15 +08:00
lwc1
c31f10b3c1 fix server election 2021-03-27 14:32:12 +08:00
luter
0e5873ca05 getWorkerInfoByAddress NPE 问题的修正 2021-03-23 22:13:28 +08:00
tjq
cfb05d9871 chore: fix github CI failed 2021-03-22 00:17:41 +08:00
tjq
98330846c3 [release] v4.0.0 2021-03-21 23:10:43 +08:00
672 changed files with 45914 additions and 23470 deletions

View File

@ -1,30 +0,0 @@
name: Docker Image CI
on:
push:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build the Docker image
run: mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am && /bin/cp -rf powerjob-server/target/*.jar powerjob-server/docker/powerjob-server.jar && /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
- uses: docker/build-push-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
repository: tjqq/powerjob-server
tags: latest
path: powerjob-server/docker/
- uses: docker/build-push-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
repository: tjqq/powerjob-agent
tags: latest
path: powerjob-worker-agent/

68
.github/workflows/docker_publish.yml vendored Normal file
View File

@ -0,0 +1,68 @@
name: build_docker
on:
push:
tags:
- 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
jobs:
build_docker:
name: Build docker
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Build Maven Project
uses: actions/setup-java@v4
with:
java-version: '8'
distribution: 'temurin'
- name: Publish package
run: mvn clean package -Pdev -DskipTests -U -e && /bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar && /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar && /bin/cp -rf powerjob-worker-samples/target/*.jar powerjob-worker-samples/powerjob-worker-samples.jar
# Login
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build And Push [powerjob-server]
uses: docker/build-push-action@v6
with:
context: powerjob-server/docker/
push: true
platforms: linux/amd64,linux/arm64
tags: |
tjqq/powerjob-server:latest
powerjob/powerjob-server:latest
tjqq/powerjob-server:${{ github.ref_name }}
powerjob/powerjob-server:${{ github.ref_name }}
- name: Build And Push [powerjob-agent]
uses: docker/build-push-action@v6
with:
context: powerjob-worker-agent/
push: true
platforms: linux/amd64,linux/arm64
tags: |
tjqq/powerjob-agent:latest
powerjob/powerjob-agent:latest
tjqq/powerjob-agent:${{ github.ref_name }}
powerjob/powerjob-agent:${{ github.ref_name }}
- name: Build And Push [powerjob-worker-samples]
uses: docker/build-push-action@v6
with:
context: powerjob-worker-samples/
push: true
platforms: linux/amd64,linux/arm64
tags: |
tjqq/powerjob-worker-samples:latest
powerjob/powerjob-worker-samples:latest
tjqq/powerjob-worker-samples:${{ github.ref_name }}
powerjob/powerjob-worker-samples:${{ github.ref_name }}

View File

@ -1,38 +0,0 @@
# This workflow will build a Java project with Maven
# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
name: Java CI with Maven
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up JDK 1.8
uses: actions/setup-java@v1
with:
java-version: 1.8
- name: Build with Maven
run: mvn -B clean package -Pdev -DskipTests --file pom.xml
- name: upload build result
run: mkdir staging && cp powerjob-server/target/*.jar staging/powerjob-server.jar && cp powerjob-client/target/*.jar staging/powerjob-client.jar && cp powerjob-worker-agent/target/*.jar staging/powerjob-agent.jar
- uses: actions/upload-artifact@v1
with:
name: powerjob-server.jar
path: staging/powerjob-server.jar
- uses: actions/upload-artifact@v1
with:
name: powerjob-client.jar
path: staging/powerjob-client.jar
- uses: actions/upload-artifact@v1
with:
name: powerjob-agent.jar
path: staging/powerjob-agent.jar

28
.github/workflows/maven_build.yml vendored Normal file
View File

@ -0,0 +1,28 @@
# This workflow will build a Java project with Maven
# For more information see: https://docs.github.com/zh/actions/use-cases-and-examples/building-and-testing/building-and-testing-java-with-maven
name: Java CI with Maven
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
with:
java-version: '8'
distribution: 'temurin'
- run: mvn -B clean package -Pdev -DskipTests --file pom.xml
- run: mkdir staging && cp powerjob-server/powerjob-server-starter/target/*.jar staging/powerjob-server.jar && cp powerjob-client/target/*.jar staging/powerjob-client.jar && cp powerjob-worker-agent/target/*.jar staging/powerjob-agent.jar && cp powerjob-worker-spring-boot-starter/target/*.jar staging/powerjob-worker-spring-boot-starter.jar
- uses: actions/upload-artifact@v4
with:
name: Package
path: staging

22
.github/workflows/maven_publish.yml vendored Normal file
View File

@ -0,0 +1,22 @@
name: Publish package to the Maven Central Repository
on:
release:
types: [created]
jobs:
publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Maven Central Repository
uses: actions/setup-java@v4
with:
java-version: '8'
distribution: 'temurin'
server-id: ossrh
server-username: MAVEN_USERNAME
server-password: MAVEN_PASSWORD
- name: Publish package
run: mvn --batch-mode clean deploy -pl powerjob-worker,powerjob-client,powerjob-worker-spring-boot-starter,powerjob-official-processors,powerjob-worker-agent -DskipTests -Prelease -am
env:
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_TOKEN }}

3
.gitignore vendored
View File

@ -41,4 +41,5 @@ build/
.trc
*/.phd
*/.txt
*/.trc
*/.trc
powerjob-data/

View File

@ -1,12 +1,16 @@
# English | [简体中文](./README_zhCN.md)
<p align="center">
🏮PowerJob 全体成员祝大家龙年腾飞,新的一年身体健康,万事如意,阖家欢乐,幸福安康!🏮
</p>
<p align="center">
<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/logo.png" alt="PowerJob" title="PowerJob" width="557"/>
</p>
<p align="center">
<a href="https://github.com/PowerJob/PowerJob/actions"><img src="https://github.com/PowerJob/PowerJob/workflows/Java%20CI%20with%20Maven/badge.svg?branch=master" alt="actions"></a>
<a href="https://search.maven.org/search?q=tech.powerjob"><img alt="Maven Central" src="https://img.shields.io/maven-central/v/tech.powerjob/powerjob-worker"></a>
<a href="https://central.sonatype.com/search?smo=true&q=powerjob-worker&namespace=tech.powerjob"><img alt="Maven Central" src="https://img.shields.io/maven-central/v/tech.powerjob/powerjob-worker"></a>
<a href="https://github.com/PowerJob/PowerJob/releases"><img alt="GitHub release (latest SemVer)" src="https://img.shields.io/github/v/release/kfcfans/powerjob?color=%23E59866"></a>
<a href="https://github.com/PowerJob/PowerJob/blob/master/LICENSE"><img src="https://img.shields.io/github/license/KFCFans/PowerJob" alt="LICENSE"></a>
</p>

View File

@ -1,5 +1,9 @@
# [English](./README.md) | 简体中文
<p align="center">
🏮PowerJob 全体成员祝大家龙年腾飞,新的一年身体健康,万事如意,阖家欢乐,幸福安康!🏮
</p>
<p align="center">
<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/logo.png" alt="PowerJob" title="PowerJob" width="557"/>
</p>
@ -20,7 +24,7 @@ PowerJob原OhMyScheduler是全新一代分布式调度与计算框架
* DAG工作流支持支持在线配置任务依赖关系可视化得对任务进行编排同时还支持上下游任务间的数据传递
* 执行器支持广泛支持Spring Bean、内置/外置Java类、Shell、Python等处理器应用范围广。
* 运维便捷支持在线日志功能执行器产生的日志可以在前端控制台页面实时显示降低debug成本极大地提高开发效率。
* 依赖精简最小仅依赖关系型数据库MySQL/Oracle/MS SQLServer...扩展依赖为MongoDB用于存储庞大的在线日志
* 依赖精简最小仅依赖关系型数据库MySQL/Oracle/MS SQLServer...)。
* 高可用&高性能:调度服务器经过精心设计,一改其他调度框架基于数据库锁的策略,实现了无锁化调度。部署多个调度服务器可以同时实现高可用和性能的提升(支持无限的水平扩展)。
* 故障转移与恢复:任务执行失败后,可根据配置的重试策略完成重试,只要执行器集群有足够的计算节点,任务就能顺利完成。
@ -34,8 +38,7 @@ PowerJob原OhMyScheduler是全新一代分布式调度与计算框架
PowerJob 的设计目标为企业级的分布式任务调度平台,即成为公司内部的**任务调度中间件**。整个公司统一部署调度中心 powerjob-server旗下所有业务线应用只需要依赖 `powerjob-worker` 即可接入调度中心获取任务调度与分布式计算能力。
### 在线试用
* 试用地址:[try.powerjob.tech](http://try.powerjob.tech/#/welcome?appName=powerjob-agent-test&password=123)
* [建议先阅读使用教程了解 PowerJob 的概念和基本用法](https://www.yuque.com/powerjob/guidence/trial)
* [点击查看试用说明和教程](https://www.yuque.com/powerjob/guidence/trial)
### 同类产品对比
| | QuartZ | xxl-job | SchedulerX 2.0 | PowerJob |
@ -70,4 +73,6 @@ PowerJob 的设计目标为企业级的分布式任务调度平台,即成为
* 欢迎共同参与本项目的贡献PR和Issue都大大滴欢迎求求了
* 觉得还不错的话可以点个Star支持一下哦 = ̄ω ̄=
* 联系方式@KFCFans -> `tengjiqi@gmail.com`
* 用户交流QQ群487453839
* 用户交流QQ群因广告信息泛滥加群需要验证请认真填写申请原因
* 一群已满487453839
* 二群834937813

4
SECURITY.md Normal file
View File

@ -0,0 +1,4 @@
# Security notices relating to PowerJob
Please disclose any security issues or vulnerabilities found through [Tidelift's coordinated disclosure system](https://tidelift.com/security) or to the maintainers privately(tengjiqi@gmail.com).

54
docker-compose.yml Normal file
View File

@ -0,0 +1,54 @@
# 使用说明 V4.3.1
# 1. PowerJob 根目录执行docker-compose up
# 2. 静静等待服务启动。
version: '3'
services:
powerjob-mysql:
environment:
MYSQL_ROOT_HOST: "%"
MYSQL_ROOT_PASSWORD: No1Bug2Please3!
restart: always
container_name: powerjob-mysql
image: powerjob/powerjob-mysql:latest
ports:
- "3307:3306"
volumes:
- ./powerjob-data/powerjob-mysql:/var/lib/mysql
command: --lower_case_table_names=1
powerjob-server:
container_name: powerjob-server
image: powerjob/powerjob-server:latest
restart: always
depends_on:
- powerjob-mysql
environment:
JVMOPTIONS: "-Xmx512m"
PARAMS: "--oms.mongodb.enable=false --spring.datasource.core.jdbc-url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai"
ports:
- "7700:7700"
- "10086:10086"
- "10010:10010"
volumes:
- ./powerjob-data/powerjob-server:/root/powerjob/server/
powerjob-worker-samples:
container_name: powerjob-worker-samples
image: powerjob/powerjob-worker-samples:latest
restart: always
depends_on:
- powerjob-mysql
- powerjob-server
# environment:
# PARAMS: "--powerjob.worker.server-address=powerjob-server:7700"
ports:
- "8081:8081"
- "27777:27777"
volumes:
- ./powerjob-data/powerjob-worker-samples:/root/powerjob/worker
- ./others/script/wait-for-it.sh:/wait-for-it.sh
entrypoint:
- "sh"
- "-c"
- "chmod +x wait-for-it.sh && ./wait-for-it.sh powerjob-server:7700 --strict -- java -Xmx512m -jar /powerjob-worker-samples.jar --powerjob.worker.server-address=powerjob-server:7700"

11
others/Dockerfile Normal file
View File

@ -0,0 +1,11 @@
FROM mysql/mysql-server:8.0.30
MAINTAINER dudiao(idudaio@163.com)
ENV TZ=Asia/Shanghai
RUN ln -sf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
COPY sql/01schema.sql /docker-entrypoint-initdb.d
COPY sql/02worker-samples.sql /docker-entrypoint-initdb.d

19
others/dev/build_test_env.sh Executable file
View File

@ -0,0 +1,19 @@
#!/bin/bash
# 构建 PowerJob 测试环境
echo "================== 关闭全部服务 =================="
docker-compose down
echo "================== 构建 jar =================="
cd `dirname $0`/../.. || exit
# mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
# -U强制检查snapshot库 -pl指定需要构建的模块多模块逗号分割 -am同时构建依赖模块一般与pl连用 -Pxxx指定使用的配置文件
mvn clean package -Pdev -DskipTests
echo "================== 拷贝 jar =================="
/bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
/bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
ls -l powerjob-server/docker/powerjob-server.jar
ls -l powerjob-worker-agent/powerjob-agent.jar
cd others/dev
docker-compose build
docker-compose --compatibility up

View File

@ -0,0 +1,109 @@
# 构建 PowerJob 测试环境
version: '3.7'
services:
powerjob-mysql:
build:
context: ../
environment:
MYSQL_ROOT_HOST: "%"
MYSQL_ROOT_PASSWORD: No1Bug2Please3!
deploy:
resources:
limits:
memory: 768M
restart: always
container_name: powerjob-mysql
image: powerjob/powerjob-mysql:test_env
ports:
- "3309:3306"
volumes:
- ~/powerjob-data/powerjob-mysql:/var/lib/mysql
command: --lower_case_table_names=1
# powerjob-mongodb:
# image: mongo:latest
# container_name: powerjob-mongodb
# restart: always
# deploy:
# resources:
# limits:
# memory: 256M
# environment:
# MONGO_INITDB_ROOT_USERNAME: "root"
# MONGO_INITDB_ROOT_PASSWORD: "No1Bug2Please3!"
# MONGO_INITDB_DATABASE: "powerjob_daily"
# ports:
# - "27017:27017"
# volumes:
# - ./testenv/init_mongodb.js:/docker-entrypoint-initdb.d/mongo-init.js:ro
# - ~/powerjob-data/powerjob-mongodb:/data/db
powerjob-server:
build:
context: ../../powerjob-server/docker
deploy:
resources:
limits:
memory: 896M
container_name: powerjob-server
image: powerjob/powerjob-server:test_env
restart: always
depends_on:
- powerjob-mysql
# - powerjob-mongodb
environment:
PARAMS: "--spring.profiles.active=daily --spring.datasource.core.jdbc-url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai --oms.storage.dfs.mysql_series.url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai"
JVMOPTIONS: "-server -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/powerjob/server/gc.log"
ports:
- "7700:7700"
- "10086:10086"
- "10010:10010"
volumes:
- ~/powerjob-data/powerjob-server:/root/powerjob/server/
- ~/.m2:/root/.m2
powerjob-worker-agent:
build:
context: ../../powerjob-worker-agent
deploy:
resources:
limits:
memory: 384M
container_name: powerjob-worker-agent
image: powerjob/powerjob-worker-agent:test_env
restart: always
depends_on:
- powerjob-mysql
- powerjob-server
ports:
- "5002:5005"
- "10002:10000"
- "27777:27777"
volumes:
- ~/powerjob-data/powerjob-worker-agent:/root
entrypoint:
- "sh"
- "-c"
- "./wait-for-it.sh powerjob-server:7700 --strict -- java -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -server -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/gc.log -jar /powerjob-agent.jar --app powerjob-worker-samples --server powerjob-server:7700"
powerjob-worker-agent2:
deploy:
resources:
limits:
memory: 384M
container_name: powerjob-worker-agent2
image: powerjob/powerjob-worker-agent:test_env
restart: always
depends_on:
- powerjob-mysql
- powerjob-server
ports:
- "5003:5005"
- "10003:10000"
- "27778:27777"
volumes:
- ~/powerjob-data/powerjob-worker-agent2:/root
entrypoint:
- "sh"
- "-c"
- "./wait-for-it.sh powerjob-server:7700 --strict -- java -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -server -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/gc.log -jar /powerjob-agent.jar --app powerjob-worker-samples --server powerjob-server:7700"

View File

@ -33,11 +33,19 @@ read -r -p "是否重新构建镜像y/n:" rebuild
if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
echo "================== 删除旧镜像 =================="
docker rmi -f tjqq/powerjob-server:$version
docker rmi -f powerjob/powerjob-server:$version
docker rmi -f tjqq/powerjob-agent:$version
docker rmi -f powerjob/powerjob-agent:$version
docker rmi -f powerjob/powerjob-mysql:$version
docker rmi -f powerjob/powerjob-worker-samples:$version
echo "================== 构建 powerjob-server 镜像 =================="
docker build -t tjqq/powerjob-server:$version powerjob-server/docker/. || exit
echo "================== 构建 powerjob-agent 镜像 =================="
docker build -t tjqq/powerjob-agent:$version powerjob-worker-agent/. || exit
echo "================== 构建 powerjob-mysql 镜像 =================="
docker build -t powerjob/powerjob-mysql:$version others/. || exit
echo "================== 构建 powerjob-worker-samples 镜像 =================="
docker build -t powerjob/powerjob-worker-samples:$version powerjob-worker-samples/. || exit
read -r -p "是否正式发布该镜像y/n:" needrelease
if [ "$needrelease" = "y" ] || [ "$needrelease" = "Y" ]; then
@ -47,6 +55,25 @@ if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
docker push tjqq/powerjob-server:$version
echo "================== 正在推送 agent 镜像到中央仓库 =================="
docker push tjqq/powerjob-agent:$version
echo "================== 正在推送 powerjob-mysql 镜像到中央仓库 =================="
docker push powerjob/powerjob-mysql:$version
echo "================== 正在推送 samples 镜像到中央仓库 =================="
docker push powerjob/powerjob-worker-samples:$version
echo "================== 双写推送 =================="
docker tag tjqq/powerjob-server:$version powerjob/powerjob-server:$version
docker push powerjob/powerjob-server:$version
docker tag tjqq/powerjob-agent:$version powerjob/powerjob-agent:$version
docker push powerjob/powerjob-agent:$version
echo "================== 更新 LATEST 版本 =================="
docker tag powerjob/powerjob-server:$version powerjob/powerjob-server:latest
docker push powerjob/powerjob-server:latest
docker tag powerjob/powerjob-agent:$version powerjob/powerjob-agent:latest
docker push powerjob/powerjob-agent:latest
docker tag powerjob/powerjob-mysql:$version powerjob/powerjob-mysql:latest
docker push powerjob/powerjob-mysql:latest
docker tag powerjob/powerjob-worker-samples:$version powerjob/powerjob-worker-samples:latest
docker push powerjob/powerjob-worker-samples:latest
echo "================== Docker 推送完毕 =================="
fi
fi
fi
@ -63,7 +90,7 @@ if [ "$startup" = "y" ] || [ "$startup" = "Y" ]; then
echo "================== 准备启动 powerjob-server =================="
docker run -d \
--name powerjob-server \
-p 7700:7700 -p 10086:10086 -p 5001:5005 -p 10001:10000 \
-p 7700:7700 -p 10086:10086 -p 10010:10010 -p 5001:5005 -p 10001:10000 \
-e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-e PARAMS="--spring.profiles.active=pre" \
-e TZ="Asia/Shanghai" \

View File

@ -0,0 +1,71 @@
#!/bin/bash
# Multi-arch (linux/amd64 + linux/arm64) Docker image release script for Apple Silicon hosts.
# Builds and pushes powerjob-server / powerjob-agent / powerjob-mysql / powerjob-worker-samples
# via `docker buildx`. Keep this script in place: it locates the repo root relative to itself.
echo "A docker image release script for the Apple Silicon device."
# read -p shows a prompt string; -r keeps the input raw (no backslash escape processing)
read -r -p "请输入Docker镜像版本:" version
echo "即将构建的 server 镜像powerjob-server:$version"
echo "即将构建的 agent 镜像powerjob-agent:$version"
read -r -p "任意键继续:"
# Jump to the repository root (the script lives two directory levels below it).
cd "$(dirname "$0")/../.." || exit
read -r -p "是否进行maven构建y/n:" needmvn
if [ "$needmvn" = "y" ] || [ "$needmvn" = "Y" ]; then
  echo "================== 构建 jar =================="
  # mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
  # -U force snapshot refresh; -pl build only the listed modules; -am also build their
  # required dependencies (usually paired with -pl); -Pxxx select the build profile
  mvn clean package -Pdev -DskipTests -U -e
  echo "================== 拷贝 jar =================="
  /bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
  /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
  ls -l powerjob-server/docker/powerjob-server.jar
  ls -l powerjob-worker-agent/powerjob-agent.jar
fi
echo "================== 关闭老应用 =================="
docker stop powerjob-server
docker stop powerjob-agent
docker stop powerjob-agent2
echo "================== 删除老容器 =================="
docker container rm powerjob-server
docker container rm powerjob-agent
docker container rm powerjob-agent2
read -r -p "是否构建并发布镜像y/n:" rebuild
if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
  echo "================== 删除旧镜像 =================="
  docker rmi -f "tjqq/powerjob-server:$version"
  docker rmi -f "powerjob/powerjob-server:$version"
  docker rmi -f "tjqq/powerjob-agent:$version"
  docker rmi -f "powerjob/powerjob-agent:$version"
  docker rmi -f "powerjob/powerjob-mysql:$version"
  docker rmi -f "powerjob/powerjob-worker-samples:$version"
  # buildx with --push builds both platforms and pushes the manifest list in one step
  echo "================== 构建 powerjob-server 镜像(tjqq) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag "tjqq/powerjob-server:$version" powerjob-server/docker/. --push || exit
  echo "================== 构建 powerjob-server 镜像(powerjob) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag "powerjob/powerjob-server:$version" powerjob-server/docker/. --push || exit
  echo "================== 构建 powerjob-agent 镜像(tjqq) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag "tjqq/powerjob-agent:$version" powerjob-worker-agent/. --push || exit
  echo "================== 构建 powerjob-agent 镜像(powerjob) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag "powerjob/powerjob-agent:$version" powerjob-worker-agent/. --push || exit
  echo "================== 构建 powerjob-mysql 镜像 =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag "powerjob/powerjob-mysql:$version" others/. --push || exit
  echo "================== 构建 powerjob-worker-samples 镜像 =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag "powerjob/powerjob-worker-samples:$version" powerjob-worker-samples/. --push || exit
fi
read -r -p "是否推送LATESTy/n:" push_latest
if [ "$push_latest" = "y" ] || [ "$push_latest" = "Y" ]; then
  echo "================== powerjob-server LATEST (tjqq) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-server:latest powerjob-server/docker/. --push || exit
  echo "================== powerjob-server LATEST (powerjob) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-server:latest powerjob-server/docker/. --push || exit
  echo "================== powerjob-agent LATEST (tjqq) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-agent:latest powerjob-worker-agent/. --push || exit
  echo "================== powerjob-agent LATEST (powerjob) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-agent:latest powerjob-worker-agent/. --push || exit
  echo "================== powerjob-mysql LATEST =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-mysql:latest others/. --push || exit
  echo "================== powerjob-worker-samples LATEST =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-worker-samples:latest powerjob-worker-samples/. --push || exit
fi

View File

@ -0,0 +1,12 @@
// Mongo init script: provision the application account for the powerjob_daily database.
// Grants readWrite on powerjob_daily only (least privilege for the scheduler's log storage).
var powerjobDailyUser = {
    user: "zqq",
    pwd: "No1Bug2Please3!",
    roles: [
        { role: "readWrite", db: "powerjob_daily" }
    ]
};
db.createUser(powerjobDailyUser);

Binary file not shown.

Before

Width:  |  Height:  |  Size: 365 KiB

After

Width:  |  Height:  |  Size: 209 KiB

View File

@ -1,17 +1,22 @@
/*
NOTE: this SQL was exported from a specific MySQL 8 instance and may not be compatible
with other database versions; it is provided for reference only. If it cannot be used
directly, prefer letting SpringDataJPA auto-create the tables and export the SQL from there.
*/
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80021
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob-daily
Source Schema : powerjob5
Target Server Type : MySQL
Target Server Version : 80021
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 14/03/2021 19:54:37
Date: 11/08/2024 23:23:30
*/
SET NAMES utf8mb4;
@ -24,12 +29,18 @@ DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `appNameUK` (`app_name`)
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
@ -48,7 +59,7 @@ CREATE TABLE `container_info` (
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `IDX8hixyaktlnwil2w9up6b0p898` (`app_id`)
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
@ -75,9 +86,9 @@ CREATE TABLE `instance_info` (
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `IDX5b1nhpe5je7gc5s1ur200njr7` (`job_id`),
KEY `IDXjnji5lrr195kswk6f7mfhinrs` (`app_id`),
KEY `IDXa98hq3yu0l863wuotdjl7noum` (`instance_id`)
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
@ -86,10 +97,13 @@ CREATE TABLE `instance_info` (
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
@ -100,6 +114,7 @@ CREATE TABLE `job_info` (
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
@ -110,13 +125,35 @@ CREATE TABLE `job_info` (
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `IDXk2xprmn3lldmlcb52i36udll1` (`app_id`)
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
@ -129,7 +166,22 @@ CREATE TABLE `oms_lock` (
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `lockNameUK` (`lock_name`)
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
@ -142,25 +194,66 @@ CREATE TABLE `server_info` (
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `UKtk8ytgpl7mpukhnvhbl82kgvy` (`ip`)
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_name` (`username`),
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
@ -183,7 +276,7 @@ CREATE TABLE `workflow_info` (
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `IDX7uo5w0e3beeho3fnx9t7eiol3` (`app_id`)
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
@ -199,13 +292,16 @@ CREATE TABLE `workflow_instance_info` (
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`)
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
@ -224,10 +320,9 @@ CREATE TABLE `workflow_node_info` (
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint NOT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `IDX36t7rhj4mkg2a5pb4ttorscta` (`app_id`),
KEY `IDXacr0i6my8jr002ou8i1gmygju` (`workflow_id`)
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -1,32 +1,31 @@
#!/bin/bash
cd `dirname $0`/../.. || exit
echo "================== 构建 jar =================="
mvn clean package -Pdev -DskipTests -U -e
mvn clean package -Pdev -DskipTests -e
echo "================== 拷贝 jar =================="
/bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
/bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
echo "================== 关闭老应用 =================="
docker stop powerjob-server
docker stop powerjob-agent
docker stop powerjob-agent2
docker stop powerjob-worker-samples
docker stop powerjob-worker-samples2
echo "================== 删除老容器 =================="
docker container rm powerjob-server
docker container rm powerjob-agent
docker container rm powerjob-agent2
docker container rm powerjob-worker-samples
docker container rm powerjob-worker-samples2
echo "================== 删除旧镜像 =================="
docker rmi -f tjqq/powerjob-server:latest
docker rmi -f tjqq/powerjob-agent:latest
docker rmi -f tjqq/powerjob-worker-samples:latest
echo "================== 构建 powerjob-server 镜像 =================="
docker build -t tjqq/powerjob-server:latest powerjob-server/docker/. || exit
echo "================== 构建 powerjob-agent 镜像 =================="
docker build -t tjqq/powerjob-agent:latest powerjob-worker-agent/. || exit
echo "================== 构建 powerjob-worker-samples 镜像 =================="
docker build -t tjqq/powerjob-worker-samples:latest powerjob-worker-samples/. || exit
echo "================== 准备启动 powerjob-server =================="
docker run -d \
--restart=always \
--name powerjob-server \
-p 7700:7700 -p 10086:10086 -p 5001:5005 -p 10001:10000 \
-e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-e PARAMS="--oms.swagger.enable=true --spring.profiles.active=product --spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3306/powerjob-product?useUnicode=true&characterEncoding=UTF-8 --spring.data.mongodb.uri=mongodb://remotehost:27017/powerjob-product" \
-e PARAMS="--oms.swagger.enable=true --spring.profiles.active=product --spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3306/powerjob-product?useUnicode=true&characterEncoding=UTF-8 --oms.mongodb.enable=false --spring.data.mongodb.uri=mongodb://remotehost:27017/powerjob-product" \
-v ~/docker/powerjob-server:/root/powerjob/server -v ~/.m2:/root/.m2 \
tjqq/powerjob-server:latest
sleep 60
@ -37,19 +36,19 @@ echo "使用的Server地址$serverAddress"
docker run -d \
--restart=always \
--name powerjob-agent \
--name powerjob-worker-samples \
-p 27777:27777 -p 5002:5005 -p 10002:10000 \
-e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-e PARAMS="--app powerjob-agent-test --server $serverAddress" \
-v ~/docker/powerjob-agent:/root \
tjqq/powerjob-agent:latest
-e PARAMS="--powerjob.worker.server-address=$serverAddress" \
-v ~/docker/powerjob-worker-samples:/root \
tjqq/powerjob-worker-samples:latest
docker run -d \
--restart=always \
--name powerjob-agent2 \
--name powerjob-worker-samples2 \
-p 27778:27777 -p 5003:5005 -p 10003:10000 \
-e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-e PARAMS="--app powerjob-agent-test --server $serverAddress" \
-v ~/docker/powerjob-agent2:/root \
tjqq/powerjob-agent:latest
-e PARAMS="--powerjob.worker.server-address=$serverAddress" \
-v ~/docker/powerjob-worker-samples2:/root \
tjqq/powerjob-worker-samples:latest

182
others/script/wait-for-it.sh Executable file
View File

@ -0,0 +1,182 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available
# Basename of this script; used as a prefix in all status/error messages.
WAITFORIT_cmdname=${0##*/}
# Print all arguments to stderr, unless quiet mode (-q) is active.
echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
# Print usage help to stderr and terminate with a failure status.
usage()
{
cat << USAGE >&2
Usage:
$WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
-h HOST | --host=HOST Host or IP under test
-p PORT | --port=PORT TCP port under test
Alternatively, you specify the host and port as host:port
-s | --strict Only execute subcommand if the test succeeds
-q | --quiet Don't output any status messages
-t TIMEOUT | --timeout=TIMEOUT
Timeout in seconds, zero for no timeout
-- COMMAND ARGS Execute command with args after the test finishes
USAGE
exit 1
}
# Poll the target host:port once per second until a TCP connection succeeds.
# Returns the exit status of the last connection attempt (0 on success).
# NOTE: the timeout itself is enforced externally by wait_for_wrapper via the
# `timeout` command; WAITFORIT_TIMEOUT is only referenced here for the log line.
wait_for()
{
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
else
echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
fi
WAITFORIT_start_ts=$(date +%s)
while :
do
# busybox shells lack /dev/tcp, so probe with nc there instead
if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
nc -z $WAITFORIT_HOST $WAITFORIT_PORT
WAITFORIT_result=$?
else
# bash built-in TCP probe; output discarded, only the exit status matters
(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
WAITFORIT_result=$?
fi
if [[ $WAITFORIT_result -eq 0 ]]; then
WAITFORIT_end_ts=$(date +%s)
echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
break
fi
sleep 1
done
return $WAITFORIT_result
}
# Re-exec this script as a background child under `timeout` so the wait is bounded,
# while the parent stays responsive to SIGINT. Returns the child's exit status
# (non-zero means the timeout elapsed before the port became available).
wait_for_wrapper()
{
# In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
if [[ $WAITFORIT_QUIET -eq 1 ]]; then
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
else
timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
fi
WAITFORIT_PID=$!
# Forward Ctrl-C to the child's process group so the background wait dies too
trap "kill -INT -$WAITFORIT_PID" INT
wait $WAITFORIT_PID
WAITFORIT_RESULT=$?
if [[ $WAITFORIT_RESULT -ne 0 ]]; then
echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
fi
return $WAITFORIT_RESULT
}
# process arguments
while [[ $# -gt 0 ]]
do
case "$1" in
*:* )
# host:port shorthand — split on the colon into an array of two elements
WAITFORIT_hostport=(${1//:/ })
WAITFORIT_HOST=${WAITFORIT_hostport[0]}
WAITFORIT_PORT=${WAITFORIT_hostport[1]}
shift 1
;;
--child)
# internal flag: this invocation is the re-exec'ed child spawned by wait_for_wrapper
WAITFORIT_CHILD=1
shift 1
;;
-q | --quiet)
WAITFORIT_QUIET=1
shift 1
;;
-s | --strict)
WAITFORIT_STRICT=1
shift 1
;;
-h)
WAITFORIT_HOST="$2"
if [[ $WAITFORIT_HOST == "" ]]; then break; fi
shift 2
;;
--host=*)
WAITFORIT_HOST="${1#*=}"
shift 1
;;
-p)
WAITFORIT_PORT="$2"
if [[ $WAITFORIT_PORT == "" ]]; then break; fi
shift 2
;;
--port=*)
WAITFORIT_PORT="${1#*=}"
shift 1
;;
-t)
WAITFORIT_TIMEOUT="$2"
if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
shift 2
;;
--timeout=*)
WAITFORIT_TIMEOUT="${1#*=}"
shift 1
;;
--)
# everything after -- is the command to exec once the port is available
shift
WAITFORIT_CLI=("$@")
break
;;
--help)
usage
;;
*)
echoerr "Unknown argument: $1"
usage
;;
esac
done
if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
echoerr "Error: you need to provide a host and port to test."
usage
fi
# Defaults: 15s timeout, non-strict, parent process, verbose.
WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}
# Check to see if timeout is from busybox?
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)
WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
WAITFORIT_ISBUSY=1
# Check if busybox timeout uses -t flag
# (recent Alpine versions don't support -t anymore)
if timeout &>/dev/stdout | grep -q -e '-t '; then
WAITFORIT_BUSYTIMEFLAG="-t"
fi
else
WAITFORIT_ISBUSY=0
fi
# Dispatch: the child just polls; the parent polls under `timeout` (or unbounded if 0).
if [[ $WAITFORIT_CHILD -gt 0 ]]; then
wait_for
WAITFORIT_RESULT=$?
exit $WAITFORIT_RESULT
else
if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
wait_for_wrapper
WAITFORIT_RESULT=$?
else
wait_for
WAITFORIT_RESULT=$?
fi
fi
# If a follow-up command was given (after --), exec it; strict mode refuses on failure.
if [[ $WAITFORIT_CLI != "" ]]; then
if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
exit $WAITFORIT_RESULT
fi
exec "${WAITFORIT_CLI[@]}"
else
exit $WAITFORIT_RESULT
fi

2
others/sql/01schema.sql Normal file
View File

@ -0,0 +1,2 @@
-- powerjob
-- Bootstrap: creates only the daily-environment database. Tables are created
-- separately (presumably by the accompanying init SQL or JPA auto-DDL — confirm).
-- The hyphenated name requires backtick quoting everywhere it is referenced.
create database `powerjob-daily` default character set utf8mb4 collate utf8mb4_general_ci;

View File

@ -0,0 +1,21 @@
-- Docker test-environment init for the powerjob-daily database: creates the minimal
-- app_info table and seeds the default 'powerjob-worker-samples' application.
-- FIX: the database name contains a hyphen, so it must be backtick-quoted;
-- an unquoted `USE powerjob-daily` is a MySQL syntax error.
USE `powerjob-daily`;
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
CREATE TABLE IF NOT EXISTS `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT COMMENT '应用ID',
`app_name` varchar(128) not NULL COMMENT '应用名称',
`current_server` varchar(255) default null COMMENT 'Server地址,用于负责调度应用的ActorSystem地址',
`gmt_create` datetime not null COMMENT '创建时间',
`gmt_modified` datetime not null COMMENT '更新时间',
`password` varchar(255) not null COMMENT '应用密码',
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE = InnoDB AUTO_INCREMENT = 1
DEFAULT CHARSET = utf8mb4
COLLATE = utf8mb4_general_ci COMMENT ='应用表';
-- Idempotent seed: insert the sample application only if it does not already exist.
insert into app_info (app_name, gmt_create, gmt_modified, password) select 'powerjob-worker-samples', current_timestamp(), current_timestamp(), 'powerjob123' from dual where not exists ( select * from app_info where app_name = 'powerjob-worker-samples');
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,243 @@
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob4
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 02/03/2024 18:51:36
*/
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
-- Registered applications; app_name is the unique handle (current_server presumably
-- records which server instance currently schedules the app — confirm against server code).
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
-- Job execution instances; composite indexes all carry `status` as the trailing
-- column to serve the status-filtered lookups.
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
-- Job definitions; idx01 matches the scheduler's scan pattern
-- (by app, status, time-expression type, next trigger time).
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
-- Database-backed distributed lock; uniqueness of lock_name provides mutual exclusion.
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
-- Known server nodes; gmt_modified is indexed, presumably for liveness scans.
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
-- NOTE(review): both indexes below are named like unique indexes ("uidx") but are
-- declared as plain KEYs — duplicate usernames/emails are NOT prevented at the DB
-- level here. Confirm against the JPA entity whether uniqueness is intended.
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `uidx01_user_info` (`username`),
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,323 @@
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob5
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 16/03/2024 22:07:31
*/
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_name` (`username`),
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,323 @@
/*
Navicat Premium Data Transfer
Source Server : Local@3306
Source Server Type : MySQL
Source Server Version : 80300 (8.3.0)
Source Host : localhost:3306
Source Schema : powerjob5
Target Server Type : MySQL
Target Server Version : 80300 (8.3.0)
File Encoding : 65001
Date: 11/08/2024 23:23:30
*/
SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;
-- ----------------------------
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_name` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`current_server` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`namespace_id` bigint DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`title` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`container_name` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`last_deploy_time` datetime(6) DEFAULT NULL,
`source_info` varchar(255) DEFAULT NULL,
`source_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`version` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_id` bigint DEFAULT NULL,
`instance_params` longtext,
`job_id` bigint DEFAULT NULL,
`job_params` longtext,
`last_report_time` bigint DEFAULT NULL,
`result` longtext,
`running_times` bigint DEFAULT NULL,
`status` int DEFAULT NULL,
`task_tracker_address` varchar(255) DEFAULT NULL,
`type` int DEFAULT NULL,
`wf_instance_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_instance_info` (`job_id`,`status`),
KEY `idx02_instance_info` (`app_id`,`status`),
KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`advanced_runtime_config` varchar(255) DEFAULT NULL,
`alarm_config` varchar(255) DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`concurrency` int DEFAULT NULL,
`designated_workers` varchar(255) DEFAULT NULL,
`dispatch_strategy` int DEFAULT NULL,
`dispatch_strategy_config` varchar(255) DEFAULT NULL,
`execute_type` int DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`instance_retry_num` int DEFAULT NULL,
`instance_time_limit` bigint DEFAULT NULL,
`job_description` varchar(255) DEFAULT NULL,
`job_name` varchar(255) DEFAULT NULL,
`job_params` longtext,
`lifecycle` varchar(255) DEFAULT NULL,
`log_config` varchar(255) DEFAULT NULL,
`max_instance_num` int DEFAULT NULL,
`max_worker_count` int DEFAULT NULL,
`min_cpu_cores` double NOT NULL,
`min_disk_space` double NOT NULL,
`min_memory_space` double NOT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`processor_info` varchar(255) DEFAULT NULL,
`processor_type` int DEFAULT NULL,
`status` int DEFAULT NULL,
`tag` varchar(255) DEFAULT NULL,
`task_retry_num` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for namespace
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
`id` bigint NOT NULL AUTO_INCREMENT,
`code` varchar(255) DEFAULT NULL,
`creator` bigint DEFAULT NULL,
`dept` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`modifier` bigint DEFAULT NULL,
`name` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`tags` varchar(255) DEFAULT NULL,
`token` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lock_name` varchar(255) DEFAULT NULL,
`max_lock_time` bigint DEFAULT NULL,
`ownerip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`ip` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_server_info` (`ip`),
KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for sundry
-- ----------------------------
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
`id` bigint NOT NULL AUTO_INCREMENT,
`content` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`pkey` varchar(255) DEFAULT NULL,
`skey` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`account_type` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`nick` varchar(255) DEFAULT NULL,
`origin_username` varchar(255) DEFAULT NULL,
`password` varchar(255) DEFAULT NULL,
`phone` varchar(255) DEFAULT NULL,
`status` int DEFAULT NULL,
`token_login_verify_info` varchar(255) DEFAULT NULL,
`username` varchar(255) DEFAULT NULL,
`web_hook` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_user_name` (`username`),
KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
`id` bigint NOT NULL AUTO_INCREMENT,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`role` int DEFAULT NULL,
`scope` int DEFAULT NULL,
`target` bigint DEFAULT NULL,
`user_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint DEFAULT NULL,
`extra` varchar(255) DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`lifecycle` varchar(255) DEFAULT NULL,
`max_wf_instance_num` int DEFAULT NULL,
`next_trigger_time` bigint DEFAULT NULL,
`notify_user_ids` varchar(255) DEFAULT NULL,
`pedag` longtext,
`status` int DEFAULT NULL,
`time_expression` varchar(255) DEFAULT NULL,
`time_expression_type` int DEFAULT NULL,
`wf_description` varchar(255) DEFAULT NULL,
`wf_name` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`actual_trigger_time` bigint DEFAULT NULL,
`app_id` bigint DEFAULT NULL,
`dag` longtext,
`expected_trigger_time` bigint DEFAULT NULL,
`finished_time` bigint DEFAULT NULL,
`gmt_create` datetime(6) DEFAULT NULL,
`gmt_modified` datetime(6) DEFAULT NULL,
`parent_wf_instance_id` bigint DEFAULT NULL,
`result` longtext,
`status` int DEFAULT NULL,
`wf_context` longtext,
`wf_init_params` longtext,
`wf_instance_id` bigint DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
`id` bigint NOT NULL AUTO_INCREMENT,
`app_id` bigint NOT NULL,
`enable` bit(1) NOT NULL,
`extra` longtext,
`gmt_create` datetime(6) NOT NULL,
`gmt_modified` datetime(6) NOT NULL,
`job_id` bigint DEFAULT NULL,
`node_name` varchar(255) DEFAULT NULL,
`node_params` longtext,
`skip_when_failed` bit(1) NOT NULL,
`type` int DEFAULT NULL,
`workflow_id` bigint DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -0,0 +1,7 @@
由于存在不同数据库、不同版本的升级,官方能给出的 upgrade SQL 相对有限,大家可参考以下方式自行生成升级 SQL:
- 【官方脚本】参考官方每个版本的数据库全库建表文件(项目 others - sql - schema),自行进行字段 DIFF
- 【自己动手版】导出当前您的 powerjob 数据库表结构,同时创建一个测试库,让 5.x 版本的 server 直连该测试库,自动建表。分别拿到两个版本的表结构 SQL 后,借用工具生成 update SQL 即可(Navicat 等数据库管理软件均支持结构对比)
参考文档:https://www.yuque.com/powerjob/guidence/upgrade

View File

@ -0,0 +1,10 @@
-- Upgrade SQL FROM 4.0.x to 4.1.x
-- ----------------------------
-- Table change for workflow_instance_info
-- ----------------------------
-- New column linking a sub-workflow instance to its parent instance.
-- (fix: original read "bigint default null null" — the NULL attribute was
--  stated twice; MySQL tolerates the duplicate but it is redundant.)
alter table workflow_instance_info
    add parent_wf_instance_id bigint default null comment '上层工作流实例ID';
-- ----------------------------
-- Table change for job_info
-- ----------------------------
-- Per-job alarm configuration.
-- NOTE(review): the 5.x full schema declares alarm_config as varchar(255),
-- while this upgrade uses varchar(512) — confirm the intended column width.
alter table job_info add alarm_config varchar(512) comment '告警配置' default null;

View File

@ -0,0 +1,6 @@
-- Upgrade SQL FROM 4.1.x to 4.2.x
-- ----------------------------
-- Table change for job_info
-- ----------------------------
-- Free-form tag used to label/classify jobs.
alter table job_info add tag varchar(255) comment 'TAG' default null;
-- Per-job logging configuration (serialized; exact format defined server-side — TODO confirm).
alter table job_info add log_config varchar(255) comment 'logConfig' default null;

View File

@ -0,0 +1,6 @@
-- Upgrade SQL FROM 4.3.7 to 4.3.8
-- ----------------------------
-- Table change for job_info
-- ----------------------------
-- Extra configuration for the job's dispatch strategy.
alter table job_info add dispatch_strategy_config varchar(255) comment 'dispatch_strategy_config' default null;
-- Advanced runtime configuration for the job.
alter table job_info add advanced_runtime_config varchar(255) comment 'advanced_runtime_config' default null;

View File

@ -0,0 +1,88 @@
-- Upgrade SQL FROM 4.1.x to 4.2.x
-- NOTE(review): the header above looks like a copy-paste from the 4.1->4.2
-- script — the tables created below (namespace, pwjb_user_info, sundry,
-- user_role) and the new app_info/user_info columns match the 5.x full schema
-- in this repo, so this is presumably the 4.x -> 5.x upgrade. Confirm and fix.
-- ----------------------------
-- Table change for app_info
-- ----------------------------
-- NOTE(review): FK checks are disabled here but never re-enabled at the end
-- of this script (no closing SET FOREIGN_KEY_CHECKS=1) — confirm intent.
SET FOREIGN_KEY_CHECKS=0;
-- Audit + namespace/tagging columns for apps (5.x multi-tenant model).
ALTER TABLE `app_info` ADD COLUMN `creator` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `extra` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `modifier` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `namespace_id` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `tags` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `title` varchar(255) NULL DEFAULT NULL;
-- ----------------------------
-- Table change for user_info
-- ----------------------------
-- Columns supporting the 5.x pluggable auth system; username becomes unique.
ALTER TABLE `user_info` ADD COLUMN `account_type` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `nick` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `origin_username` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `token_login_verify_info` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD UNIQUE INDEX `uidx01_user_name`(`username` ASC) USING BTREE;
-- ----------------------------
-- new table 'namespace'
-- ----------------------------
-- Tenant/namespace grouping for apps; `code` is the unique business key.
CREATE TABLE `namespace` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `code` varchar(255) NULL DEFAULT NULL,
  `creator` bigint NULL DEFAULT NULL,
  `dept` varchar(255) NULL DEFAULT NULL,
  `extra` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `modifier` bigint NULL DEFAULT NULL,
  `name` varchar(255) NULL DEFAULT NULL,
  `status` int NULL DEFAULT NULL,
  `tags` varchar(255) NULL DEFAULT NULL,
  `token` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uidx01_namespace`(`code` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'pwjb_user_info'
-- ----------------------------
-- Credentials for PowerJob's built-in (non-federated) login accounts.
CREATE TABLE `pwjb_user_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `extra` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `password` varchar(255) NULL DEFAULT NULL,
  `username` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uidx01_username`(`username` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'sundry'
-- ----------------------------
-- Generic key/value store; unique on the (pkey, skey) pair.
CREATE TABLE `sundry` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `content` varchar(255) NULL DEFAULT NULL,
  `extra` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `pkey` varchar(255) NULL DEFAULT NULL,
  `skey` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uidx01_sundry`(`pkey` ASC, `skey` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 3 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
-- ----------------------------
-- new table 'user_role'
-- ----------------------------
-- Grants a role to a user on a target scope (role/scope/target are int/bigint
-- codes interpreted server-side).
CREATE TABLE `user_role` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `extra` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `role` int NULL DEFAULT NULL,
  `scope` int NULL DEFAULT NULL,
  `target` bigint NULL DEFAULT NULL,
  `user_id` bigint NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  -- NOTE(review): non-unique INDEX despite the "uidx" prefix; the 5.x full
  -- schema uses the same name for a plain KEY, so the prefix is just misleading.
  INDEX `uidx01_user_id`(`user_id` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;

14
pom.xml
View File

@ -6,7 +6,7 @@
<groupId>tech.powerjob</groupId>
<artifactId>powerjob</artifactId>
<version>3.0.0</version>
<version>5.1.1</version>
<packaging>pom</packaging>
<name>powerjob</name>
<url>http://www.powerjob.tech</url>
@ -44,6 +44,7 @@
<module>powerjob-worker-spring-boot-starter</module>
<module>powerjob-worker-samples</module>
<module>powerjob-official-processors</module>
<module>powerjob-remote</module>
</modules>
<properties>
@ -150,6 +151,17 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.sonatype.plugins</groupId>
<artifactId>nexus-staging-maven-plugin</artifactId>
<version>1.6.7</version>
<extensions>true</extensions>
<configuration>
<serverId>ossrh</serverId>
<nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>
<autoReleaseAfterClose>true</autoReleaseAfterClose>
</configuration>
</plugin>
</plugins>
</build>

View File

@ -5,18 +5,19 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>3.0.0</version>
<version>5.1.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-client</artifactId>
<version>4.0.0</version>
<version>5.1.1</version>
<packaging>jar</packaging>
<properties>
<junit.version>5.6.1</junit.version>
<fastjson.version>1.2.68</fastjson.version>
<powerjob.common.version>4.0.0</powerjob.common.version>
<junit.version>5.9.1</junit.version>
<logback.version>1.2.13</logback.version>
<fastjson.version>1.2.83</fastjson.version>
<powerjob.common.version>5.1.1</powerjob.common.version>
<mvn.shade.plugin.version>3.2.4</mvn.shade.plugin.version>
</properties>
@ -44,6 +45,13 @@
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<!-- log for test stage -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>

View File

@ -0,0 +1,71 @@
package tech.powerjob.client;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
import tech.powerjob.client.common.Protocol;
import tech.powerjob.client.extension.ClientExtension;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
/**
 * Client-side configuration for the PowerJob OpenAPI client.
 *
 * <p>All setters return {@code this} (via {@code @Accessors(chain = true)}),
 * so instances can be built fluently.
 *
 * @author 程序帕鲁
 * @since 2024/2/20
 */
@Getter
@Setter
@ToString
@Accessors(chain = true)
public class ClientConfig implements Serializable {

    /**
     * AppName of the target executor application.
     */
    private String appName;

    /**
     * Password of the executor application.
     */
    private String password;

    /**
     * Server address list. Supported formats:
     * - IP:Port, e.g. 192.168.1.1:7700
     * - domain name, e.g. powerjob.apple-inc.com
     */
    private List<String> addressList;

    /**
     * Protocol used to communicate with the server; defaults to HTTP.
     */
    private Protocol protocol = Protocol.HTTP;

    /**
     * Connection timeout (unit not stated here — presumably milliseconds;
     * TODO confirm against the HTTP client wiring).
     */
    private Integer connectionTimeout;

    /**
     * Read timeout: the maximum time to wait for the server's response data —
     * from the moment the server starts returning the response (HTTP headers
     * and body) until the client has finished reading it.
     */
    private Integer readTimeout;

    /**
     * Write timeout: the maximum time allowed for sending request data (e.g. a
     * POST body) from the start of transmission until it is fully sent.
     */
    private Integer writeTimeout;

    /**
     * Headers attached to every request by default, so that infrastructure can
     * identify this client's traffic.
     */
    private Map<String, String> defaultHeaders;

    /**
     * Extension hooks customizing client behavior.
     */
    private ClientExtension clientExtension;
}

View File

@ -0,0 +1,82 @@
package tech.powerjob.client;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.response.*;
import java.util.List;
/**
 * PowerJobClient, the client for OpenAPI.
 *
 * <p>Every method returns a {@code ResultDTO} wrapping either the requested
 * payload or an error description from the server.
 *
 * @author tjq
 * @since 2023/3/5
 */
public interface IPowerJobClient {

    /* ************* Job API list ************* */

    /** Exports the full definition of a job (re-importable via {@link #saveJob}). */
    ResultDTO<SaveJobInfoRequest> exportJob(Long jobId);

    /** Creates or updates a job; the result wraps the job's id. */
    ResultDTO<Long> saveJob(SaveJobInfoRequest request);

    /** Duplicates an existing job; the result wraps the new job's id. */
    ResultDTO<Long> copyJob(Long jobId);

    /** Fetches a single job's info by id. */
    ResultDTO<JobInfoDTO> fetchJob(Long jobId);

    /** Fetches all jobs visible to this client. */
    ResultDTO<List<JobInfoDTO>> fetchAllJob();

    /** Queries jobs matching the given filter. */
    ResultDTO<List<JobInfoDTO>> queryJob(JobInfoQuery powerQuery);

    /** Disables a job (stops future triggering). */
    ResultDTO<Void> disableJob(Long jobId);

    /** Enables a previously disabled job. */
    ResultDTO<Void> enableJob(Long jobId);

    /** Deletes a job. */
    ResultDTO<Void> deleteJob(Long jobId);

    /**
     * Triggers a job run.
     *
     * @param jobId          id of the job to run
     * @param instanceParams parameters for this specific run
     * @param delayMS        delay before triggering, in milliseconds
     * @return result wrapping the new instance's id
     */
    ResultDTO<Long> runJob(Long jobId, String instanceParams, long delayMS);

    /* ************* Instance API list ************* */

    /** Stops a running instance. */
    ResultDTO<Void> stopInstance(Long instanceId);

    /** Cancels an instance that has not started yet. */
    ResultDTO<Void> cancelInstance(Long instanceId);

    /** Retries a finished (e.g. failed) instance. */
    ResultDTO<Void> retryInstance(Long instanceId);

    /** Fetches only the status code of an instance. */
    ResultDTO<Integer> fetchInstanceStatus(Long instanceId);

    /** Fetches the full info of an instance. */
    ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId);

    /** Pages through instances matching the given query. */
    ResultDTO<PageResult<InstanceInfoDTO>> queryInstanceInfo(InstancePageQuery instancePageQuery);

    /* ************* Workflow API list ************* */

    /** Creates or updates a workflow; the result wraps the workflow's id. */
    ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request);

    /** Duplicates an existing workflow; the result wraps the new workflow's id. */
    ResultDTO<Long> copyWorkflow(Long workflowId);

    /** Creates or updates workflow nodes in batch. */
    ResultDTO<List<WorkflowNodeInfoDTO>> saveWorkflowNode(List<SaveWorkflowNodeRequest> requestList);

    /** Fetches a workflow's info by id. */
    ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId);

    /** Disables a workflow (stops future triggering). */
    ResultDTO<Void> disableWorkflow(Long workflowId);

    /** Enables a previously disabled workflow. */
    ResultDTO<Void> enableWorkflow(Long workflowId);

    /** Deletes a workflow. */
    ResultDTO<Void> deleteWorkflow(Long workflowId);

    /**
     * Triggers a workflow run.
     *
     * @param workflowId id of the workflow to run
     * @param initParams initial parameters for this run
     * @param delayMS    delay before triggering, in milliseconds
     * @return result wrapping the new workflow instance's id
     */
    ResultDTO<Long> runWorkflow(Long workflowId, String initParams, long delayMS);

    /* ************* Workflow Instance API list ************* */

    /** Stops a running workflow instance. */
    ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId);

    /** Retries a finished workflow instance. */
    ResultDTO<Void> retryWorkflowInstance(Long wfInstanceId);

    /** Marks a node of a workflow instance as successful (manual intervention). */
    ResultDTO<Void> markWorkflowNodeAsSuccess(Long wfInstanceId, Long nodeId);

    /** Fetches the full info of a workflow instance. */
    ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId);
}

View File

@ -1,28 +1,33 @@
package tech.powerjob.client;
import com.alibaba.fastjson.JSON;
import tech.powerjob.common.enums.InstanceStatus;
import tech.powerjob.common.OmsConstant;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.client.module.AppAuthRequest;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.client.service.RequestService;
import tech.powerjob.client.service.impl.ClusterRequestServiceOkHttp3Impl;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.enums.EncryptType;
import tech.powerjob.common.enums.InstanceStatus;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.response.*;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.common.utils.HttpUtils;
import tech.powerjob.common.serialize.JsonUtils;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import okhttp3.FormBody;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.common.utils.DigestUtils;
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
import java.util.Map;
import static tech.powerjob.client.TypeStore.*;
@ -33,14 +38,44 @@ import static tech.powerjob.client.TypeStore.*;
* @since 2020/4/15
*/
@Slf4j
public class PowerJobClient {
public class PowerJobClient implements IPowerJobClient, Closeable {
private Long appId;
private String currentAddress;
private final List<String> allAddress;
private final RequestService requestService;
private static final String URL_PATTERN = "http://%s%s%s";
/**
 * Initializes the client from a full {@link ClientConfig}: builds the cluster request
 * service, authenticates the app against the server and caches the resulting appId.
 *
 * @param config client configuration; addressList and appName are mandatory
 */
public PowerJobClient(ClientConfig config) {
    List<String> addressList = config.getAddressList();
    String appName = config.getAppName();

    CommonUtils.requireNonNull(addressList, "addressList can't be null!");
    CommonUtils.requireNonNull(appName, "appName can't be null");

    this.requestService = new ClusterRequestServiceOkHttp3Impl(config);

    // Authenticate the app: send appName + MD5-digested password, expect the appId back.
    AppAuthRequest appAuthRequest = new AppAuthRequest();
    appAuthRequest.setAppName(appName);
    appAuthRequest.setEncryptedPassword(DigestUtils.md5(config.getPassword()));
    appAuthRequest.setEncryptType(EncryptType.MD5.getCode());
    String assertResponse = requestService.request(OpenAPIConstant.AUTH_APP, PowerRequestBody.newJsonRequestBody(appAuthRequest));
    if (StringUtils.isNotEmpty(assertResponse)) {
        ResultDTO<AppAuthResult> resultDTO = JSON.parseObject(assertResponse, APP_AUTH_RESULT_TYPE);
        if (resultDTO.isSuccess()) {
            appId = resultDTO.getData().getAppId();
        } else {
            // Server explicitly rejected the auth request — surface its message.
            throw new PowerJobException(resultDTO.getMessage());
        }
    }
    // Covers both an empty response and a success payload without an appId.
    if (appId == null) {
        throw new PowerJobException("appId is null, please check your config");
    }
    log.info("[PowerJobClient] [INIT] {}'s PowerJobClient bootstrap successfully", appName);
}
/**
* Init PowerJobClient with domain, appName and password.
*
@ -49,7 +84,7 @@ public class PowerJobClient {
* @param password password of the application
*/
public PowerJobClient(String domain, String appName, String password) {
this(Lists.newArrayList(domain), appName, password);
this(new ClientConfig().setAppName(appName).setPassword(password).setAddressList(Lists.newArrayList(domain)));
}
@ -61,48 +96,7 @@ public class PowerJobClient {
* @param password password of the application
*/
public PowerJobClient(List<String> addressList, String appName, String password) {
CommonUtils.requireNonNull(addressList, "addressList can't be null!");
CommonUtils.requireNonNull(appName, "appName can't be null");
allAddress = addressList;
for (String addr : addressList) {
String url = getUrl(OpenAPIConstant.ASSERT, addr);
try {
String result = assertApp(appName, password, url);
if (StringUtils.isNotEmpty(result)) {
ResultDTO<Long> resultDTO = JSON.parseObject(result, LONG_RESULT_TYPE);
if (resultDTO.isSuccess()) {
appId = resultDTO.getData();
currentAddress = addr;
break;
} else {
throw new PowerJobException(resultDTO.getMessage());
}
}
} catch (IOException ignore) {
//
}
}
if (StringUtils.isEmpty(currentAddress)) {
throw new PowerJobException("no server available for PowerJobClient");
}
log.info("[PowerJobClient] {}'s PowerJobClient bootstrap successfully, using server: {}", appName, currentAddress);
}
private static String assertApp(String appName, String password, String url) throws IOException {
FormBody.Builder builder = new FormBody.Builder()
.add("appName", appName);
if (password != null) {
builder.add("password", password);
}
return HttpUtils.post(url, builder.build());
}
private static String getUrl(String path, String address) {
return String.format(URL_PATTERN, address, OpenAPIConstant.WEB_PATH, path);
this(new ClientConfig().setAppName(appName).setPassword(password).setAddressList(addressList));
}
/* ************* Job 区 ************* */
@ -114,12 +108,11 @@ public class PowerJobClient {
* @param request Job meta info
* @return jobId
*/
@Override
public ResultDTO<Long> saveJob(SaveJobInfoRequest request) {
request.setAppId(appId);
MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
String json = JSON.toJSONString(request);
String post = postHA(OpenAPIConstant.SAVE_JOB, RequestBody.create(jsonType, json));
String post = requestService.request(OpenAPIConstant.SAVE_JOB, PowerRequestBody.newJsonRequestBody(request));
return JSON.parseObject(post, LONG_RESULT_TYPE);
}
@ -130,27 +123,37 @@ public class PowerJobClient {
* @param jobId Job id
* @return Id of job copy
*/
@Override
public ResultDTO<Long> copyJob(Long jobId) {
RequestBody body = new FormBody.Builder()
.add("jobId", jobId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.COPY_JOB, body);
Map<String, String> param = Maps.newHashMap();
param.put("jobId", jobId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.COPY_JOB, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, LONG_RESULT_TYPE);
}
/**
 * Exports a job's full definition as a {@link SaveJobInfoRequest}
 * (the same shape accepted by {@link #saveJob}).
 *
 * @param jobId id of the job to export
 * @return the job definition
 */
@Override
public ResultDTO<SaveJobInfoRequest> exportJob(Long jobId) {
    Map<String, String> param = Maps.newHashMap();
    param.put("jobId", jobId.toString());
    // appId scopes the request to the authenticated application
    param.put("appId", appId.toString());
    String post = requestService.request(OpenAPIConstant.EXPORT_JOB, PowerRequestBody.newFormRequestBody(param));
    return JSON.parseObject(post, SAVE_JOB_INFO_REQUEST_RESULT_TYPE);
}
/**
* Query JobInfo by jobId
*
* @param jobId jobId
* @return Job meta info
*/
@Override
public ResultDTO<JobInfoDTO> fetchJob(Long jobId) {
RequestBody body = new FormBody.Builder()
.add("jobId", jobId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.FETCH_JOB, body);
Map<String, String> param = Maps.newHashMap();
param.put("jobId", jobId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.FETCH_JOB, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, JOB_RESULT_TYPE);
}
@ -159,11 +162,11 @@ public class PowerJobClient {
*
* @return All JobInfo
*/
@Override
public ResultDTO<List<JobInfoDTO>> fetchAllJob() {
RequestBody body = new FormBody.Builder()
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.FETCH_ALL_JOB, body);
Map<String, String> param = Maps.newHashMap();
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.FETCH_ALL_JOB, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, LIST_JOB_RESULT_TYPE);
}
@ -173,11 +176,10 @@ public class PowerJobClient {
* @param powerQuery JobQuery
* @return JobInfo
*/
@Override
public ResultDTO<List<JobInfoDTO>> queryJob(JobInfoQuery powerQuery) {
powerQuery.setAppIdEq(appId);
MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
String json = JsonUtils.toJSONStringUnsafe(powerQuery);
String post = postHA(OpenAPIConstant.QUERY_JOB, RequestBody.create(jsonType, json));
String post = requestService.request(OpenAPIConstant.QUERY_JOB, PowerRequestBody.newJsonRequestBody(powerQuery));
return JSON.parseObject(post, LIST_JOB_RESULT_TYPE);
}
@ -187,12 +189,12 @@ public class PowerJobClient {
* @param jobId jobId
* @return Standard return object
*/
@Override
public ResultDTO<Void> disableJob(Long jobId) {
RequestBody body = new FormBody.Builder()
.add("jobId", jobId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.DISABLE_JOB, body);
Map<String, String> param = Maps.newHashMap();
param.put("jobId", jobId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.DISABLE_JOB, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -202,12 +204,12 @@ public class PowerJobClient {
* @param jobId jobId
* @return Standard return object
*/
@Override
public ResultDTO<Void> enableJob(Long jobId) {
RequestBody body = new FormBody.Builder()
.add("jobId", jobId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.ENABLE_JOB, body);
Map<String, String> param = Maps.newHashMap();
param.put("jobId", jobId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.ENABLE_JOB, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -217,12 +219,12 @@ public class PowerJobClient {
* @param jobId jobId
* @return Standard return object
*/
@Override
public ResultDTO<Void> deleteJob(Long jobId) {
RequestBody body = new FormBody.Builder()
.add("jobId", jobId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.DELETE_JOB, body);
Map<String, String> param = Maps.newHashMap();
param.put("jobId", jobId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.DELETE_JOB, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -234,16 +236,18 @@ public class PowerJobClient {
* @param delayMS Delay timeMilliseconds
* @return instanceId
*/
@Override
public ResultDTO<Long> runJob(Long jobId, String instanceParams, long delayMS) {
FormBody.Builder builder = new FormBody.Builder()
.add("jobId", jobId.toString())
.add("appId", appId.toString())
.add("delay", String.valueOf(delayMS));
Map<String, String> param = Maps.newHashMap();
param.put("jobId", jobId.toString());
param.put("appId", appId.toString());
param.put("delay", String.valueOf(delayMS));
if (StringUtils.isNotEmpty(instanceParams)) {
builder.add("instanceParams", instanceParams);
param.put("instanceParams", instanceParams);
}
String post = postHA(OpenAPIConstant.RUN_JOB, builder.build());
String post = requestService.request(OpenAPIConstant.RUN_JOB, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, LONG_RESULT_TYPE);
}
@ -259,12 +263,14 @@ public class PowerJobClient {
* @param instanceId instanceId
* @return Standard return object
*/
@Override
public ResultDTO<Void> stopInstance(Long instanceId) {
RequestBody body = new FormBody.Builder()
.add("instanceId", instanceId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.STOP_INSTANCE, body);
Map<String, String> param = Maps.newHashMap();
param.put("instanceId", instanceId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.STOP_INSTANCE, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -275,12 +281,12 @@ public class PowerJobClient {
* @param instanceId instanceId
* @return Standard return object
*/
@Override
public ResultDTO<Void> cancelInstance(Long instanceId) {
RequestBody body = new FormBody.Builder()
.add("instanceId", instanceId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.CANCEL_INSTANCE, body);
Map<String, String> param = Maps.newHashMap();
param.put("instanceId", instanceId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.CANCEL_INSTANCE, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -291,12 +297,12 @@ public class PowerJobClient {
* @param instanceId instanceId
* @return Standard return object
*/
@Override
public ResultDTO<Void> retryInstance(Long instanceId) {
RequestBody body = new FormBody.Builder()
.add("instanceId", instanceId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.RETRY_INSTANCE, body);
Map<String, String> param = Maps.newHashMap();
param.put("instanceId", instanceId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.RETRY_INSTANCE, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -306,11 +312,12 @@ public class PowerJobClient {
* @param instanceId instanceId
* @return {@link InstanceStatus}
*/
@Override
public ResultDTO<Integer> fetchInstanceStatus(Long instanceId) {
RequestBody body = new FormBody.Builder()
.add("instanceId", instanceId.toString())
.build();
String post = postHA(OpenAPIConstant.FETCH_INSTANCE_STATUS, body);
Map<String, String> param = Maps.newHashMap();
param.put("instanceId", instanceId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.FETCH_INSTANCE_STATUS, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, INTEGER_RESULT_TYPE);
}
@ -320,14 +327,22 @@ public class PowerJobClient {
* @param instanceId instanceId
* @return instance detail
*/
@Override
public ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId) {
RequestBody body = new FormBody.Builder()
.add("instanceId", instanceId.toString())
.build();
String post = postHA(OpenAPIConstant.FETCH_INSTANCE_INFO, body);
Map<String, String> param = Maps.newHashMap();
param.put("instanceId", instanceId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.FETCH_INSTANCE_INFO, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, INSTANCE_RESULT_TYPE);
}
/**
 * Queries instances by page.
 *
 * @param instancePageQuery paged query conditions; its appId condition is
 *                          overwritten with the authenticated appId before sending
 * @return one page of matching instances
 */
@Override
public ResultDTO<PageResult<InstanceInfoDTO>> queryInstanceInfo(InstancePageQuery instancePageQuery) {
    instancePageQuery.setAppIdEq(appId);
    String post = requestService.request(OpenAPIConstant.QUERY_INSTANCE, PowerRequestBody.newJsonRequestBody(instancePageQuery));
    return JSON.parseObject(post, PAGE_INSTANCE_RESULT_TYPE);
}
/* ************* Workflow API list ************* */
/**
@ -337,12 +352,12 @@ public class PowerJobClient {
* @param request Workflow meta info
* @return workflowId
*/
@Override
public ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request) {
request.setAppId(appId);
MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
// 中坑记录 FastJSON 序列化会导致 Server 接收时 pEWorkflowDAG null无语.jpg
String json = JsonUtils.toJSONStringUnsafe(request);
String post = postHA(OpenAPIConstant.SAVE_WORKFLOW, RequestBody.create(jsonType, json));
String post = requestService.request(OpenAPIConstant.SAVE_WORKFLOW, PowerRequestBody.newJsonRequestBody(json));
return JSON.parseObject(post, LONG_RESULT_TYPE);
}
@ -352,12 +367,14 @@ public class PowerJobClient {
* @param workflowId Workflow id
* @return Id of workflow copy
*/
@Override
public ResultDTO<Long> copyWorkflow(Long workflowId) {
RequestBody body = new FormBody.Builder()
.add("workflowId", workflowId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.COPY_WORKFLOW, body);
Map<String, String> param = Maps.newHashMap();
param.put("workflowId", workflowId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.COPY_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, LONG_RESULT_TYPE);
}
@ -368,13 +385,14 @@ public class PowerJobClient {
* @param requestList Node info list of Workflow
* @return Standard return object
*/
@Override
public ResultDTO<List<WorkflowNodeInfoDTO>> saveWorkflowNode(List<SaveWorkflowNodeRequest> requestList) {
for (SaveWorkflowNodeRequest saveWorkflowNodeRequest : requestList) {
saveWorkflowNodeRequest.setAppId(appId);
}
MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
String json = JsonUtils.toJSONStringUnsafe(requestList);
String post = postHA(OpenAPIConstant.SAVE_WORKFLOW_NODE, RequestBody.create(jsonType, json));
String post = requestService.request(OpenAPIConstant.SAVE_WORKFLOW_NODE, PowerRequestBody.newJsonRequestBody(json));
return JSON.parseObject(post, WF_NODE_LIST_RESULT_TYPE);
}
@ -386,12 +404,12 @@ public class PowerJobClient {
* @param workflowId workflowId
* @return Workflow meta info
*/
@Override
public ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId) {
RequestBody body = new FormBody.Builder()
.add("workflowId", workflowId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.FETCH_WORKFLOW, body);
Map<String, String> param = Maps.newHashMap();
param.put("workflowId", workflowId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.FETCH_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, WF_RESULT_TYPE);
}
@ -401,12 +419,12 @@ public class PowerJobClient {
* @param workflowId workflowId
* @return Standard return object
*/
@Override
public ResultDTO<Void> disableWorkflow(Long workflowId) {
RequestBody body = new FormBody.Builder()
.add("workflowId", workflowId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.DISABLE_WORKFLOW, body);
Map<String, String> param = Maps.newHashMap();
param.put("workflowId", workflowId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.DISABLE_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -416,12 +434,12 @@ public class PowerJobClient {
* @param workflowId workflowId
* @return Standard return object
*/
@Override
public ResultDTO<Void> enableWorkflow(Long workflowId) {
RequestBody body = new FormBody.Builder()
.add("workflowId", workflowId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.ENABLE_WORKFLOW, body);
Map<String, String> param = Maps.newHashMap();
param.put("workflowId", workflowId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.ENABLE_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -431,12 +449,12 @@ public class PowerJobClient {
* @param workflowId workflowId
* @return Standard return object
*/
@Override
public ResultDTO<Void> deleteWorkflow(Long workflowId) {
RequestBody body = new FormBody.Builder()
.add("workflowId", workflowId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.DELETE_WORKFLOW, body);
Map<String, String> param = Maps.newHashMap();
param.put("workflowId", workflowId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.DELETE_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -448,15 +466,19 @@ public class PowerJobClient {
* @param delayMS Delay timeMilliseconds
* @return workflow instanceId
*/
@Override
public ResultDTO<Long> runWorkflow(Long workflowId, String initParams, long delayMS) {
FormBody.Builder builder = new FormBody.Builder()
.add("workflowId", workflowId.toString())
.add("appId", appId.toString())
.add("delay", String.valueOf(delayMS));
Map<String, String> param = Maps.newHashMap();
param.put("workflowId", workflowId.toString());
param.put("appId", appId.toString());
param.put("delay", String.valueOf(delayMS));
if (StringUtils.isNotEmpty(initParams)) {
builder.add("initParams", initParams);
param.put("initParams", initParams);
}
String post = postHA(OpenAPIConstant.RUN_WORKFLOW, builder.build());
String post = requestService.request(OpenAPIConstant.RUN_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, LONG_RESULT_TYPE);
}
@ -472,12 +494,14 @@ public class PowerJobClient {
* @param wfInstanceId workflow instanceId
* @return Standard return object
*/
@Override
public ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId) {
RequestBody body = new FormBody.Builder()
.add("wfInstanceId", wfInstanceId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.STOP_WORKFLOW_INSTANCE, body);
Map<String, String> param = Maps.newHashMap();
param.put("wfInstanceId", wfInstanceId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.STOP_WORKFLOW_INSTANCE, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -487,12 +511,12 @@ public class PowerJobClient {
* @param wfInstanceId workflow instanceId
* @return Standard return object
*/
@Override
public ResultDTO<Void> retryWorkflowInstance(Long wfInstanceId) {
RequestBody body = new FormBody.Builder()
.add("wfInstanceId", wfInstanceId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.RETRY_WORKFLOW_INSTANCE, body);
Map<String, String> param = Maps.newHashMap();
param.put("wfInstanceId", wfInstanceId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.RETRY_WORKFLOW_INSTANCE, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -503,13 +527,15 @@ public class PowerJobClient {
* @param nodeId node id
* @return Standard return object
*/
@Override
public ResultDTO<Void> markWorkflowNodeAsSuccess(Long wfInstanceId, Long nodeId) {
RequestBody body = new FormBody.Builder()
.add("wfInstanceId", wfInstanceId.toString())
.add("nodeId", nodeId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.MARK_WORKFLOW_NODE_AS_SUCCESS, body);
Map<String, String> param = Maps.newHashMap();
param.put("wfInstanceId", wfInstanceId.toString());
param.put("appId", appId.toString());
param.put("nodeId", nodeId.toString());
String post = requestService.request(OpenAPIConstant.MARK_WORKFLOW_NODE_AS_SUCCESS, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, VOID_RESULT_TYPE);
}
@ -519,48 +545,19 @@ public class PowerJobClient {
* @param wfInstanceId workflow instanceId
* @return detail about a workflow
*/
@Override
public ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId) {
RequestBody body = new FormBody.Builder()
.add("wfInstanceId", wfInstanceId.toString())
.add("appId", appId.toString())
.build();
String post = postHA(OpenAPIConstant.FETCH_WORKFLOW_INSTANCE_INFO, body);
Map<String, String> param = Maps.newHashMap();
param.put("wfInstanceId", wfInstanceId.toString());
param.put("appId", appId.toString());
String post = requestService.request(OpenAPIConstant.FETCH_WORKFLOW_INSTANCE_INFO, PowerRequestBody.newFormRequestBody(param));
return JSON.parseObject(post, WF_INSTANCE_RESULT_TYPE);
}
private String postHA(String path, RequestBody requestBody) {
// 先尝试默认地址
String url = getUrl(path, currentAddress);
try {
String res = HttpUtils.post(url, requestBody);
if (StringUtils.isNotEmpty(res)) {
return res;
}
} catch (IOException e) {
log.warn("[PowerJobClient] request url:{} failed, reason is {}.", url, e.toString());
}
// 失败开始重试
for (String addr : allAddress) {
if (Objects.equals(addr, currentAddress)) {
continue;
}
url = getUrl(path, addr);
try {
String res = HttpUtils.post(url, requestBody);
if (StringUtils.isNotEmpty(res)) {
log.warn("[PowerJobClient] server change: from({}) -> to({}).", currentAddress, addr);
currentAddress = addr;
return res;
}
} catch (IOException e) {
log.warn("[PowerJobClient] request url:{} failed, reason is {}.", url, e.toString());
}
}
log.error("[PowerJobClient] do post for path: {} failed because of no server available in {}.", path, allAddress);
throw new PowerJobException("no server available when send post request");
/**
 * Closes the underlying request service, releasing its resources.
 *
 * @throws IOException if closing the request service fails
 */
@Override
public void close() throws IOException {
    requestService.close();
}
}

View File

@ -1,6 +1,8 @@
package tech.powerjob.client;
import com.alibaba.fastjson.TypeReference;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.response.*;
import java.util.List;
@ -13,6 +15,7 @@ import java.util.List;
*/
public class TypeStore {
public static final TypeReference<ResultDTO<AppAuthResult>> APP_AUTH_RESULT_TYPE = new TypeReference<ResultDTO<AppAuthResult>>(){};
public static final TypeReference<ResultDTO<Void>> VOID_RESULT_TYPE = new TypeReference<ResultDTO<Void>>(){};
public static final TypeReference<ResultDTO<Integer>> INTEGER_RESULT_TYPE = new TypeReference<ResultDTO<Integer>>(){};
@ -21,12 +24,16 @@ public class TypeStore {
public static final TypeReference<ResultDTO<JobInfoDTO>> JOB_RESULT_TYPE = new TypeReference<ResultDTO<JobInfoDTO>>(){};
public static final TypeReference<ResultDTO<SaveJobInfoRequest>> SAVE_JOB_INFO_REQUEST_RESULT_TYPE = new TypeReference<ResultDTO<SaveJobInfoRequest>>(){};
public static final TypeReference<ResultDTO<List<JobInfoDTO>>> LIST_JOB_RESULT_TYPE = new TypeReference<ResultDTO<List<JobInfoDTO>>>(){};
public static final TypeReference<ResultDTO<InstanceInfoDTO>> INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<InstanceInfoDTO>>() {};
public static final TypeReference<ResultDTO<List<InstanceInfoDTO>>> LIST_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<List<InstanceInfoDTO>>>(){};
public static final TypeReference<ResultDTO<PageResult<InstanceInfoDTO>>> PAGE_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<PageResult<InstanceInfoDTO>>>(){};
public static final TypeReference<ResultDTO<WorkflowInfoDTO>> WF_RESULT_TYPE = new TypeReference<ResultDTO<WorkflowInfoDTO>>() {};
public static final TypeReference<ResultDTO<WorkflowInstanceInfoDTO>> WF_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<WorkflowInstanceInfoDTO>>() {};

View File

@ -0,0 +1,28 @@
package tech.powerjob.client.common;
import lombok.Getter;
/**
 * Communication protocol between client and server.
 *
 * @author tjq
 * @since 2024/2/20
 */
@Getter
public enum Protocol {

    /**
     * Plain HTTP.
     */
    HTTP("http"),
    /**
     * HTTP over TLS.
     */
    HTTPS("https");

    /**
     * Lowercase scheme name.
     */
    private final String protocol;

    Protocol(String protocol) {
        this.protocol = protocol;
    }

    /**
     * Returns the lowercase scheme name ("http" / "https").
     */
    @Override
    public String toString() {
        return protocol;
    }
}

View File

@ -0,0 +1,19 @@
package tech.powerjob.client.extension;
import java.util.List;
/**
 * Extension SPI for customizing client behavior.
 *
 * @author tjq
 * @since 2024/8/11
 */
public interface ClientExtension {

    /**
     * Dynamically provides server addresses; intended for scenarios where the server
     * is deployed on a dynamic cluster.
     *
     * @param context context of the invocation
     * @return address list; format requirements are the same as ClientConfig#addressList
     */
    List<String> addressProvider(ExtensionContext context);
}

View File

@ -0,0 +1,10 @@
package tech.powerjob.client.extension;
/**
 * Extension context passed to {@code ClientExtension} callbacks.
 * Currently carries no data.
 *
 * @author tjq
 * @since 2024/8/11
 */
public class ExtensionContext {
}

View File

@ -0,0 +1,39 @@
package tech.powerjob.client.module;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
import java.util.Map;
/**
 * App authentication request.
 *
 * @author tjq
 * @since 2024/2/19
 */
@Getter
@Setter
@ToString
public class AppAuthRequest implements Serializable {

    /**
     * Application name.
     */
    private String appName;

    /**
     * Password after encryption.
     */
    private String encryptedPassword;

    /**
     * Encryption type used for {@link #encryptedPassword} (e.g. the code of EncryptType.MD5).
     */
    private String encryptType;

    /**
     * Extra parameters, for developers to pass additional data.
     */
    private Map<String, Object> extra;
}

View File

@ -0,0 +1,30 @@
package tech.powerjob.client.module;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
import java.util.Map;
/**
 * App authentication response.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Getter
@Setter
@ToString
public class AppAuthResult implements Serializable {

    /**
     * Id of the authenticated application.
     */
    private Long appId;

    /**
     * Auth token issued by the server.
     */
    private String token;

    /**
     * Extra parameters.
     * Developers with security requirements can extend via this field.
     */
    private Map<String, Object> extra;
}

View File

@ -0,0 +1,26 @@
package tech.powerjob.client.service;
import lombok.Data;
import lombok.experimental.Accessors;
import java.io.Serializable;
import java.util.Map;
/**
 * HTTP response wrapper.
 *
 * @author tjq
 * @since 2024/8/10
 */
@Data
@Accessors(chain = true)
public class HttpResponse implements Serializable {

    /**
     * Whether the HTTP call succeeded.
     */
    private boolean success;

    /**
     * Response code (presumably the HTTP status code — confirm against the client implementation).
     */
    private int code;

    /**
     * Raw response body.
     */
    private String response;

    /**
     * Response headers.
     */
    private Map<String, String> headers;
}

View File

@ -0,0 +1,47 @@
package tech.powerjob.client.service;
import com.google.common.collect.Maps;
import lombok.Getter;
import tech.powerjob.common.enums.MIME;
import java.util.Map;
/**
 * Request body carried by client requests: a payload, its MIME type and extra headers.
 *
 * @author tjq
 * @since 2024/8/10
 */
@Getter
public class PowerRequestBody {

    private MIME mime;

    private Object payload;

    private final Map<String, String> headers = Maps.newHashMap();

    private PowerRequestBody() {
    }

    /**
     * Creates a JSON request body wrapping the given object.
     *
     * @param data object to be serialized as the JSON payload
     * @return new request body with MIME APPLICATION_JSON
     */
    public static PowerRequestBody newJsonRequestBody(Object data) {
        return newBody(MIME.APPLICATION_JSON, data);
    }

    /**
     * Creates a form request body wrapping the given key-value pairs.
     *
     * @param form form fields to submit
     * @return new request body with MIME APPLICATION_FORM
     */
    public static PowerRequestBody newFormRequestBody(Map<String, String> form) {
        return newBody(MIME.APPLICATION_FORM, form);
    }

    /**
     * Shared factory: builds a body with the given MIME type and payload.
     */
    private static PowerRequestBody newBody(MIME mime, Object payload) {
        PowerRequestBody requestBody = new PowerRequestBody();
        requestBody.mime = mime;
        requestBody.payload = payload;
        return requestBody;
    }

    /**
     * Merges the given headers into this body; a null or empty map is ignored.
     *
     * @param hs headers to add
     */
    public void addHeaders(Map<String, String> hs) {
        if (hs != null && !hs.isEmpty()) {
            this.headers.putAll(hs);
        }
    }
}

View File

@ -0,0 +1,15 @@
package tech.powerjob.client.service;
import java.io.Closeable;
/**
 * Request service abstraction used by the client to talk to the server.
 *
 * @author tjq
 * @since 2024/2/20
 */
public interface RequestService extends Closeable {

    /**
     * Sends a request to the given OpenAPI path and returns the raw response body.
     *
     * @param path             OpenAPI path to call
     * @param powerRequestBody request body (JSON or form)
     * @return raw response string
     */
    String request(String path, PowerRequestBody powerRequestBody);
}

View File

@ -0,0 +1,107 @@
package tech.powerjob.client.service.impl;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Maps;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.TypeStore;
import tech.powerjob.client.module.AppAuthRequest;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.enums.EncryptType;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.utils.DigestUtils;
import tech.powerjob.common.utils.MapUtils;
import java.util.Map;
/**
 * Encapsulates the OpenAPI app-authentication logic: lazily acquires an app
 * token, attaches it to every request, and transparently re-authenticates once
 * when the server reports the cached credentials as invalid.
 *
 * NOTE(review): {@link #appAuthResult} is read and written without
 * synchronization; concurrent first requests may trigger duplicate refreshes.
 * Believed harmless (last write wins) — confirm.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Slf4j
abstract class AppAuthClusterRequestService extends ClusterRequestService {

    /** Cached auth result (appId + token); null until the first successful auth. */
    protected AppAuthResult appAuthResult;

    public AppAuthClusterRequestService(ClientConfig config) {
        super(config);
    }

    @Override
    public String request(String path, PowerRequestBody powerRequestBody) {
        // No cached auth info yet -> authenticate first.
        if (appAuthResult == null) {
            refreshAppAuthResult();
        }

        HttpResponse httpResponse = doRequest(path, powerRequestBody);

        // If the server flagged the auth as passed, the response is valid — return it directly.
        String authStatus = MapUtils.getString(httpResponse.getHeaders(), OpenAPIConstant.RESPONSE_HEADER_AUTH_STATUS);
        if (Boolean.TRUE.toString().equalsIgnoreCase(authStatus)) {
            return httpResponse.getResponse();
        }

        // Otherwise the cached credentials are stale: refresh the auth info and retry once.
        log.warn("[PowerJobClient] auth failed[authStatus: {}], try to refresh the auth info", authStatus);
        refreshAppAuthResult();
        httpResponse = doRequest(path, powerRequestBody);

        // As long as the request itself succeeded, return the body; on auth failure
        // the server guarantees a non-empty response describing the auth error.
        return httpResponse.getResponse();
    }

    /**
     * Performs one authenticated HTTP request against the cluster.
     *
     * @param path             request path
     * @param powerRequestBody request body (auth headers are added here)
     * @return raw HTTP response
     * @throws PowerJobException when the HTTP layer reports a non-success response
     */
    private HttpResponse doRequest(String path, PowerRequestBody powerRequestBody) {
        // Attach the auth headers (appId + access token).
        Map<String, String> authHeaders = buildAuthHeader();
        powerRequestBody.addHeaders(authHeaders);

        HttpResponse httpResponse = clusterHaRequest(path, powerRequestBody);

        // Any transport-level failure is fatal for this call.
        if (!httpResponse.isSuccess()) {
            throw new PowerJobException("REMOTE_SERVER_INNER_EXCEPTION");
        }
        return httpResponse;
    }

    private Map<String, String> buildAuthHeader() {
        Map<String, String> authHeader = Maps.newHashMap();
        authHeader.put(OpenAPIConstant.REQUEST_HEADER_APP_ID, String.valueOf(appAuthResult.getAppId()));
        authHeader.put(OpenAPIConstant.REQUEST_HEADER_ACCESS_TOKEN, appAuthResult.getToken());
        return authHeader;
    }

    /**
     * Re-authenticates against the server and caches the new {@link AppAuthResult}.
     */
    @SneakyThrows
    private void refreshAppAuthResult() {
        AppAuthRequest appAuthRequest = buildAppAuthRequest();
        HttpResponse httpResponse = clusterHaRequest(OpenAPIConstant.AUTH_APP, PowerRequestBody.newJsonRequestBody(appAuthRequest));
        if (!httpResponse.isSuccess()) {
            throw new PowerJobException("AUTH_APP_EXCEPTION!");
        }
        ResultDTO<AppAuthResult> authResultDTO = JSONObject.parseObject(httpResponse.getResponse(), TypeStore.APP_AUTH_RESULT_TYPE);
        if (!authResultDTO.isSuccess()) {
            throw new PowerJobException("AUTH_FAILED_" + authResultDTO.getMessage());
        }
        // FIX: a successful refresh is informational, not a warning.
        log.info("[PowerJobClient] refresh auth info successfully!");
        this.appAuthResult = authResultDTO.getData();
    }

    /**
     * Builds the authentication request from the client config.
     * The password is never sent in clear text: it is MD5-digested client side.
     */
    protected AppAuthRequest buildAppAuthRequest() {
        AppAuthRequest appAuthRequest = new AppAuthRequest();
        appAuthRequest.setAppName(config.getAppName());
        appAuthRequest.setEncryptedPassword(DigestUtils.md5(config.getPassword()));
        appAuthRequest.setEncryptType(EncryptType.MD5.getCode());
        return appAuthRequest;
    }
}

View File

@ -0,0 +1,140 @@
package tech.powerjob.client.service.impl;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.extension.ClientExtension;
import tech.powerjob.client.extension.ExtensionContext;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.client.service.RequestService;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.utils.CollectionUtils;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.security.cert.X509Certificate;
import java.util.List;
import java.util.Objects;
/**
 * Cluster request service.
 * Encapsulates the networking logic shared by concrete implementations:
 * URL construction and client-side fail-over across the configured servers.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Slf4j
abstract class ClusterRequestService implements RequestService {

    protected final ClientConfig config;

    /**
     * Current address — the address used by the last successful request.
     */
    protected String currentAddress;

    /**
     * URL format:
     * protocol://host[:port] + OpenAPI root path + sub-path
     */
    protected static final String URL_PATTERN = "%s://%s%s%s";

    /**
     * Default timeout (seconds), used when the config does not specify one.
     */
    protected static final Integer DEFAULT_TIMEOUT_SECONDS = 2;

    protected static final int HTTP_SUCCESS_CODE = 200;

    public ClusterRequestService(ClientConfig config) {
        this.config = config;
        this.currentAddress = config.getAddressList().get(0);
    }

    /**
     * Performs one concrete HTTP request.
     *
     * @param url  full request URL
     * @param body request body
     * @return response
     * @throws IOException on any transport failure (triggers fail-over in
     *         {@link #clusterHaRequest})
     */
    protected abstract HttpResponse sendHttpRequest(String url, PowerRequestBody body) throws IOException;

    /**
     * Cluster-aware request: tries the last known good address first, then
     * fails over to every other candidate address.
     * Note: only {@link IOException} triggers fail-over; runtime exceptions
     * from the transport propagate directly to the caller.
     *
     * @param path             request PATH
     * @param powerRequestBody request body
     * @return response from the first address that answered
     */
    protected HttpResponse clusterHaRequest(String path, PowerRequestBody powerRequestBody) {
        // Try the current (last known good) address first.
        String url = getUrl(path, currentAddress);
        try {
            return sendHttpRequest(url, powerRequestBody);
        } catch (IOException e) {
            log.warn("[ClusterRequestService] request url:{} failed, reason is {}.", url, e.toString());
        }

        List<String> addressList = fetchAddressList();

        // Retry against the remaining addresses.
        for (String addr : addressList) {
            if (Objects.equals(addr, currentAddress)) {
                continue;
            }
            url = getUrl(path, addr);
            try {
                HttpResponse res = sendHttpRequest(url, powerRequestBody);
                log.warn("[ClusterRequestService] server change: from({}) -> to({}).", currentAddress, addr);
                currentAddress = addr;
                return res;
            } catch (IOException e) {
                log.warn("[ClusterRequestService] request url:{} failed, reason is {}.", url, e.toString());
            }
        }

        log.error("[ClusterRequestService] do post for path: {} failed because of no server available in {}.", path, addressList);
        throw new PowerJobException("no server available when send post request");
    }

    /**
     * Resolves the candidate server list, preferring the user-supplied
     * {@link ClientExtension} address provider over the static config list.
     */
    private List<String> fetchAddressList() {
        ClientExtension clientExtension = config.getClientExtension();
        if (clientExtension != null) {
            List<String> addressList = clientExtension.addressProvider(new ExtensionContext());
            if (!CollectionUtils.isEmpty(addressList)) {
                return addressList;
            }
        }
        return config.getAddressList();
    }

    /**
     * Trust manager that performs NO X.509 certificate validation.
     * SECURITY NOTE: this disables certificate-chain verification entirely, so
     * HTTPS connections made with it are vulnerable to man-in-the-middle
     * attacks; it should only be used on trusted networks.
     */
    protected static class NoVerifyX509TrustManager implements X509TrustManager {

        @Override
        public void checkClientTrusted(X509Certificate[] arg0, String arg1) {
        }

        @Override
        public void checkServerTrusted(X509Certificate[] arg0, String arg1) {
            // intentionally no verification
        }

        @Override
        public X509Certificate[] getAcceptedIssuers() {
            return new X509Certificate[0];
        }
    }

    private String getUrl(String path, String address) {
        String protocol = config.getProtocol().getProtocol();
        return String.format(URL_PATTERN, protocol, address, OpenAPIConstant.WEB_PATH, path);
    }
}

View File

@ -0,0 +1,148 @@
package tech.powerjob.client.service.impl;
import com.google.common.collect.Maps;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import okhttp3.*;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.common.Protocol;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.common.OmsConstant;
import tech.powerjob.common.serialize.JsonUtils;
import javax.net.ssl.*;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
 * OkHttp3-based implementation of the cluster request service.
 *
 * @author tjq
 * @since 2024/2/20
 */
@Slf4j
public class ClusterRequestServiceOkHttp3Impl extends AppAuthClusterRequestService {

    private final OkHttpClient okHttpClient;

    public ClusterRequestServiceOkHttp3Impl(ClientConfig config) {
        super(config);
        // Initialize the HTTP client. HTTPS uses a trust-all client: certificate
        // verification is intentionally disabled (see NoVerifyX509TrustManager).
        if (Protocol.HTTPS.equals(config.getProtocol())) {
            okHttpClient = initHttpsNoVerifyClient();
        } else {
            okHttpClient = initHttpClient();
        }
    }

    @Override
    protected HttpResponse sendHttpRequest(String url, PowerRequestBody powerRequestBody) throws IOException {
        // Attach the user-configured default headers.
        powerRequestBody.addHeaders(config.getDefaultHeaders());
        Object obj = powerRequestBody.getPayload();

        RequestBody requestBody;
        switch (powerRequestBody.getMime()) {
            case APPLICATION_JSON:
                MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
                String body = obj instanceof String ? (String) obj : JsonUtils.toJSONStringUnsafe(obj);
                requestBody = RequestBody.create(jsonType, body);
                break;
            case APPLICATION_FORM:
                FormBody.Builder formBuilder = new FormBody.Builder();
                // Form payloads are always Map<String, String>, built by PowerRequestBody#newFormRequestBody.
                @SuppressWarnings("unchecked")
                Map<String, String> formObj = (Map<String, String>) obj;
                formObj.forEach(formBuilder::add);
                requestBody = formBuilder.build();
                break;
            default:
                // FIX: fail fast with a clear message instead of passing a null body
                // to okhttp (which would surface as an uninformative NullPointerException).
                throw new UnsupportedOperationException("unsupported mime type: " + powerRequestBody.getMime());
        }

        Request request = new Request.Builder()
                .post(requestBody)
                .headers(Headers.of(powerRequestBody.getHeaders()))
                .url(url)
                .build();

        try (Response response = okHttpClient.newCall(request).execute()) {
            int code = response.code();
            HttpResponse httpResponse = new HttpResponse()
                    .setCode(code)
                    .setSuccess(code == HTTP_SUCCESS_CODE);
            ResponseBody body = response.body();
            if (body != null) {
                httpResponse.setResponse(body.string());
            }

            // Flatten the response headers (single value per name).
            Headers respHeaders = response.headers();
            Set<String> headerNames = respHeaders.names();
            Map<String, String> respHeaderMap = Maps.newHashMap();
            headerNames.forEach(hdKey -> respHeaderMap.put(hdKey, respHeaders.get(hdKey)));
            httpResponse.setHeaders(respHeaderMap);

            return httpResponse;
        }
    }

    /**
     * Plain HTTP client.
     */
    @SneakyThrows
    private OkHttpClient initHttpClient() {
        OkHttpClient.Builder okHttpBuilder = commonOkHttpBuilder();
        return okHttpBuilder.build();
    }

    /**
     * HTTPS client that skips both certificate and hostname verification.
     * SECURITY NOTE: vulnerable to man-in-the-middle attacks; trusted networks only.
     */
    @SneakyThrows
    private OkHttpClient initHttpsNoVerifyClient() {
        X509TrustManager trustManager = new NoVerifyX509TrustManager();
        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(null, new TrustManager[]{trustManager}, new SecureRandom());
        SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();

        OkHttpClient.Builder okHttpBuilder = commonOkHttpBuilder();
        // Skip certificate verification.
        okHttpBuilder.sslSocketFactory(sslSocketFactory, trustManager);
        // Skip hostname verification.
        okHttpBuilder.hostnameVerifier((String hostname, SSLSession session) -> true);

        return okHttpBuilder.build();
    }

    /**
     * Shared builder carrying the timeouts from the client config.
     * NOTE(review): callTimeout reuses the connection timeout — confirm intended.
     */
    private OkHttpClient.Builder commonOkHttpBuilder() {
        return new OkHttpClient.Builder()
                // read timeout
                .readTimeout(Optional.ofNullable(config.getReadTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // write timeout
                .writeTimeout(Optional.ofNullable(config.getWriteTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // connect timeout
                .connectTimeout(Optional.ofNullable(config.getConnectionTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                .callTimeout(Optional.ofNullable(config.getConnectionTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS);
    }

    @Override
    public void close() throws IOException {
        // Shut down the dispatcher's executor.
        okHttpClient.dispatcher().executorService().shutdown();
        // Evict all pooled connections.
        okHttpClient.connectionPool().evictAll();
        // Close the cache, if one was configured.
        Cache cache = okHttpClient.cache();
        if (cache != null) {
            cache.close();
        }
    }
}

View File

@ -1,7 +1,9 @@
package tech.powerjob.client.test;
import tech.powerjob.client.PowerJobClient;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.BeforeAll;
import tech.powerjob.client.IPowerJobClient;
import tech.powerjob.client.PowerJobClient;
/**
* Initialize OhMyClient
@ -11,10 +13,10 @@ import org.junit.jupiter.api.BeforeAll;
*/
public class ClientInitializer {
protected static PowerJobClient powerJobClient;
protected static IPowerJobClient powerJobClient;
@BeforeAll
public static void initClient() throws Exception {
powerJobClient = new PowerJobClient("127.0.0.1:7700", "powerjob-agent-test", "123");
powerJobClient = new PowerJobClient(Lists.newArrayList("127.0.0.1:7700", "127.0.0.1:7701"), "powerjob-worker-samples", "powerjob123");
}
}

View File

@ -1,19 +1,19 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.response.InstanceInfoDTO;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
import lombok.SneakyThrows;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.concurrent.TimeUnit;
/**
* Test cases for {@link PowerJobClient}
@ -22,17 +22,18 @@ import java.util.concurrent.TimeUnit;
* @author Echo009
* @since 2020/4/15
*/
@Slf4j
class TestClient extends ClientInitializer {
public static final long JOB_ID = 4L;
public static final long JOB_ID = 1L;
@Test
void testSaveJob() {
SaveJobInfoRequest newJobInfo = new SaveJobInfoRequest();
newJobInfo.setId(JOB_ID);
newJobInfo.setJobName("omsOpenAPIJobccccc");
newJobInfo.setJobDescription("test OpenAPI");
newJobInfo.setJobName("omsOpenAPIJobccccc" + System.currentTimeMillis());
newJobInfo.setJobDescription("test OpenAPI" + System.currentTimeMillis());
newJobInfo.setJobParams("{'aa':'bb'}");
newJobInfo.setTimeExpressionType(TimeExpressionType.CRON);
newJobInfo.setTimeExpression("0 0 * * * ? ");
@ -45,8 +46,10 @@ class TestClient extends ClientInitializer {
newJobInfo.setMinMemorySpace(1.2);
newJobInfo.setMinDiskSpace(1.3);
log.info("[TestClient] [testSaveJob] SaveJobInfoRequest: {}", JSONObject.toJSONString(newJobInfo));
ResultDTO<Long> resultDTO = powerJobClient.saveJob(newJobInfo);
System.out.println(JSONObject.toJSONString(resultDTO));
log.info("[TestClient] [testSaveJob] result: {}", JSONObject.toJSONString(resultDTO));
Assertions.assertNotNull(resultDTO);
}
@ -57,6 +60,12 @@ class TestClient extends ClientInitializer {
Assertions.assertNotNull(copyJobRes);
}
@Test
void testExportJob() {
ResultDTO<SaveJobInfoRequest> exportJobRes = powerJobClient.exportJob(JOB_ID);
System.out.println(JSONObject.toJSONString(exportJobRes));
}
@Test
void testFetchJob() {
ResultDTO<JobInfoDTO> fetchJob = powerJobClient.fetchJob(JOB_ID);
@ -87,7 +96,7 @@ class TestClient extends ClientInitializer {
@Test
void testRun() {
ResultDTO<Long> res = powerJobClient.runJob(JOB_ID);
ResultDTO<Long> res = powerJobClient.runJob(JOB_ID, null, 0);
System.out.println(res);
Assertions.assertNotNull(res);
}
@ -101,21 +110,32 @@ class TestClient extends ClientInitializer {
@Test
void testFetchInstanceInfo() {
ResultDTO<InstanceInfoDTO> res = powerJobClient.fetchInstanceInfo(205436386851946560L);
ResultDTO<InstanceInfoDTO> res = powerJobClient.fetchInstanceInfo(702482902331424832L);
System.out.println(res);
Assertions.assertNotNull(res);
}
@Test
void testQueryInstanceInfo() {
InstancePageQuery instancePageQuery = new InstancePageQuery();
instancePageQuery.setJobIdEq(11L);
instancePageQuery.setSortBy("actualTriggerTime");
instancePageQuery.setAsc(true);
instancePageQuery.setPageSize(3);
instancePageQuery.setStatusIn(Lists.newArrayList(1,2,5));
TestUtils.output(powerJobClient.queryInstanceInfo(instancePageQuery));
}
@Test
void testStopInstance() {
ResultDTO<Void> res = powerJobClient.stopInstance(205436995885858880L);
ResultDTO<Void> res = powerJobClient.stopInstance(702482902331424832L);
System.out.println(res);
Assertions.assertNotNull(res);
}
@Test
void testFetchInstanceStatus() {
ResultDTO<Integer> res = powerJobClient.fetchInstanceStatus(205436995885858880L);
ResultDTO<Integer> res = powerJobClient.fetchInstanceStatus(702482902331424832L);
System.out.println(res);
Assertions.assertNotNull(res);
}
@ -129,19 +149,19 @@ class TestClient extends ClientInitializer {
Assertions.assertTrue(cancelRes.isSuccess());
}
@Test
@SneakyThrows
void testCancelInstanceInDatabase() {
ResultDTO<Long> startRes = powerJobClient.runJob(15L, "start by OhMyClient", 2000000);
System.out.println("runJob result: " + JSONObject.toJSONString(startRes));
// Restart server manually and clear all the data in time wheeler.
TimeUnit.MINUTES.sleep(1);
ResultDTO<Void> cancelRes = powerJobClient.cancelInstance(startRes.getData());
System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes));
Assertions.assertTrue(cancelRes.isSuccess());
}
// @Test
// @SneakyThrows
// void testCancelInstanceInDatabase() {
// ResultDTO<Long> startRes = powerJobClient.runJob(15L, "start by OhMyClient", 2000000);
// System.out.println("runJob result: " + JSONObject.toJSONString(startRes));
//
// // Restart server manually and clear all the data in time wheeler.
// TimeUnit.MINUTES.sleep(1);
//
// ResultDTO<Void> cancelRes = powerJobClient.cancelInstance(startRes.getData());
// System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes));
// Assertions.assertTrue(cancelRes.isSuccess());
// }
@Test
void testRetryInstance() {

View File

@ -0,0 +1,35 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.utils.CommonUtils;
/**
 * Manual disaster-recovery (HA) test for the client: while the loop runs, an
 * operator starts and stops servers by hand; any failed request aborts the test.
 * Not suitable for CI — it loops for a very long time by design.
 *
 * @author tjq
 * @since 2024/8/11
 */
@Slf4j
public class TestClusterHA extends ClientInitializer {

    @Test
    void testHa() {
        // Manually start/stop servers while this runs; the client must keep succeeding.
        for (int i = 0; i < 1000000; i++) {
            CommonUtils.easySleep(100);
            ResultDTO<JobInfoDTO> jobInfoDTOResultDTO = powerJobClient.fetchJob(1L);
            log.info("[TestClusterHA] response: {}", JSONObject.toJSONString(jobInfoDTOResultDTO));
            if (!jobInfoDTOResultDTO.isSuccess()) {
                throw new RuntimeException("request failed!");
            }
        }
    }
}

View File

@ -0,0 +1,17 @@
package tech.powerjob.client.test;
import com.alibaba.fastjson.JSONObject;
/**
 * Test helpers.
 *
 * @author tjq
 * @since 2024/11/21
 */
public class TestUtils {

    // Utility class: prevent instantiation.
    private TestUtils() {
    }

    /**
     * Prints the JSON representation of the given value to stdout.
     *
     * @param v any JSON-serializable value
     */
    public static void output(Object v) {
        String str = JSONObject.toJSONString(v);
        System.out.println(str);
    }
}

View File

@ -29,7 +29,7 @@ import java.util.List;
*/
class TestWorkflow extends ClientInitializer {
private static final long WF_ID = 1;
private static final long WF_ID = 2;
@Test
void initTestData() {
@ -71,18 +71,18 @@ class TestWorkflow extends ClientInitializer {
SaveWorkflowNodeRequest saveWorkflowNodeRequest1 = new SaveWorkflowNodeRequest();
saveWorkflowNodeRequest1.setJobId(1L);
saveWorkflowNodeRequest1.setNodeName("DAG-Node-1");
saveWorkflowNodeRequest1.setType(WorkflowNodeType.JOB);
saveWorkflowNodeRequest1.setType(WorkflowNodeType.JOB.getCode());
SaveWorkflowNodeRequest saveWorkflowNodeRequest2 = new SaveWorkflowNodeRequest();
saveWorkflowNodeRequest2.setJobId(1L);
saveWorkflowNodeRequest2.setNodeName("DAG-Node-2");
saveWorkflowNodeRequest2.setType(WorkflowNodeType.JOB);
saveWorkflowNodeRequest2.setType(WorkflowNodeType.JOB.getCode());
SaveWorkflowNodeRequest saveWorkflowNodeRequest3 = new SaveWorkflowNodeRequest();
saveWorkflowNodeRequest3.setJobId(1L);
saveWorkflowNodeRequest3.setNodeName("DAG-Node-3");
saveWorkflowNodeRequest3.setType(WorkflowNodeType.JOB);
saveWorkflowNodeRequest3.setType(WorkflowNodeType.JOB.getCode());
List<WorkflowNodeInfoDTO> nodeList = powerJobClient.saveWorkflowNode(Lists.newArrayList(saveWorkflowNodeRequest1,saveWorkflowNodeRequest2,saveWorkflowNodeRequest3)).getData();
@ -149,7 +149,7 @@ class TestWorkflow extends ClientInitializer {
@Test
void testRunWorkflow() {
ResultDTO<Long> res = powerJobClient.runWorkflow(WF_ID);
ResultDTO<Long> res = powerJobClient.runWorkflow(WF_ID, null, 0);
System.out.println(res);
Assertions.assertNotNull(res);
}

View File

@ -5,24 +5,23 @@
<parent>
<artifactId>powerjob</artifactId>
<groupId>tech.powerjob</groupId>
<version>3.0.0</version>
<version>5.1.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>powerjob-common</artifactId>
<version>4.0.0</version>
<version>5.1.1</version>
<packaging>jar</packaging>
<properties>
<slf4j.version>1.7.30</slf4j.version>
<commons.lang.version>3.10</commons.lang.version>
<commons.io.version>2.6</commons.io.version>
<guava.version>29.0-jre</guava.version>
<slf4j.version>1.7.36</slf4j.version>
<commons.lang.version>3.12.0</commons.lang.version>
<commons.io.version>2.11.0</commons.io.version>
<guava.version>31.1-jre</guava.version>
<okhttp.version>3.14.9</okhttp.version>
<akka.version>2.6.12</akka.version>
<kryo.version>5.0.4</kryo.version>
<jackson.version>2.12.2</jackson.version>
<junit.version>5.6.1</junit.version>
<kryo.version>5.3.0</kryo.version>
<jackson.version>2.14.3</jackson.version>
<junit.version>5.9.0</junit.version>
</properties>
<dependencies>
@ -54,18 +53,6 @@
<version>${okhttp.version}</version>
</dependency>
<!-- akka remote -->
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_2.13</artifactId>
<version>${akka.version}</version>
</dependency>
<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-slf4j_2.13</artifactId>
<version>${akka.version}</version>
</dependency>
<!-- commons-io -->
<dependency>
<groupId>commons-io</groupId>
@ -90,6 +77,13 @@
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- 解决 Java8 data/time 类型处理问题 #869 -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- Junit tests -->
<dependency>

View File

@ -8,6 +8,11 @@ package tech.powerjob.common;
*/
public class OmsConstant {
/**
* package name
*/
public static final String PACKAGE = "tech.powerjob";
public static final int SERVER_DEFAULT_AKKA_PORT = 10086;
public static final int SERVER_DEFAULT_HTTP_PORT = 10010;
@ -17,8 +22,14 @@ public class OmsConstant {
public static final String NONE = "N/A";
public static final String COMMA = ",";
public static final String AND = "&";
public static final String EQUAL = "=";
public static final String LINE_SEPARATOR = "\r\n";
public static final String HTTP_HEADER_CONTENT_TYPE = "Content-Type";
public static final String JSON_MEDIA_TYPE = "application/json; charset=utf-8";
public static final String NULL = "null";
}

View File

@ -16,10 +16,14 @@ public class OpenAPIConstant {
public static final String ASSERT = "/assert";
public static final String AUTH_APP = "/authApp";
/* ************* JOB 区 ************* */
public static final String SAVE_JOB = "/saveJob";
public static final String COPY_JOB = "/copyJob";
public static final String EXPORT_JOB = "/exportJob";
public static final String FETCH_JOB = "/fetchJob";
public static final String FETCH_ALL_JOB = "/fetchAllJob";
public static final String QUERY_JOB = "/queryJob";
@ -54,4 +58,12 @@ public class OpenAPIConstant {
public static final String RETRY_WORKFLOW_INSTANCE = "/retryWfInstance";
public static final String FETCH_WORKFLOW_INSTANCE_INFO = "/fetchWfInstanceInfo";
public static final String MARK_WORKFLOW_NODE_AS_SUCCESS = "/markWorkflowNodeAsSuccess";
/* ************* 鉴权 ************* */
public static final String REQUEST_HEADER_ACCESS_TOKEN = "X-POWERJOB-ACCESS-TOKEN";
public static final String REQUEST_HEADER_APP_ID = "X-POWERJOB-APP-ID";
public static final String RESPONSE_HEADER_AUTH_STATUS = "X-POWERJOB-AUTH-PASSED";
}

View File

@ -5,7 +5,6 @@ import java.net.NetworkInterface;
/**
* 通过 JVM 启动参数传入的配置信息
*
*
* @author tjq
* @since 2020/8/8
*/
@ -16,13 +15,55 @@ public class PowerJobDKey {
*/
public static final String PREFERRED_NETWORK_INTERFACE = "powerjob.network.interface.preferred";
/**
* 绑定地址一般填写本机网卡地址
*/
public static final String BIND_LOCAL_ADDRESS = "powerjob.network.local.address";
/**
* 外部地址可选默认与绑定地址相同当存在 NAT 等场景时可通过单独传递外部地址来实现通讯
*/
public static final String NT_EXTERNAL_ADDRESS = "powerjob.network.external.address";
public static final String NT_EXTERNAL_PORT = "powerjob.network.external.port";
/**
* Java regular expressions for network interfaces that will be ignored.
*/
public static final String IGNORED_NETWORK_INTERFACE_REGEX = "powerjob.network.interface.ignored";
/**
* Enables compression during data transfer, such as gzip under the HTTP protocol. default value is 'false'
* Note that enabling compression reduces network usage, but increases CPU consumption
*/
public static final String TRANSPORTER_USE_COMPRESSING = "powerjob.transporter.compression.enabled";
/**
* keep-alive connection timeout(in seconds), value <= 0 means disable keepalive. default value is 75
*/
public static final String TRANSPORTER_KEEP_ALIVE_TIMEOUT = "powerjob.transporter.keepalive.timeout";
public static final String WORKER_STATUS_CHECK_PERIOD = "powerjob.worker.status-check.normal.period";
/**
* allowed PowerJob to invoke Thread#stop to kill a thread when PowerJob can't interrupt the thread
* <a href="https://stackoverflow.com/questions/16504140/thread-stop-deprecated">It's VERY dangerous</a>
*/
public static final String WORKER_ALLOWED_FORCE_STOP_THREAD = "powerjob.worker.allowed-force-stop-thread";
public static final String WORKER_WORK_SPACE = "powerjob.worker.workspace";
/**
* ms
*/
public static final String FREQUENCY_JOB_MAX_INTERVAL = "powerjob.server.frequency-job.max-interval";
/* ******************* 不太可能有人用的参数,主要方便内部测试 ******************* */
/**
* 最大活跃任务数量超出部分 SWAP 到磁盘以提升性能
*/
public static final String WORKER_RUNTIME_SWAP_MAX_ACTIVE_TASK_NUM = "powerjob.worker.swap.max-active-task-num";
public static final String WORKER_RUNTIME_SWAP_TASK_SCHEDULE_INTERVAL_MS = "powerjob.worker.swap.scan-interval";
public static final String SERVER_TEST_ACCOUNT_USERNAME = "powerjob.server.test-accounts";
}

View File

@ -9,12 +9,4 @@ import java.io.Serializable;
* @since 2020/4/16
*/
public interface PowerSerializable extends Serializable {
/**
* request path for http or other protocol, like '/worker/stopInstance'
* @return null for non-http request object or no-null path for http request needed object
*/
default String path() {
return null;
}
}

View File

@ -1,18 +0,0 @@
package tech.powerjob.common;
/**
* HttpProtocolConstant
*
* @author tjq
* @since 2021/2/8
*/
public class ProtocolConstant {
public static final String SERVER_PATH_HEARTBEAT = "/server/heartbeat";
public static final String SERVER_PATH_STATUS_REPORT = "/server/statusReport";
public static final String SERVER_PATH_LOG_REPORT = "/server/logReport";
public static final String WORKER_PATH_DISPATCH_JOB = "/worker/runJob";
public static final String WORKER_PATH_STOP_INSTANCE = "/worker/stopInstance";
public static final String WORKER_PATH_QUERY_INSTANCE_INFO = "/worker/queryInstanceInfo";
}

View File

@ -12,25 +12,93 @@ public class RemoteConstant {
/* ************************ AKKA WORKER ************************ */
public static final int DEFAULT_WORKER_PORT = 27777;
public static final String WORKER_ACTOR_SYSTEM_NAME = "oms";
public static final String TASK_TRACKER_ACTOR_NAME = "task_tracker";
public static final String PROCESSOR_TRACKER_ACTOR_NAME = "processor_tracker";
public static final String WORKER_ACTOR_NAME = "worker";
public static final String TROUBLESHOOTING_ACTOR_NAME = "troubleshooting";
public static final String WORKER_AKKA_CONFIG_NAME = "oms-worker.akka.conf";
/* ************************ AKKA SERVER ************************ */
public static final String SERVER_ACTOR_SYSTEM_NAME = "oms-server";
public static final String SERVER_ACTOR_NAME = "server_actor";
public static final String SERVER_FRIEND_ACTOR_NAME = "friend_actor";
public static final String SERVER_AKKA_CONFIG_NAME = "oms-server.akka.conf";
/* ************************ OTHERS ************************ */
public static final String EMPTY_ADDRESS = "N/A";
public static final long DEFAULT_TIMEOUT_MS = 5000;
/* ************************ SERVER-self_side (s4s == server for server side) ************************ */
public static final String S4S_PATH = "friend";
/**
* server 集群间的心跳处理
*/
public static final String S4S_HANDLER_PING = "ping";
/**
* 处理其他 server 的执行请求
*/
public static final String S4S_HANDLER_PROCESS = "process";
/* ************************ SERVER-worker_sides4w == server for worker side ************************ */
public static final String S4W_PATH = "server";
/**
* server 处理在线日志
*/
public static final String S4W_HANDLER_REPORT_LOG = "reportLog";
/**
* server 处理 worker 心跳
*/
public static final String S4W_HANDLER_WORKER_HEARTBEAT = "workerHeartbeat";
/**
* server 处理 TaskTracker 上报的任务实例状态
*/
public static final String S4W_HANDLER_REPORT_INSTANCE_STATUS = "reportInstanceStatus";
/**
* server 查询任务的可执行集群
*/
public static final String S4W_HANDLER_QUERY_JOB_CLUSTER = "queryJobCluster";
/**
* server 处理 worker 请求部署容器命令
*/
public static final String S4W_HANDLER_WORKER_NEED_DEPLOY_CONTAINER = "queryContainer";
/* ************************ Worker-TaskTracker ************************ */
public static final String WTT_PATH = "taskTracker";
/**
* server 任务执行命令
*/
public static final String WTT_HANDLER_RUN_JOB = "runJob";
/**
* server 停止任务实例命令
*/
public static final String WTT_HANDLER_STOP_INSTANCE = "stopInstance";
/**
* sever 查询任务状态
*/
public static final String WTT_HANDLER_QUERY_INSTANCE_STATUS = "queryInstanceStatus";
/**
* PT 上报任务状态包含执行结果
*/
public static final String WTT_HANDLER_REPORT_TASK_STATUS = "reportTaskStatus";
/**
* PT 上报自身状态
*/
public static final String WTT_HANDLER_REPORT_PROCESSOR_TRACKER_STATUS = "reportProcessorTrackerStatus";
/**
* Map 任务
*/
public static final String WTT_HANDLER_MAP_TASK = "mapTask";
/* ************************ Worker-ProcessorTracker ************************ */
public static final String WPT_PATH = "processorTracker";
public static final String WPT_HANDLER_START_TASK = "startTask";
public static final String WPT_HANDLER_STOP_INSTANCE = "stopInstance";
/* ************************ Worker-NORMAL ************************ */
public static final String WORKER_PATH = "worker";
public static final String WORKER_HANDLER_DEPLOY_CONTAINER = "deployContainer";
public static final String WORKER_HANDLER_DESTROY_CONTAINER = "destroyContainer";
}

View File

@ -26,6 +26,23 @@ public class SystemInstanceResult {
* 任务执行超时
*/
public static final String INSTANCE_EXECUTE_TIMEOUT = "instance execute timeout";
/**
* 任务执行超时成功打断任务
*/
public static final String INSTANCE_EXECUTE_TIMEOUT_INTERRUPTED = "instance execute timeout,interrupted success";
/**
* 任务执行超时强制终止任务
*/
public static final String INSTANCE_EXECUTE_TIMEOUT_FORCE_STOP= "instance execute timeout,force stop success";
/**
* 用户手动停止任务成功打断任务
*/
public static final String USER_STOP_INSTANCE_INTERRUPTED= "user stop instance,interrupted success";
/**
* 用户手动停止任务被系统强制终止
*/
public static final String USER_STOP_INSTANCE_FORCE_STOP= "user stop instance,force stop success";
/**
* 创建根任务失败
*/

View File

@ -0,0 +1,26 @@
package tech.powerjob.common.enhance;
import lombok.extern.slf4j.Slf4j;
import java.util.concurrent.ScheduledExecutorService;
/**
 * A "safe" {@link Runnable} that prevents a periodic task from being silently
 * cancelled by an uncaught exception.
 * When scheduling with a {@link ScheduledExecutorService}, extend this class
 * so exceptions are caught and logged instead of terminating the schedule.
 * Note: only {@link Exception} is caught — an {@link Error} will still
 * terminate the periodic task.
 *
 * @author songyinyin
 * @since 2023/9/20 15:52
 */
@Slf4j
public abstract class SafeRunnable implements Runnable{
    @Override
    public void run() {
        try {
            run0();
        } catch (Exception e) {
            log.error("[SafeRunnable] run failed", e);
        }
    }

    /**
     * The actual task body; exceptions thrown here are logged, not propagated.
     */
    protected abstract void run0();
}

View File

@ -0,0 +1,30 @@
package tech.powerjob.common.enhance;
import lombok.extern.slf4j.Slf4j;
import java.util.concurrent.ScheduledExecutorService;
/**
 * Wraps an arbitrary {@link Runnable} so that, when scheduled with a
 * {@link ScheduledExecutorService}, an uncaught exception is logged instead of
 * silently cancelling the periodic task.
 * Note: only {@link Exception} is caught — an {@link Error} still terminates
 * the schedule.
 *
 * @author songyinyin
 * @since 2023/9/20 16:04
 */
@Slf4j
public class SafeRunnableWrapper implements Runnable {

    /** The wrapped task; never null. */
    private final Runnable runnable;

    /**
     * @param runnable the task to wrap, must not be null
     * @throws IllegalArgumentException if {@code runnable} is null
     */
    public SafeRunnableWrapper(Runnable runnable) {
        // FIX: fail fast at construction time instead of a deferred NPE inside run().
        if (runnable == null) {
            throw new IllegalArgumentException("runnable must not be null");
        }
        this.runnable = runnable;
    }

    @Override
    public void run() {
        try {
            runnable.run();
        } catch (Exception e) {
            log.error("[SafeRunnableWrapper] run failed", e);
        }
    }
}

View File

@ -13,8 +13,19 @@ import lombok.Getter;
@AllArgsConstructor
public enum DispatchStrategy {
/**
* 健康度优先
*/
HEALTH_FIRST(1),
RANDOM(2);
/**
* 随机
*/
RANDOM(2),
/**
* 指定执行
*/
SPECIFY(11)
;
private final int v;

View File

@ -0,0 +1,22 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Supported encryption / digest types for stored credentials.
 *
 * @author tjq
 * @since 2024/8/10
 */
@Getter
@AllArgsConstructor
public enum EncryptType {

    /** No encryption: the value is stored as-is. */
    NONE("none"),
    /** MD5 digest. NOTE(review): MD5 is broken for security purposes — confirm usage is legacy-compat only. */
    MD5("md5")
    ;

    /** Persisted string code identifying the encrypt type. */
    private final String code;
}

View File

@ -0,0 +1,71 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Authentication / authorization error codes.
 *
 * @author tjq
 * @since 2024/2/11
 */
@Getter
@AllArgsConstructor
public enum ErrorCodes {

    /** User is not logged in. */
    USER_NOT_LOGIN("-100", "UserNotLoggedIn"),
    /** User does not exist. */
    USER_NOT_EXIST("-101", "UserNotExist"),
    /** User authentication failed. */
    USER_AUTH_FAILED("-102", "UserAuthFailed"),
    /**
     * Account has been disabled.
     */
    USER_DISABLED("-103", "UserDisabled"),
    /** Caller lacks permission for the requested resource. */
    NO_PERMISSION("-200", "NoPermission"),
    /**
     * Invalid request, usually a parameter problem.
     */
    INVALID_REQUEST("-300", "INVALID_REQUEST"),
    /** Wrong password supplied. */
    INCORRECT_PASSWORD("-400", "INCORRECT_PASSWORD"),
    /**
     * Illegal token.
     */
    INVALID_TOKEN("-401", "INVALID_TOKEN"),
    /**
     * Invalid APP (the app can't be found).
     */
    INVALID_APP("-402", "INVALID_APP"),
    /**
     * Token has expired.
     */
    TOKEN_EXPIRED("-403", "TOKEN_EXPIRED"),
    /**
     * Internal system error.
     */
    SYSTEM_UNKNOWN_ERROR("-500", "SYS_UNKNOWN_ERROR"),
    /**
     * Illegal arguments.
     */
    ILLEGAL_ARGS_ERROR("-501", "ILLEGAL_ARGS_ERROR"),
    /**
     * OPENAPI error-code range: -10XX.
     */
    OPEN_API_AUTH_FAILED("-1002", "OPEN_API_AUTH_FAILED"),
    /**
     * PowerJobClient error-code range.
     */
    CLIENT_HTTP_REQUEST_FAILED("-2001", "CLIENT_HTTP_REQUEST_FAILED"),
    ;

    /** Machine-readable error code (negative, namespaced by range). */
    private final String code;
    /** Human-readable error message. */
    private final String msg;
}

View File

@ -26,8 +26,8 @@ public enum ExecuteType {
MAP_REDUCE(3, "MapReduce"),
MAP(4, "Map");
int v;
String des;
private final int v;
private final String des;
public static ExecuteType of(int v) {
for (ExecuteType type : values()) {

View File

@ -18,7 +18,8 @@ public enum LogLevel {
DEBUG(1),
INFO(2),
WARN(3),
ERROR(4);
ERROR(4),
OFF(99);
private final int v;

View File

@ -0,0 +1,37 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Destination of instance logs produced by processors.
 *
 * @author tjq
 * @since 2022/10/3
 */
@Getter
@AllArgsConstructor
public enum LogType {

    ONLINE(1),
    LOCAL(2),
    STDOUT(3),
    LOCAL_AND_ONLINE(4),
    NULL(999);

    /** Persisted numeric code of the log type. */
    private final Integer v;

    /**
     * Resolves a numeric code to its {@link LogType}.
     * Unknown or null codes fall back to {@link #ONLINE}.
     */
    public static LogType of(Integer type) {
        if (type != null) {
            for (LogType candidate : values()) {
                if (candidate.v.equals(type)) {
                    return candidate;
                }
            }
        }
        return ONLINE;
    }
}

View File

@ -0,0 +1,22 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * <a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types">Message content types (MIME)</a>
 * used when building HTTP requests.
 *
 * @author tjq
 * @since 2024/8/10
 */
@Getter
@AllArgsConstructor
public enum MIME {

    /** JSON body, UTF-8 encoded. */
    APPLICATION_JSON("application/json; charset=utf-8"),
    /** URL-encoded form body. */
    APPLICATION_FORM("application/x-www-form-urlencoded")
    ;

    /** The exact Content-Type header value. */
    private final String code;
}

View File

@ -1,4 +1,4 @@
package tech.powerjob.server.common.constants;
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
@ -13,10 +13,16 @@ import lombok.Getter;
@AllArgsConstructor
public enum SwitchableStatus {
/**
*
* 启用
*/
ENABLE(1),
/**
* 关闭
*/
DISABLE(2),
/**
* 软删除
*/
DELETED(99);
private final int v;

View File

@ -0,0 +1,42 @@
package tech.powerjob.common.enums;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Behaviour options for the TaskTracker node of a distributed job.
 *
 * @author tjq
 * @since 2024/2/24
 */
@Getter
@AllArgsConstructor
public enum TaskTrackerBehavior {

    /**
     * Normal: the TaskTracker also takes part in the computation, so its load is higher
     * than a regular node. Suitable when the cluster is small and tasks are not heavy.
     */
    NORMAL(1),
    /**
     * Paddling: the node only coordinates and does not compute, trading one compute
     * node for maximum stability. Suitable for very large-scale computation clusters.
     */
    PADDLING(11)
    ;

    /** Persisted numeric code of the behaviour. */
    private final Integer v;

    /**
     * Resolves a numeric code to its {@link TaskTrackerBehavior}.
     * Unknown or null codes fall back to {@link #NORMAL}.
     */
    public static TaskTrackerBehavior of(Integer type) {
        if (type != null) {
            for (TaskTrackerBehavior candidate : values()) {
                if (candidate.v.equals(type)) {
                    return candidate;
                }
            }
        }
        return NORMAL;
    }
}

View File

@ -3,7 +3,9 @@ package tech.powerjob.common.enums;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.ToString;
import java.util.Collections;
import java.util.List;
/**
@ -14,17 +16,24 @@ import java.util.List;
*/
@Getter
@AllArgsConstructor
@ToString
public enum TimeExpressionType {
API(1),
CRON(2),
FIXED_RATE(3),
FIXED_DELAY(4),
WORKFLOW(5);
WORKFLOW(5),
int v;
DAILY_TIME_INTERVAL(11);
public static final List<Integer> frequentTypes = Lists.newArrayList(FIXED_RATE.v, FIXED_DELAY.v);
private final int v;
public static final List<Integer> FREQUENT_TYPES = Collections.unmodifiableList(Lists.newArrayList(FIXED_RATE.v, FIXED_DELAY.v));
/**
* 首次计算触发时间时必须计算出一个有效值
*/
public static final List<Integer> INSPECT_TYPES = Collections.unmodifiableList(Lists.newArrayList(CRON.v, DAILY_TIME_INTERVAL.v));
public static TimeExpressionType of(int v) {
for (TimeExpressionType type : values()) {

View File

@ -4,6 +4,7 @@ import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.util.Collections;
import java.util.List;
/**
@ -27,11 +28,11 @@ public enum WorkflowInstanceStatus {
/**
* 广义的运行状态
*/
public static final List<Integer> GENERALIZED_RUNNING_STATUS = Lists.newArrayList(WAITING.v, RUNNING.v);
public static final List<Integer> GENERALIZED_RUNNING_STATUS = Collections.unmodifiableList(Lists.newArrayList(WAITING.v, RUNNING.v));
/**
* 结束状态
*/
public static final List<Integer> FINISHED_STATUS = Lists.newArrayList(FAILED.v, SUCCEED.v, STOPPED.v);
public static final List<Integer> FINISHED_STATUS = Collections.unmodifiableList(Lists.newArrayList(FAILED.v, SUCCEED.v, STOPPED.v));
private final int v;

View File

@ -15,9 +15,33 @@ public enum WorkflowNodeType {
/**
* 任务节点
*/
JOB(1);
JOB(1,false),
/**
* 判断节点
*/
DECISION(2,true),
/**
* 内嵌工作流
*/
NESTED_WORKFLOW(3,false),
;
private final int code;
/**
* 控制节点
*/
private final boolean controlNode;
public static WorkflowNodeType of(int code) {
for (WorkflowNodeType nodeType : values()) {
if (nodeType.code == code) {
return nodeType;
}
}
throw new IllegalArgumentException("unknown WorkflowNodeType of " + code);
}
}

View File

@ -0,0 +1,10 @@
package tech.powerjob.common.exception;
/**
 * Thrown from code paths that are believed to be unreachable ("this can never happen").
 * If one of these surfaces at runtime it indicates a programming error, not a user error.
 *
 * @author tjq
 * @since 2023/7/12
 */
public class ImpossibleException extends RuntimeException {

    public ImpossibleException() {
    }

    /**
     * @param message description of the supposedly impossible condition that occurred
     */
    public ImpossibleException(String message) {
        super(message);
    }

    /**
     * @param message description of the supposedly impossible condition that occurred
     * @param cause   the underlying exception, preserved for diagnostics
     */
    public ImpossibleException(String message, Throwable cause) {
        super(message, cause);
    }
}

View File

@ -1,13 +1,21 @@
package tech.powerjob.common.exception;
import lombok.Getter;
import lombok.Setter;
import tech.powerjob.common.enums.ErrorCodes;
/**
* PowerJob 运行时异常
*
* @author tjq
* @since 2020/5/26
*/
@Setter
@Getter
public class PowerJobException extends RuntimeException {
protected String code;
public PowerJobException() {
}
@ -15,6 +23,11 @@ public class PowerJobException extends RuntimeException {
super(message);
}
public PowerJobException(ErrorCodes errorCode, String extraMsg) {
super(extraMsg == null ? errorCode.getMsg() : errorCode.getMsg().concat(":").concat(extraMsg));
this.code = errorCode.getCode();
}
public PowerJobException(String message, Throwable cause) {
super(message, cause);
}

View File

@ -0,0 +1,17 @@
package tech.powerjob.common.exception;
import tech.powerjob.common.enums.ErrorCodes;
/**
 * Helper whose constructor ALWAYS throws: instantiating it is the throw.
 * {@code new PowerJobExceptionLauncher(code, msg)} raises a {@link PowerJobException}
 * built from the given error code and extra message.
 * NOTE(review): presumably exists so an exception can be raised inside an expression
 * context (e.g. a lambda or ternary) — confirm against call sites.
 *
 * @author tjq
 * @since 2024/11/22
 */
public class PowerJobExceptionLauncher {

    /**
     * @param errorCode the error code used to build the exception
     * @param message   extra message appended to the error code's message
     * @throws PowerJobException always
     */
    public PowerJobExceptionLauncher(ErrorCodes errorCode, String message) {
        throw new PowerJobException(errorCode, message);
    }
}

View File

@ -0,0 +1,28 @@
package tech.powerjob.common.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Alarm configuration for a job: how many failures within a statistics window trigger
 * an alert, and how long alerts are suppressed afterwards.
 *
 * @author Echo009
 * @since 2022/1/25
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class AlarmConfig {

    /**
     * Threshold that triggers the alarm.
     */
    private Integer alertThreshold;

    /**
     * Length of the statistics window, in seconds.
     */
    private Integer statisticWindowLen;

    /**
     * Length of the silence (suppression) window, in seconds.
     */
    private Integer silenceWindowLen;
}

View File

@ -1,8 +1,8 @@
package tech.powerjob.common.model;
import tech.powerjob.common.PowerSerializable;
import lombok.Data;
import lombok.NoArgsConstructor;
import tech.powerjob.common.PowerSerializable;
import java.util.List;
@ -51,8 +51,15 @@ public class InstanceDetail implements PowerSerializable {
/**
* Task detail, used by MapReduce or Broadcast tasks.
* 命名有点问题实际是 task 统计信息
*/
private TaskDetail taskDetail;
/**
* 查询出来的 Task 详细结果
*/
private List<TaskDetailInfo> queriedTaskDetailInfoList;
/**
* Sub instance details, used by frequent tasks.
*/
@ -92,5 +99,14 @@ public class InstanceDetail implements PowerSerializable {
private long totalTaskNum;
private long succeedTaskNum;
private long failedTaskNum;
// 等待派发状态仅存在 TaskTracker 数据库中
protected Long waitingDispatchTaskNum;
// 已派发 ProcessorTracker 未确认可能由于网络错误请求未送达也有可能 ProcessorTracker 线程池满拒绝执行
protected Long workerUnreceivedTaskNum;
// ProcessorTracker确认接收存在与线程池队列中排队执行
protected Long receivedTaskNum;
// ProcessorTracker正在执行
protected Long runningTaskNum;
}
}

View File

@ -0,0 +1,25 @@
package tech.powerjob.common.model;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
/**
 * Advanced runtime configuration of a job.
 *
 * @author tjq
 * @since 2024/2/24
 */
@Getter
@Setter
@ToString
@Accessors(chain = true)
public class JobAdvancedRuntimeConfig {

    /**
     * MapReduce-only parameter: TaskTracker behaviour, encoded as the numeric code of
     * {@link tech.powerjob.common.enums.TaskTrackerBehavior}.
     */
    private Integer taskTrackerBehavior;
}

View File

@ -0,0 +1,28 @@
package tech.powerjob.common.model;
import lombok.Data;
import tech.powerjob.common.serialize.JsonUtils;
/**
 * Validity window of a job/workflow schedule, bounded by {@code start} and {@code end}
 * (assumed to be epoch milliseconds — TODO confirm against callers).
 *
 * @author Echo009
 * @since 2022/3/22
 */
@Data
public class LifeCycle {

    // NOTE(review): this is a SHARED MUTABLE instance (@Data generates setters), and it
    // is returned on every parse failure — a caller mutating it would affect everyone.
    // Consider returning a fresh instance; confirm no caller relies on identity first.
    public static final LifeCycle EMPTY_LIFE_CYCLE = new LifeCycle();

    // Window start; null means unbounded — TODO confirm.
    private Long start;

    // Window end; null means unbounded — TODO confirm.
    private Long end;

    /**
     * Parses a JSON string into a {@link LifeCycle}. Any failure (including null or
     * malformed input) silently falls back to {@link #EMPTY_LIFE_CYCLE}.
     */
    public static LifeCycle parse(String lifeCycle){
        try {
            return JsonUtils.parseObject(lifeCycle,LifeCycle.class);
        }catch (Exception e){
            // ignore: best-effort parsing, deliberately degrade to the empty life cycle
            return EMPTY_LIFE_CYCLE;
        }
    }
}

View File

@ -0,0 +1,32 @@
package tech.powerjob.common.model;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
/**
* 任务日志配置
*
* @author yhz
* @since 2022/9/16
*/
@Getter
@Setter
@ToString
@Accessors(chain = true)
public class LogConfig {
/**
* log type {@link tech.powerjob.common.enums.LogType}
*/
private Integer type;
/**
* log level {@link tech.powerjob.common.enums.LogLevel}
*/
private Integer level;
private String loggerName;
}

View File

@ -3,10 +3,10 @@ package tech.powerjob.common.model;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import tech.powerjob.common.enums.WorkflowNodeType;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
@ -38,10 +38,10 @@ public class PEWorkflowDAG implements Serializable {
@Data
@Accessors(chain = true)
@NoArgsConstructor
@AllArgsConstructor
public static class Node implements Serializable {
/**
* node id
*
* @since 20210128
*/
private Long nodeId;
@ -49,11 +49,15 @@ public class PEWorkflowDAG implements Serializable {
/**
* note type
*
* @see WorkflowNodeType
* @since 20210316
*/
private Integer nodeType;
/**
* job id
* job id or workflow id (if this Node type is a nested workflow)
*
* @see WorkflowNodeType#NESTED_WORKFLOW
*/
private Long jobId;
/**
@ -61,23 +65,41 @@ public class PEWorkflowDAG implements Serializable {
*/
private String nodeName;
@JsonSerialize(using= ToStringSerializer.class)
@JsonSerialize(using = ToStringSerializer.class)
private Long instanceId;
/**
* for decision node, it is JavaScript code
*/
private String nodeParams;
private Integer status;
/**
* for decision node, it only be can "true" or "false"
*/
private String result;
/**
* instanceId will be null if disable .
*/
private Boolean enable;
/**
* mark node which disable by control node.
*/
private Boolean disableByControlNode;
private Boolean skipWhenFailed;
private String startTime;
private String finishedTime;
public Node(Long nodeId) {
this.nodeId = nodeId;
this.nodeType = WorkflowNodeType.JOB.getCode();
}
public Node(Long nodeId, Integer nodeType) {
this.nodeId = nodeId;
this.nodeType = nodeType;
}
}
@ -86,10 +108,29 @@ public class PEWorkflowDAG implements Serializable {
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class Edge implements Serializable {
private Long from;
private Long to;
/**
* property,support for complex flow control
* for decision node , it can be "true" or "false"
*/
private String property;
private Boolean enable;
public Edge(long from, long to) {
this.from = from;
this.to = to;
}
public Edge(long from, long to, String property) {
this.from = from;
this.to = to;
this.property = property;
}
}
public PEWorkflowDAG(@Nonnull List<Node> nodes, @Nullable List<Edge> edges) {

View File

@ -0,0 +1,43 @@
package tech.powerjob.common.model;
import lombok.Data;
import lombok.experimental.Accessors;
import tech.powerjob.common.PowerSerializable;
/**
 * Detailed information about a single Task (the unit of work managed by a TaskTracker).
 *
 * @author tjq
 * @since 2024/2/25
 */
@Data
@Accessors(chain = true)
public class TaskDetailInfo implements PowerSerializable {

    private String taskId;

    private String taskName;

    /**
     * Serialized task payload (e.g. the subTask of a map job).
     */
    private String taskContent;

    /**
     * Address of the processor executing this task.
     */
    private String processorAddress;

    // Numeric task status code.
    private Integer status;
    // Human-readable form of {@code status}.
    private String statusStr;
    private String result;
    // Number of times this task has failed so far.
    private Integer failedCnt;

    /**
     * Creation time (epoch millis — TODO confirm).
     */
    private Long createdTime;

    /**
     * Last modification time (epoch millis — TODO confirm).
     */
    private Long lastModifiedTime;

    /**
     * Time the ProcessorTracker last reported this task (epoch millis — TODO confirm).
     */
    private Long lastReportTime;
}

View File

@ -0,0 +1,24 @@
package tech.powerjob.common.model;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import java.io.Serializable;
/**
 * App information resolved for a worker during registration.
 *
 * @author tjq
 * @since 2023/9/2
 */
@Data
@NoArgsConstructor
@Accessors(chain = true)
public class WorkerAppInfo implements Serializable {

    /**
     * Unique ID of the application.
     */
    private Long appId;
}

View File

@ -0,0 +1,62 @@
package tech.powerjob.common.request;
import lombok.Setter;
import lombok.experimental.Accessors;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.common.enums.Protocol;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
/**
 * Server-discovery request sent by a worker to look up its current server.
 *
 * @author tjq
 * @since 2023/1/21
 */
@Setter
@Accessors(chain = true)
public class ServerDiscoveryRequest implements Serializable {

    private Long appId;

    private String protocol;

    private String currentServer;

    private String clientVersion;

    /**
     * Flattens this request into query parameters, omitting absent values.
     */
    public Map<String, Object> toMap() {
        final Map<String, Object> params = new HashMap<>();
        // appId may be null in testMode; unconditionally writing it would break testMode startup (#580)
        if (appId != null) {
            params.put("appId", appId);
        }
        params.put("protocol", protocol);
        if (StringUtils.isNotEmpty(currentServer)) {
            params.put("currentServer", currentServer);
        }
        if (StringUtils.isNotEmpty(clientVersion)) {
            params.put("clientVersion", clientVersion);
        }
        return params;
    }

    public Long getAppId() {
        return appId;
    }

    /**
     * Returns the requested protocol, defaulting to AKKA when none was set.
     */
    public String getProtocol() {
        return protocol == null ? Protocol.AKKA.name() : protocol;
    }

    public String getCurrentServer() {
        return currentServer;
    }

    public String getClientVersion() {
        return clientVersion;
    }
}

View File

@ -1,10 +1,9 @@
package tech.powerjob.common.request;
import tech.powerjob.common.PowerSerializable;
import tech.powerjob.common.ProtocolConstant;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import tech.powerjob.common.PowerSerializable;
/**
* 服务器查询实例运行状态需要返回详细的运行数据
@ -18,8 +17,11 @@ import lombok.NoArgsConstructor;
public class ServerQueryInstanceStatusReq implements PowerSerializable {
private Long instanceId;
@Override
public String path() {
return ProtocolConstant.WORKER_PATH_QUERY_INSTANCE_INFO;
}
/**
* 自定义查询
* 针对高阶用户直接开放底库查询便于运维和排查问题
* 此处只传递查询条件前置拼接 select *后置拼接 limit
*/
private String customQuery;
}

View File

@ -1,8 +1,7 @@
package tech.powerjob.common.request;
import tech.powerjob.common.PowerSerializable;
import tech.powerjob.common.ProtocolConstant;
import lombok.Data;
import tech.powerjob.common.PowerSerializable;
import java.util.List;
@ -15,9 +14,16 @@ import java.util.List;
@Data
public class ServerScheduleJobReq implements PowerSerializable {
// 可用处理器地址可能多值逗号分隔
/**
* 可用处理器地址可能多值逗号分隔
*/
private List<String> allWorkerAddress;
/**
* 最大机器数量
*/
private Integer maxWorkerCount;
/* *********************** 任务相关属性 *********************** */
/**
@ -32,48 +38,68 @@ public class ServerScheduleJobReq implements PowerSerializable {
private Long instanceId;
/**
* 任务执行处理器信息
* 任务执行类型单机广播MR
*/
// 任务执行类型单机广播MR
private String executeType;
// 处理器类型JavaBeanJar脚本等
/**
* 处理器类型内建外部
*/
private String processorType;
// 处理器信息
/**
* 处理器信息
*/
private String processorInfo;
/**
* 超时时间
* 整个任务的总体超时时间
*/
// 整个任务的总体超时时间
private long instanceTimeoutMS;
/**
* 任务运行参数
* 任务级别的参数相当于类的static变量
*/
// 任务级别的参数相当于类的static变量
private String jobParams;
// 实例级别的参数相当于类的普通变量API触发专用从API触发处带入
/**
* 实例级别的参数相当于类的普通变量API触发专用从API触发处带入
*/
private String instanceParams;
// 每台机器的处理线程数上限
/**
* 每台机器的处理线程数上限
*/
private int threadConcurrency;
// 子任务重试次数任务本身的重试机制由server控制
/**
* 子任务重试次数任务本身的重试机制由server控制
*/
private int taskRetryNum;
/**
* 定时执行信息
* 时间表达式类型CRON/API/FIX_RATE/FIX_DELAY
*/
// 时间表达式类型CRON/API/FIX_RATE/FIX_DELAY
private String timeExpressionType;
// 时间表达式CRON/NULL/LONG/LONG单位MS
/**
* 时间表达式CRON/NULL/LONG/LONG单位MS
*/
private String timeExpression;
// 最大同时运行任务数默认 1
/**
* 最大同时运行任务数默认 1
*/
private Integer maxInstanceNum;
@Override
public String path() {
return ProtocolConstant.WORKER_PATH_DISPATCH_JOB;
}
/**
* 告警配置
*/
private String alarmConfig;
/**
* 日志配置
*/
private String logConfig;
/**
* 高级运行时配置
*/
private String advancedRuntimeConfig;
}

View File

@ -1,10 +1,9 @@
package tech.powerjob.common.request;
import tech.powerjob.common.PowerSerializable;
import tech.powerjob.common.ProtocolConstant;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import tech.powerjob.common.PowerSerializable;
/**
@ -18,9 +17,4 @@ import lombok.NoArgsConstructor;
@AllArgsConstructor
public class ServerStopInstanceReq implements PowerSerializable {
private Long instanceId;
@Override
public String path() {
return ProtocolConstant.WORKER_PATH_STOP_INSTANCE;
}
}

View File

@ -15,6 +15,12 @@ import java.util.Map;
@Data
public class TaskTrackerReportInstanceStatusReq implements PowerSerializable {
/**
* 追加上报自己的 appId
* 方便后续的监控日志埋点
*/
private Long appId;
private Long jobId;
private Long instanceId;
@ -40,7 +46,18 @@ public class TaskTrackerReportInstanceStatusReq implements PowerSerializable {
private long startTime;
private Long endTime;
private long reportTime;
private String sourceAddress;
/* ********* 秒级任务的告警信息 ********* */
private boolean needAlert;
private String alertContent;
}

View File

@ -17,26 +17,55 @@ import java.util.List;
@Data
public class WorkerHeartbeat implements PowerSerializable {
// 本机地址 -> IP:port
/**
* 本机地址 -> IP:port
*/
private String workerAddress;
// 当前 appName
/**
* 当前 appName
*/
private String appName;
// 当前 appId
/**
* 当前 appId
*/
private Long appId;
// 当前时间
/**
* 当前时间
*/
private long heartbeatTime;
// 当前加载的容器容器名称 -> 容器版本
/**
* 当前加载的容器容器名称 -> 容器版本
*/
private List<DeployedContainerInfo> containerInfos;
// worker 版本信息
/**
* worker 版本信息
*/
private String version;
// 使用的通讯协议 AKKA / HTTP
/**
* 使用的通讯协议 AKKA / HTTP
*/
private String protocol;
// worker tag标识同一个 worker 下的一类集群 ISSUE: 226
/**
* worker tag标识同一个 worker 下的一类集群 ISSUE: 226
*/
private String tag;
// 客户端名称
/**
* 客户端名称
*/
private String client;
// 扩展字段
/**
* 扩展字段
*/
private String extra;
/**
* 是否已经超载超载的情况下 Server 一段时间内不会再向其派发任务
*/
private boolean isOverload;
private int lightTaskTrackerNum;
private int heavyTaskTrackerNum;
private SystemMetrics systemMetrics;
}

View File

@ -4,6 +4,10 @@ import tech.powerjob.common.enums.DispatchStrategy;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.model.AlarmConfig;
import tech.powerjob.common.model.JobAdvancedRuntimeConfig;
import tech.powerjob.common.model.LogConfig;
import tech.powerjob.common.model.LifeCycle;
import tech.powerjob.common.utils.CommonUtils;
import lombok.Data;
import tech.powerjob.common.response.JobInfoDTO;
@ -131,8 +135,31 @@ public class SaveJobInfoRequest {
private DispatchStrategy dispatchStrategy;
private String lifecycle;
/**
* 某种派发策略背后的具体配置值取决于 dispatchStrategy
*/
private String dispatchStrategyConfig;
private LifeCycle lifeCycle;
/**
* alarm config
*/
private AlarmConfig alarmConfig;
/**
* 任务归类开放给接入方自由定制
*/
private String tag;
/**
* 日志配置包括日志级别日志方式等配置信息
*/
private LogConfig logConfig;
/**
* 高级运行时配置
*/
private JobAdvancedRuntimeConfig advancedRuntimeConfig;
/**
* Check non-null properties.

View File

@ -1,8 +1,8 @@
package tech.powerjob.common.request.http;
import lombok.Data;
import tech.powerjob.common.enums.WorkflowNodeType;
import tech.powerjob.common.utils.CommonUtils;
import lombok.Data;
@ -22,7 +22,7 @@ public class SaveWorkflowNodeRequest {
/**
* 节点类型(默认为任务节点)
*/
private WorkflowNodeType type = WorkflowNodeType.JOB;
private Integer type;
/**
* 任务 ID
*/
@ -44,10 +44,11 @@ public class SaveWorkflowNodeRequest {
*/
private Boolean skipWhenFailed = false;
public void valid(){
public void valid() {
CommonUtils.requireNonNull(this.appId, "appId can't be empty");
CommonUtils.requireNonNull(this.type, "type can't be empty");
if (type == WorkflowNodeType.JOB) {
final WorkflowNodeType workflowNodeType = WorkflowNodeType.of(type);
if (workflowNodeType == WorkflowNodeType.JOB || workflowNodeType == WorkflowNodeType.NESTED_WORKFLOW) {
CommonUtils.requireNonNull(this.jobId, "jobId can't be empty");
}
}

View File

@ -1,6 +1,7 @@
package tech.powerjob.common.request.http;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.model.LifeCycle;
import tech.powerjob.common.model.PEWorkflowDAG;
import tech.powerjob.common.utils.CommonUtils;
import com.google.common.collect.Lists;
@ -63,6 +64,8 @@ public class SaveWorkflowRequest implements Serializable {
/** 点线表示法*/
private PEWorkflowDAG dag;
private LifeCycle lifeCycle;
public void valid() {
CommonUtils.requireNonNull(wfName, "workflow name can't be empty");
CommonUtils.requireNonNull(appId, "appId can't be empty");

View File

@ -0,0 +1,33 @@
package tech.powerjob.common.request.query;
import lombok.Getter;
import lombok.Setter;
import java.util.Date;
import java.util.List;
/**
 * Paged query over job instances. Field names follow the PowerQuery suffix convention
 * (Eq = equals, Lt = less-than, Gt = greater-than, In = contained-in — confirm against
 * the query builder).
 *
 * @author tjq
 * @since 2024/11/21
 */
@Getter
@Setter
public class InstancePageQuery extends PowerPageQuery {

    // instanceId == value
    private Long instanceIdEq;
    // instanceId < value
    private Long instanceIdLt;
    // instanceId > value
    private Long instanceIdGt;

    // jobId == value
    private Long jobIdEq;

    // status contained in the given list
    private List<Integer> statusIn;

    // creation-time bounds
    private Date gmtCreateLt;
    private Date gmtCreateGt;

    // modification-time bounds
    private Date gmtModifiedLt;
    private Date gmtModifiedGt;
}

View File

@ -50,4 +50,6 @@ public class JobInfoQuery extends PowerQuery {
private Date gmtModifiedGt;
private Integer dispatchStrategyEq;
private String tagEq;
}

View File

@ -0,0 +1,41 @@
package tech.powerjob.common.request.query;
import lombok.Getter;
import lombok.Setter;
import tech.powerjob.common.PowerQuery;
import java.io.Serializable;
/**
 * Base class for paged queries: adds pagination and sorting parameters to
 * {@link PowerQuery}.
 *
 * @author tjq
 * @since 2024/11/21
 */
@Getter
@Setter
public class PowerPageQuery extends PowerQuery implements Serializable {

    /* ****************** pagination ****************** */

    /**
     * Current page index (0-based, defaults to the first page).
     */
    protected Integer index = 0;

    /**
     * Page size (defaults to 10).
     */
    protected Integer pageSize = 10;

    /* ****************** sorting ****************** */

    /**
     * Column to sort by, e.g. {@code gmtCreate}, {@code instanceId}.
     */
    protected String sortBy;

    /**
     * {@code true} sorts the column ascending, {@code false} (default) descending.
     */
    protected boolean asc = false;
}

View File

@ -1,6 +1,9 @@
package tech.powerjob.common.response;
import lombok.Data;
import tech.powerjob.common.model.AlarmConfig;
import tech.powerjob.common.model.JobAdvancedRuntimeConfig;
import tech.powerjob.common.model.LogConfig;
import java.util.Date;
@ -16,69 +19,131 @@ public class JobInfoDTO {
private Long id;
/* ************************** 任务基本信息 ************************** */
// 任务名称
/**
* 任务名称
*/
private String jobName;
// 任务描述
/**
* 任务描述
*/
private String jobDescription;
// 任务所属的应用ID
/**
* 任务所属的应用ID
*/
private Long appId;
// 任务自带的参数
/**
* 任务自带的参数
*/
private String jobParams;
/* ************************** 定时参数 ************************** */
// 时间表达式类型CRON/API/FIX_RATE/FIX_DELAY
/**
* 时间表达式类型CRON/API/FIX_RATE/FIX_DELAY
*/
private Integer timeExpressionType;
// 时间表达式CRON/NULL/LONG/LONG
/**
* 时间表达式CRON/NULL/LONG/LONG
*/
private String timeExpression;
/* ************************** 执行方式 ************************** */
// 执行类型单机/广播/MR
/**
* 执行类型单机/广播/MR
*/
private Integer executeType;
// 执行器类型Java/Shell
/**
* 执行器类型Java/Shell
*/
private Integer processorType;
// 执行器信息
/**
* 执行器信息
*/
private String processorInfo;
/* ************************** 运行时配置 ************************** */
// 最大同时运行任务数默认 1
/**
* 最大同时运行任务数默认 1
*/
private Integer maxInstanceNum;
// 并发度同时执行某个任务的最大线程数量
/**
* 并发度同时执行某个任务的最大线程数量
*/
private Integer concurrency;
// 任务整体超时时间
/**
* 任务整体超时时间
*/
private Long instanceTimeLimit;
/* ************************** 重试配置 ************************** */
/** ************************** 重试配置 ************************** */
private Integer instanceRetryNum;
private Integer taskRetryNum;
// 1 正常运行2 停止不再调度
/**
* 1 正常运行2 停止不再调度
*/
private Integer status;
// 下一次调度时间
/**
* 下一次调度时间
*/
private Long nextTriggerTime;
/* ************************** 繁忙机器配置 ************************** */
// 最低CPU核心数量0代表不限
/**
* 最低CPU核心数量0代表不限
*/
private double minCpuCores;
// 最低内存空间单位 GB0代表不限
/**
* 最低内存空间单位 GB0代表不限
*/
private double minMemorySpace;
// 最低磁盘空间单位 GB0代表不限
/**
* 最低磁盘空间单位 GB0代表不限
*/
private double minDiskSpace;
/* ************************** 集群配置 ************************** */
// 指定机器运行空代表不限非空则只会使用其中的机器运行多值逗号分割
/**
* 指定机器运行空代表不限非空则只会使用其中的机器运行多值逗号分割
*/
private String designatedWorkers;
// 最大机器数量
/**
* 最大机器数量
*/
private Integer maxWorkerCount;
// 报警用户ID列表多值逗号分隔
/**
* 报警用户ID列表多值逗号分隔
*/
private String notifyUserIds;
private Date gmtCreate;
private Date gmtModified;
private String extra;
/**
* 派发策略
*/
private Integer dispatchStrategy;
/**
* 某种派发策略背后的具体配置值取决于 dispatchStrategy
*/
private String dispatchStrategyConfig;
private String lifecycle;
private AlarmConfig alarmConfig;
/**
* 任务归类开放给接入方自由定制
*/
private String tag;
/**
* 日志配置包括日志级别日志方式等配置信息
*/
private LogConfig logConfig;
private JobAdvancedRuntimeConfig advancedRuntimeConfig;
}

View File

@ -0,0 +1,10 @@
package tech.powerjob.common.response;
/**
 * Concrete {@code ResultDTO<Object>} subtype; exists mainly to silence the IDE's
 * noisy generic-type hints at call sites.
 *
 * @author tjq
 * @since 2023/9/2
 */
public class ObjectResultDTO extends ResultDTO<Object> {
}

View File

@ -0,0 +1,42 @@
package tech.powerjob.common.response;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
import java.io.Serializable;
import java.util.List;
/**
 * Generic page of results.
 *
 * @param <T> element type of the page
 * @author tjq
 * @since 2020/4/12
 */
@Data
@NoArgsConstructor
@Accessors(chain = true)
public class PageResult<T> implements Serializable {

    /**
     * Current page index.
     */
    private int index;

    /**
     * Page size.
     */
    private int pageSize;

    /**
     * Total number of pages.
     */
    private int totalPages;

    /**
     * Total number of items across all pages.
     */
    private long totalItems;

    /**
     * Items of this page.
     */
    private List<T> data;
}

Some files were not shown because too many files have changed in this diff Show More