Mirror of https://github.com/PowerJob/PowerJob.git, synced 2025-07-17 00:00:04 +08:00

Compare commits (995 commits)
.github/FUNDING.yml (vendored, new file, +12)

@@ -0,0 +1,12 @@
# These are supported funding model platforms

github: #[KFCFans]
patreon: # Replace with a single Patreon username
open_collective: powerjob
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: ['http://www.powerjob.tech/support.html']
.github/ISSUE_TEMPLATE/bug-report.md (vendored, 5 changes)

@@ -1,7 +1,6 @@
 ---
-name: bug report
-about: Create a report to help us improve
-title: "[BUG] bug report"
+name: "\U0001F41B Bug Report"
+about: Something doesn't work as expected
 labels: bug
 assignees: KFCFans
 
.github/ISSUE_TEMPLATE/feature-request.md (vendored, 3 changes)

@@ -1,7 +1,6 @@
 ---
-name: feature request
+name: "\U0001F680 Feature Request"
 about: Suggest an idea for this project
-title: "[Feature] feature request"
 labels: new feature
 assignees: ''
 
.github/ISSUE_TEMPLATE/question.md (vendored, new file, +15)

@@ -0,0 +1,15 @@
---
name: "\U0001F914 Question"
about: Usage question that isn't answered in docs or discussion
labels: question
---

## Question

Before asking a question, make sure you have(在提问之前,请确保你已经):

- Read documentation(仔细阅读了官方文档)
- Googled your question(百度搜索了你的问题)
- Searched open and closed GitHub issues(搜索了开放和关闭的 GitHub issues)

Please pay attention on issues you submitted, because we maybe need more details.
.github/PULL_REQUEST_TEMPLATE.md (vendored, new file, +27)

@@ -0,0 +1,27 @@
## What is the purpose of the change

For example: Making PowerJob better

## Brief changelog

It is best to associate an existing issue

## Verifying this change

Do I need to test?
Has testing been completed?
Test method?

Follow this checklist to help us incorporate your contribution quickly and easily. Notice, `it would be helpful if you could finish the following 3 checklist before request the community to review your PR`.

- [x] Make sure there is a [Github issue](https://github.com/PowerJob/PowerJob/issues) filed for the change (usually before you start working on it). Trivial changes like typos do not require a Github issue. Your pull request should address just this issue, without pulling in other changes - one PR resolves one issue.
- [x] Write a pull request description that is detailed enough to understand what the pull request does, how, and why.
- [x] Follow the git commit specification
  * feat: xxx -> The feat type is used to identify production changes related to new backward-compatible abilities or functionality.
  * perf: xxx -> The perf type is used to identify production changes related to backward-compatible performance improvements.
  * fix: xxx -> The fix type is used to identify production changes related to backward-compatible bug fixes.
  * docs: xxx -> The docs type is used to identify documentation changes related to the project - whether intended externally for the end users (in case of a library) or internally for the developers.
  * test: xxx -> The test type is used to identify development changes related to tests - such as refactoring existing tests or adding new tests.
  * refactor: xxx -> The refactor type is used to identify development changes related to modifying the codebase, which neither adds a feature nor fixes a bug - such as removing redundant code, simplifying the code, renaming variables, etc.
.github/workflows/docker-image.yml (vendored, deleted, -30)

@@ -1,30 +0,0 @@
name: Docker Image CI

on:
  push:
    branches: [ master ]

jobs:

  build:

    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v2
    - name: Build the Docker image
      run: mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am && /bin/cp -rf powerjob-server/target/*.jar powerjob-server/docker/powerjob-server.jar && /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
    - uses: docker/build-push-action@v1
      with:
        username: ${{ secrets.DOCKER_USERNAME }}
        password: ${{ secrets.DOCKER_PASSWORD }}
        repository: tjqq/powerjob-server
        tags: latest
        path: powerjob-server/docker/
    - uses: docker/build-push-action@v1
      with:
        username: ${{ secrets.DOCKER_USERNAME }}
        password: ${{ secrets.DOCKER_PASSWORD }}
        repository: tjqq/powerjob-agent
        tags: latest
        path: powerjob-worker-agent/
.github/workflows/docker_publish.yml (vendored, new file, +68)

@@ -0,0 +1,68 @@
name: build_docker

on:
  push:
    tags:
      - 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10

jobs:
  build_docker:
    name: Build docker
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Build Maven Project
        uses: actions/setup-java@v4
        with:
          java-version: '8'
          distribution: 'temurin'
      - name: Publish package
        run: mvn clean package -Pdev -DskipTests -U -e && /bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar && /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar && /bin/cp -rf powerjob-worker-samples/target/*.jar powerjob-worker-samples/powerjob-worker-samples.jar

      # Login
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build And Push [powerjob-server]
        uses: docker/build-push-action@v6
        with:
          context: powerjob-server/docker/
          push: true
          platforms: linux/amd64,linux/arm64
          tags: |
            tjqq/powerjob-server:latest
            powerjob/powerjob-server:latest
            tjqq/powerjob-server:${{ github.ref_name }}
            powerjob/powerjob-server:${{ github.ref_name }}

      - name: Build And Push [powerjob-agent]
        uses: docker/build-push-action@v6
        with:
          context: powerjob-worker-agent/
          push: true
          platforms: linux/amd64,linux/arm64
          tags: |
            tjqq/powerjob-agent:latest
            powerjob/powerjob-agent:latest
            tjqq/powerjob-agent:${{ github.ref_name }}
            powerjob/powerjob-agent:${{ github.ref_name }}

      - name: Build And Push [powerjob-worker-samples]
        uses: docker/build-push-action@v6
        with:
          context: powerjob-worker-samples/
          push: true
          platforms: linux/amd64,linux/arm64
          tags: |
            tjqq/powerjob-worker-samples:latest
            powerjob/powerjob-worker-samples:latest
            tjqq/powerjob-worker-samples:${{ github.ref_name }}
            powerjob/powerjob-worker-samples:${{ github.ref_name }}
.github/workflows/maven.yml (vendored, deleted, -38)

@@ -1,38 +0,0 @@
# This workflow will build a Java project with Maven
# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven

name: Java CI with Maven

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:

    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v2
    - name: Set up JDK 1.8
      uses: actions/setup-java@v1
      with:
        java-version: 1.8
    - name: Build with Maven
      run: mvn -B clean package -Pdev -DskipTests --file pom.xml
    - name: upload build result
      run: mkdir staging && cp powerjob-server/target/*.jar staging/powerjob-server.jar && cp powerjob-client/target/*.jar staging/powerjob-client.jar && cp powerjob-worker-agent/target/*.jar staging/powerjob-agent.jar
    - uses: actions/upload-artifact@v1
      with:
        name: powerjob-server.jar
        path: staging/powerjob-server.jar
    - uses: actions/upload-artifact@v1
      with:
        name: powerjob-client.jar
        path: staging/powerjob-client.jar
    - uses: actions/upload-artifact@v1
      with:
        name: powerjob-agent.jar
        path: staging/powerjob-agent.jar
.github/workflows/maven_build.yml (vendored, new file, +28)

@@ -0,0 +1,28 @@
# This workflow will build a Java project with Maven
# For more information see: https://docs.github.com/zh/actions/use-cases-and-examples/building-and-testing/building-and-testing-java-with-maven

name: Java CI with Maven

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:

    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-java@v4
        with:
          java-version: '8'
          distribution: 'temurin'
      - run: mvn -B clean package -Pdev -DskipTests --file pom.xml
      - run: mkdir staging && cp powerjob-server/powerjob-server-starter/target/*.jar staging/powerjob-server.jar && cp powerjob-client/target/*.jar staging/powerjob-client.jar && cp powerjob-worker-agent/target/*.jar staging/powerjob-agent.jar && cp powerjob-worker-spring-boot-starter/target/*.jar staging/powerjob-worker-spring-boot-starter.jar
      - uses: actions/upload-artifact@v4
        with:
          name: Package
          path: staging
.github/workflows/maven_publish.yml (vendored, new file, +22)

@@ -0,0 +1,22 @@
name: Publish package to the Maven Central Repository
on:
  release:
    types: [created]
jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Maven Central Repository
        uses: actions/setup-java@v4
        with:
          java-version: '8'
          distribution: 'temurin'
          server-id: ossrh
          server-username: MAVEN_USERNAME
          server-password: MAVEN_PASSWORD
      - name: Publish package
        run: mvn --batch-mode clean deploy -pl powerjob-worker,powerjob-client,powerjob-worker-spring-boot-starter,powerjob-official-processors,powerjob-worker-agent -DskipTests -Prelease -am
        env:
          MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
          MAVEN_PASSWORD: ${{ secrets.OSSRH_TOKEN }}
.gitignore (vendored, 8 changes)

@@ -35,3 +35,11 @@ build/
 *.log
 */.DS_Store
 .DS_Store
+
+.phd
+.txt
+.trc
+*/.phd
+*/.txt
+*/.trc
+powerjob-data/
LICENSE (2 changes)

@@ -186,7 +186,7 @@
       same "printed page" as the copyright notice for easier
       identification within third-party archives.
 
-   Copyright [yyyy] [name of copyright owner]
+   Copyright [2021] [PowerJob]
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
README.md (110 changes)

@@ -1,63 +1,77 @@
+# English | [简体中文](./README_zhCN.md)
+
+<p align="center">
+🏮 The whole PowerJob team wishes everyone a soaring Year of the Dragon: good health, success in everything, family happiness, and well-being in the new year! 🏮
+</p>
+
 <p align="center">
 <img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/logo.png" alt="PowerJob" title="PowerJob" width="557"/>
 </p>
 
 <p align="center">
-<a href="https://github.com/KFCFans/PowerJob/actions"><img src="https://github.com/KFCFans/PowerJob/workflows/Java%20CI%20with%20Maven/badge.svg?branch=master" alt="actions"></a>
-<a href="https://search.maven.org/search?q=com.github.kfcfans"><img alt="Maven Central" src="https://img.shields.io/maven-central/v/com.github.kfcfans/powerjob-worker"></a>
-<a href="https://github.com/KFCFans/PowerJob/releases"><img alt="GitHub release (latest SemVer)" src="https://img.shields.io/github/v/release/kfcfans/powerjob?color=%23E59866"></a>
-<a href="https://github.com/KFCFans/PowerJob/blob/master/LICENSE"><img src="https://img.shields.io/github/license/KFCFans/PowerJob" alt="LICENSE"></a>
+<a href="https://github.com/PowerJob/PowerJob/actions"><img src="https://github.com/PowerJob/PowerJob/workflows/Java%20CI%20with%20Maven/badge.svg?branch=master" alt="actions"></a>
+<a href="https://central.sonatype.com/search?smo=true&q=powerjob-worker&namespace=tech.powerjob"><img alt="Maven Central" src="https://img.shields.io/maven-central/v/tech.powerjob/powerjob-worker"></a>
+<a href="https://github.com/PowerJob/PowerJob/releases"><img alt="GitHub release (latest SemVer)" src="https://img.shields.io/github/v/release/kfcfans/powerjob?color=%23E59866"></a>
+<a href="https://github.com/PowerJob/PowerJob/blob/master/LICENSE"><img src="https://img.shields.io/github/license/KFCFans/PowerJob" alt="LICENSE"></a>
 </p>
 
-PowerJob (formerly OhMyScheduler) is a next-generation distributed scheduling and computing framework that lets you easily schedule jobs and run distributed computation for complex tasks.
-# Introduction
-### Key features
-* Easy to use: a web UI lets developers visually manage scheduled tasks (create, delete, update, query), monitor task status, and view run logs.
-* Complete timing strategies: CRON expressions, fixed rate, fixed delay, and API-triggered scheduling are all supported.
-* Rich execution modes: standalone, broadcast, Map, and MapReduce; the Map/MapReduce processors give developers cluster-wide distributed computing with just a few lines of code.
-* Workflow (DAG) support: task dependencies can be configured online and arranged visually, with data passing between upstream and downstream tasks.
-* Broad processor support: Spring beans, built-in/external Java classes, Shell, Python, and more.
-* Convenient operations: online logging shows processor logs in the web console in real time, cutting debugging cost and greatly improving development efficiency.
-* Minimal dependencies: at minimum only a relational database (MySQL/Oracle/MS SQLServer...); MongoDB is an optional extension for storing large volumes of online logs.
-* High availability & high performance: the scheduling server drops the database-lock strategy of other frameworks in favor of lock-free scheduling; deploying multiple servers brings both high availability and higher performance (unlimited horizontal scaling).
-* Failover and recovery: failed tasks are retried according to the configured retry policy; as long as the worker cluster has enough compute nodes, the task will complete.
+[PowerJob](https://github.com/PowerJob/PowerJob) is an open-source distributed computing and job scheduling framework which allows developers to easily schedule tasks in their own application.
 
-[Online trial](https://www.yuque.com/powerjob/guidence/hnbskn)
-### Applicable scenarios
-* Timed-execution scenarios: e.g., full data synchronization in the early morning, generating business reports.
-* Scenarios where every machine must run a task together: e.g., clearing cluster logs with broadcast execution.
-* Scenarios that need distributed processing: e.g., updating a large batch of data takes too long on one machine; Map/MapReduce processors can distribute the work across the whole cluster to speed it up.
+Refer to [PowerJob Introduction](https://www.yuque.com/powerjob/en/introduce) for detailed information.
 
-### Comparison with similar products
-| | QuartZ | xxl-job | SchedulerX 2.0 | PowerJob |
-| --- | --- | --- | --- | --- |
-| Timing type | CRON | CRON | CRON, fixed rate, fixed delay, OpenAPI | **CRON, fixed rate, fixed delay, OpenAPI** |
-| Task type | built-in Java | built-in Java, GLUE Java, Shell, Python scripts | built-in Java, external Java (FatJar), Shell, Python scripts | **built-in Java, external Java (container), Shell, Python scripts** |
-| Distributed tasks | none | static sharding | MapReduce dynamic sharding | **MapReduce dynamic sharding** |
-| Online task governance | not supported | supported | supported | **supported** |
-| Online log display | not supported | supported | not supported | **supported** |
-| Scheduling method & performance | database lock; performance bottleneck | database lock; performance bottleneck | unknown | **lock-free design; strong performance with no ceiling** |
-| Alarm monitoring | none | email | SMS | **email, with an interface for developer extensions** |
-| System dependencies | any JDBC relational database (MySQL, Oracle...) | MySQL | RMB (free during the public beta; hey, consider this a free ad) | **any relational database supported by Spring Data JPA (MySQL, Oracle...)** |
-| DAG workflow | not supported | not supported | supported | **supported** |
+# Introduction
 
+### Features
+- **Friendly UI:** [Front-end](http://try.powerjob.tech/#/welcome?appName=powerjob-agent-test&password=123) page is provided and developers can manage their task, monitor the status, check the logs online, etc.
 
-# Documentation
-**[Detailed Chinese docs](https://www.yuque.com/powerjob/guidence/ztn4i5)** OR **[mirror (may not be up to date)](https://kfcfans.github.io/)**
+- **Abundant Timing Strategies:** Four timing strategies are supported, including CRON expression, fixed rate, fixed delay and OpenAPI which allows you to define your own scheduling policies, such as delaying execution.
 
-PS: Many thanks to the documentation translation platform [breword](https://www.breword.com/) for its huge contribution to this project's English docs!
+- **Multiple Execution Mode:** Four execution modes are supported, including stand-alone, broadcast, Map and MapReduce. Distributed computing resource could be utilized in MapReduce mode, try the magic out [here](https://www.yuque.com/powerjob/en/za1d96#9YOnV)!
 
-# References
->Alibaba SchedulerX 2.0
+- **Workflow(DAG) Support:** Both job dependency management and data communications between jobs are supported.
 
-* [Akka framework](https://yq.aliyun.com/articles/709946?spm=a2c4e.11153959.teamhomeleft.67.6a0560c9bZEnZq): it has to be said, akka-remote eliminates a large share of the networking code.
-* [Executor architecture design](https://yq.aliyun.com/articles/704121?spm=a2c4e.11153959.teamhomeleft.97.371960c9qhB1mB): I actually disagree with this one; my own design feels closer to Yarn's architecture.
-* [MapReduce model](https://yq.aliyun.com/articles/706820?spm=a2c4e.11153959.teamhomeleft.83.6a0560c9bZEnZq): a cool idea. Big-data frameworks move processors toward the data, but for a traditional Java application moving data toward the processors works too, and it makes the framework much simpler to implement.
-* [Broadcast execution](https://yq.aliyun.com/articles/716203?spm=a2c4e.11153959.teamhomeleft.40.371960c9qhB1mB): running log-cleanup scripts and the like; extremely practical!
+- **Extensive Processor Support:** Developers can write their processors in Java, Shell, Python, and will subsequently support multilingual scheduling via HTTP.
 
-# Others
-* The product is permanently open source (Apache License, Version 2.0) and free to use, and the current developer @KFCFans has ample time to maintain the project and provide free technical support (all in!), so everyone is welcome to try it!
-* Contributions are warmly welcome, both PRs and issues (please)~
-* If you like it, a Star would be much appreciated~ = ̄ω ̄=
-* Contact @KFCFans -> `tengjiqi@gmail.com`
-* User QQ group: 487453839
+- **Powerful Disaster Tolerance:** As long as there are enough computing nodes, configurable retry policies make it possible for your task to be executed and finished successfully.
 
+- **High Availability & High Performance:** PowerJob supports unlimited horizontal expansion. It's easy to achieve high availability and performance by deploying as many PowerJob server and worker nodes.
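The Map/MapReduce modes above are where PowerJob differs most from QuartZ-style schedulers, so a minimal Java sketch of a MapReduce-style processor may help. It assumes the 4.x worker SDK: the names MapReduceProcessor, isRootTask, map, reduce, TaskContext.getSubTask, and TaskResult.isSuccess follow the public samples as I recall them, but they have moved between major versions (3.x lived under com.github.kfcfans packages), so treat them as assumptions rather than a definitive API reference; SliceUpdateProcessor and SubTask are hypothetical names.

```java
import java.util.ArrayList;
import java.util.List;

import tech.powerjob.worker.core.processor.ProcessResult;
import tech.powerjob.worker.core.processor.TaskContext;
import tech.powerjob.worker.core.processor.TaskResult;
import tech.powerjob.worker.core.processor.sdk.MapReduceProcessor;

/**
 * Sketch: the root task fans out 100 sub-tasks, each worker node
 * processes one slice, and reduce() aggregates the results.
 */
public class SliceUpdateProcessor implements MapReduceProcessor {

    @Override
    public ProcessResult process(TaskContext context) throws Exception {
        if (isRootTask()) {
            // Root task: split the job into sub-tasks and hand them to the cluster.
            List<SubTask> subTasks = new ArrayList<>();
            for (int i = 0; i < 100; i++) {
                subTasks.add(new SubTask(i));
            }
            return map(subTasks, "SLICE_TASK");
        }
        // Non-root task: process a single slice on whichever worker received it.
        SubTask subTask = (SubTask) context.getSubTask();
        // ... update the rows belonging to slice subTask.sliceId ...
        return new ProcessResult(true, "slice " + subTask.sliceId + " done");
    }

    @Override
    public ProcessResult reduce(TaskContext context, List<TaskResult> taskResults) {
        // Called once after all sub-tasks finish; aggregate their results.
        long ok = taskResults.stream().filter(TaskResult::isSuccess).count();
        return new ProcessResult(true, "success=" + ok + "/" + taskResults.size());
    }

    // Sub-task payloads are serialized and shipped to worker nodes.
    public static class SubTask implements java.io.Serializable {
        public final int sliceId;
        public SubTask(int sliceId) { this.sliceId = sliceId; }
    }
}
```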
+### Applicable scenes
+
+- Timed tasks, for example, allocating e-coupons on 9 AM every morning.
+- Broadcast tasks, for example, broadcasting to the cluster to clear logs.
+- MapReduce tasks, for example, speeding up certain job like updating large amounts of data.
+- Delayed tasks, for example, processing overdue orders.
+- Customized tasks, triggered with [OpenAPI](https://www.yuque.com/powerjob/en/openapi).
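The "customized tasks" scene in the list above relies on OpenAPI, so here is a hedged sketch of triggering a job from application code with the Java client. The class name PowerJobClient, its constructor arguments, and the runJob(jobId, instanceParams, delayMS) signature reflect my reading of the 4.x client (earlier releases called it OhMyClient); the server address, app credentials, and job ID 1 are placeholders, and all of it should be treated as an assumption to verify against the client docs.

```java
import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.response.ResultDTO;

public class OpenApiDemo {
    public static void main(String[] args) {
        // Connect to the scheduling server; appName/password are the app's
        // credentials registered on the server (placeholders here).
        PowerJobClient client = new PowerJobClient("127.0.0.1:7700", "powerjob-agent-test", "123");

        // Trigger job #1, passing instance-level params; the third argument is
        // a delay in milliseconds, which is how "delayed tasks" are expressed.
        ResultDTO<Long> res = client.runJob(1L, "{\"orderId\": 42}", 0);
        if (res.isSuccess()) {
            Long instanceId = res.getData();
            System.out.println("instanceId = " + instanceId);
        }
    }
}
```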
+### Online trial
+- Address: [try.powerjob.tech](http://try.powerjob.tech/#/welcome?appName=powerjob-agent-test&password=123)
+- Recommend reading the documentation first: [here](https://www.yuque.com/powerjob/en/trial)
+
+# Documents
+**[Docs](https://www.yuque.com/powerjob/en/introduce)**
+
+**[中文文档](https://www.yuque.com/powerjob/guidence/intro)**
+
+# Known Users
+[Click to register as PowerJob user!](https://github.com/PowerJob/PowerJob/issues/6)
+ღ( ´・ᴗ・\` )ღ Many thanks to the following registered users. ღ( ´・ᴗ・\` )ღ
+<p style="text-align: center">
+<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/user.png" alt="PowerJob User" title="PowerJob User"/>
+</p>
+
+# Stargazers over time
+
+[](https://starchart.cc/PowerJob/PowerJob)
+
+# License
+
+PowerJob is released under Apache License 2.0. Please refer to [License](./LICENSE) for details.
+
+# Others
+
+- Any developer interested in getting more involved in PowerJob may join our [Reddit](https://www.reddit.com/r/PowerJob) or [Gitter](https://gitter.im/PowerJob/community) community and make [contributions](https://github.com/PowerJob/PowerJob/pulls)!
+
+- Reach out to me through email **tengjiqi@gmail.com**. Any issues or questions are welcomed on [Issues](https://github.com/PowerJob/PowerJob/issues).
+
+- Look forward to your opinions. Response may be late but not denied.
@@ -1,57 +0,0 @@
-<p align="center">
-<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/logo.png" alt="PowerJob" title="PowerJob" width="557"/>
-</p>
-
-<p align="center">
-<a href="https://github.com/KFCFans/PowerJob/actions"><img src="https://github.com/KFCFans/PowerJob/workflows/Java%20CI%20with%20Maven/badge.svg?branch=master" alt="actions"></a>
-<a href="https://search.maven.org/search?q=com.github.kfcfans"><img alt="Maven Central" src="https://img.shields.io/maven-central/v/com.github.kfcfans/powerjob-worker"></a>
-<a href="https://github.com/KFCFans/PowerJob/releases"><img alt="GitHub release (latest SemVer)" src="https://img.shields.io/github/v/release/kfcfans/powerjob?color=%23E59866"></a>
-<a href="https://github.com/KFCFans/PowerJob/blob/master/LICENSE"><img src="https://img.shields.io/github/license/KFCFans/PowerJob" alt="LICENSE"></a>
-</p>
-
-PowerJob is a powerful distributed scheduling platform and distributed computing framework based on Akka architecture.It provides you a chance to schedule job and distributed computing easily.
-
-# Introduction
-
-### Features
-- Simple to use: Provides a front-end Web interface that allows developers to visually complete the management of scheduled tasks (create, delete, update, and query), task operation status monitoring, and operation logs viewing.
-- Complete timing strategy: Support four timing scheduling strategies of CRON expression, fixed frequency, fixed delay and API.
-- Extensive execution modes: It supports four execution modes: stand-alone, broadcast, Map, and MapReduce. Among them, the Map / MapReduce processor enables developers to obtain cluster distributed computing capabilities with only a few lines of code.
-- Workflow(DAG) support: support online configuration of task dependencies, visually arrange tasks, as well as support for data transfer between upstream and downstream tasks
-- Extensive executor support: supports processors such as Spring Bean, ordinary Java objects, Shell, Python, and a wide range of applications (such as broadcast execution + Shell script to clear logs)
-- Convenient operation and maintenance: support online log function, the log generated by the actuator can be displayed on the front-end console page in real time, reduce the debugging cost, and greatly improve the development efficiency.
-- Dependency simplification: The smallest dependency-only database (MySQL / Oracle / MS SQLServer ...), the extended dependency is MongoDB (used to store huge online logs).
-- High availability & high performance: The scheduling server has been carefully designed to change the strategy of other scheduling frameworks based on database locks to achieve lock-free scheduling. Deploying multiple scheduling servers can achieve high availability and performance improvement at the same time (support unlimited horizontal expansion).
-- Failover and recovery: After the task fails to execute, the retry can be completed according to the configured retry strategy. As long as the executor cluster has enough computing nodes, the task can be successfully completed.
-
-### Applicable scene
-
-- Business scenarios with regular execution requirements: such as synchronizing data in full volume every morning and generating business reports.
-- There are business scenarios that require all machines to perform together: such as log cleanup.
-- There are business scenarios that require distributed processing: for example, a large amount of data needs to be updated, and the stand-alone execution takes a long time. You can use the Map / MapReduce processors to complete the task distribution and mobilize the entire cluster to speed up the calculation.
-
-### Comparison of similar products
-
-| | QuartZ | xxl-job | SchedulerX 2.0 | PowerJob |
-| --- | --- | --- | --- | --- |
-| Timing type | CRON | CRON | CRON, fixed frequency, fixed delay, OpenAPI | **CRON, fixed frequency, fixed delay, OpenAPI** |
-| Task type | Built-in Java | Built-in Java, GLUE Java, Shell, Python and other scripts | Built-in Java, external Java (FatJar), Shell, Python and other scripts | **Built-in Java, external Java (container), Shell, Python and other scripts** |
-| Distributed task | no | Static sharding | MapReduce dynamic sharding | **MapReduce dynamic sharding** |
-| Online task governance | not support | support | support | **support** |
-| Log blanking | not support | support | not support | **support** |
-| Scheduling methods and performance | Based on database lock, there is a performance bottleneck | Based on database lock, there is a performance bottleneck | Unknown | **Lock-free design, powerful performance without upper limit** |
-| Alarm monitoring | no | mail | SMS | **Email, providing an interface to allow developers to customize development** |
-| System dependence | Any relational database (MySQL, Oracle ...) supported by JDBC | MySQL | Renminbi (free during public beta, hey, help to advertise) | **Any relational database (MySQL, Oracle ...) supported by Spring Data Jpa** |
-| workflow | not support | not support | support | **support** |
-
-# Document
-**[GitHub Wiki](https://github.com/KFCFans/PowerJob/wiki)**
-
-**[中文文档](https://www.yuque.com/powerjob/product)**
-
-# Others
-
-- The product is permanently open source (Apache License, Version 2.0), free to use, and the current developer @KFCFans has sufficient time to maintain the project and provide free technical support (All of my time), welcome to try!
-- Welcome to participate in the contribution of this project, PR and Issue are greatly welcome (please) ~
-- If you feel pretty good, you can give it a star to support it ~ =  ̄ω ̄ =
-- Need some help or have some advice? Welcome to contact Developer @KFCFans-> `tengjiqi@gmail.com`
README_zhCN.md (new file, +78)

@@ -0,0 +1,78 @@
# [English](./README.md) | Simplified Chinese

<p align="center">
🏮 The whole PowerJob team wishes everyone a soaring Year of the Dragon: good health, success in everything, family happiness, and well-being in the new year! 🏮
</p>

<p align="center">
<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/logo.png" alt="PowerJob" title="PowerJob" width="557"/>
</p>

<p align="center">
<a href="https://github.com/PowerJob/PowerJob/actions"><img src="https://github.com/PowerJob/PowerJob/workflows/Java%20CI%20with%20Maven/badge.svg?branch=master" alt="actions"></a>
<a href="https://search.maven.org/search?q=tech.powerjob"><img alt="Maven Central" src="https://img.shields.io/maven-central/v/tech.powerjob/powerjob-worker"></a>
<a href="https://github.com/PowerJob/PowerJob/releases"><img alt="GitHub release (latest SemVer)" src="https://img.shields.io/github/v/release/kfcfans/powerjob?color=%23E59866"></a>
<a href="https://github.com/PowerJob/PowerJob/blob/master/LICENSE"><img src="https://img.shields.io/github/license/KFCFans/PowerJob" alt="LICENSE"></a>
</p>

PowerJob (formerly OhMyScheduler) is a next-generation distributed scheduling and computing framework that lets you easily schedule jobs and run distributed computation for complex tasks.

# Introduction

### Key features
* Easy to use: a web UI lets developers visually manage scheduled tasks (create, delete, update, query), monitor task status, and view run logs.
* Complete timing strategies: CRON expressions, fixed rate, fixed delay, and API-triggered scheduling are all supported.
* Rich execution modes: standalone, broadcast, Map, and MapReduce; the Map/MapReduce processors give developers cluster-wide distributed computing with just a few lines of code.
* Workflow (DAG) support: task dependencies can be configured online and arranged visually, with data passing between upstream and downstream tasks.
* Broad processor support: Spring beans, built-in/external Java classes, Shell, Python, and more.
* Convenient operations: online logging shows processor logs in the web console in real time, cutting debugging cost and greatly improving development efficiency.
* Minimal dependencies: at minimum only a relational database (MySQL/Oracle/MS SQLServer...).
* High availability & high performance: the scheduling server drops the database-lock strategy of other frameworks in favor of lock-free scheduling; deploying multiple servers brings both high availability and higher performance (unlimited horizontal scaling).
* Failover and recovery: failed tasks are retried according to the configured retry policy; as long as the worker cluster has enough compute nodes, the task will complete.

### Applicable scenarios
* Timed-execution scenarios: e.g., full data synchronization in the early morning, generating business reports.
* Scenarios where every machine must run a task together: e.g., clearing cluster logs with broadcast execution.
* Scenarios that need distributed processing: e.g., updating a large batch of data takes too long on one machine; Map/MapReduce processors can distribute the work across the whole cluster to speed it up.
* Scenarios where some tasks must run with a **delay**: e.g., handling expired orders.

### Design goal
PowerJob is designed as an enterprise-grade distributed task scheduling platform, that is, the company-wide **task scheduling middleware**: the whole company deploys a single scheduling center (powerjob-server), and every business application only needs to depend on `powerjob-worker` to connect to it and gain task scheduling and distributed computing capabilities (see the sketch below).
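To make the design goal above concrete, here is a hedged sketch of the plain-Java worker bootstrap: an application depends on powerjob-worker and points it at the scheduling center. PowerJobWorkerConfig, PowerJobWorker, and the setter names are my recollection of the 4.x worker and may differ by version, so treat them as assumptions; Spring Boot applications would instead depend on powerjob-worker-spring-boot-starter and set powerjob.worker.* properties, as the compose files elsewhere in this diff do.

```java
import java.util.Collections;

import tech.powerjob.worker.PowerJobWorker;
import tech.powerjob.worker.common.PowerJobWorkerConfig;

public class WorkerBootstrap {
    public static void main(String[] args) throws Exception {
        PowerJobWorkerConfig config = new PowerJobWorkerConfig();
        // appName must be registered on the powerjob-server console first.
        config.setAppName("powerjob-agent-test");
        // Address list of the scheduling center (powerjob-server nodes).
        config.setServerAddress(Collections.singletonList("127.0.0.1:7700"));

        // Start the worker; it registers with the server and begins
        // receiving scheduled tasks for this app.
        PowerJobWorker worker = new PowerJobWorker(config);
        worker.init();
    }
}
```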
### Online trial
* [Trial instructions and tutorial](https://www.yuque.com/powerjob/guidence/trial)

### Comparison with similar products
| | QuartZ | xxl-job | SchedulerX 2.0 | PowerJob |
| --- | --- | --- | --- | --- |
| Timing type | CRON | CRON | CRON, fixed rate, fixed delay, OpenAPI | **CRON, fixed rate, fixed delay, OpenAPI** |
| Task type | built-in Java | built-in Java, GLUE Java, Shell, Python scripts | built-in Java, external Java (FatJar), Shell, Python scripts | **built-in Java, external Java (container), Shell, Python scripts** |
| Distributed computing | none | static sharding | MapReduce dynamic sharding | **MapReduce dynamic sharding** |
| Online task governance | not supported | supported | supported | **supported** |
| Online log display | not supported | supported | not supported | **supported** |
| Scheduling method & performance | database lock; performance bottleneck | database lock; performance bottleneck | unknown | **lock-free design; strong performance with no ceiling** |
| Alarm monitoring | none | email | SMS | **WebHook, email, DingTalk, and custom extensions** |
| System dependencies | any JDBC relational database (MySQL, Oracle...) | MySQL | RMB | **any relational database supported by Spring Data JPA (MySQL, Oracle...)** |
| DAG workflow | not supported | not supported | supported | **supported** |


# Official documentation
**[Chinese docs](https://www.yuque.com/powerjob/guidence/intro)**

**[Docs](https://www.yuque.com/powerjob/en/introduce)**

# User registration
[Click here to register as a PowerJob user and help the project grow!](https://github.com/PowerJob/PowerJob/issues/6)

ღ( ´・ᴗ・\` )ღ Many thanks to the following registered users for their support ღ( ´・ᴗ・\` )ღ

<p align="center">
<img src="https://raw.githubusercontent.com/KFCFans/PowerJob/master/others/images/user.png" alt="PowerJob User" title="PowerJob User"/>
</p>

# Others
* License: Apache License, Version 2.0
* Contributions are warmly welcome, both PRs and issues (please)~
* If you like it, a Star would be much appreciated~ = ̄ω ̄=
* Contact @KFCFans -> `tengjiqi@gmail.com`
* User QQ groups (joining requires verification because of spam; please fill in the reason for applying):
  * Group 1 (full): 487453839
  * Group 2: 834937813
SECURITY.md (new file, +4)

@@ -0,0 +1,4 @@
# Security notices relating to PowerJob

Please disclose any security issues or vulnerabilities found through [Tidelift's coordinated disclosure system](https://tidelift.com/security) or to the maintainers privately(tengjiqi@gmail.com).
docker-compose.yml (new file, +54)

@@ -0,0 +1,54 @@
# Usage (V4.3.1)
# 1. Run from the PowerJob root directory: docker-compose up
# 2. Wait quietly for the services to start.

version: '3'
services:
  powerjob-mysql:
    environment:
      MYSQL_ROOT_HOST: "%"
      MYSQL_ROOT_PASSWORD: No1Bug2Please3!
    restart: always
    container_name: powerjob-mysql
    image: powerjob/powerjob-mysql:latest
    ports:
      - "3307:3306"
    volumes:
      - ./powerjob-data/powerjob-mysql:/var/lib/mysql
    command: --lower_case_table_names=1

  powerjob-server:
    container_name: powerjob-server
    image: powerjob/powerjob-server:latest
    restart: always
    depends_on:
      - powerjob-mysql
    environment:
      JVMOPTIONS: "-Xmx512m"
      PARAMS: "--oms.mongodb.enable=false --spring.datasource.core.jdbc-url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai"
    ports:
      - "7700:7700"
      - "10086:10086"
      - "10010:10010"
    volumes:
      - ./powerjob-data/powerjob-server:/root/powerjob/server/

  powerjob-worker-samples:
    container_name: powerjob-worker-samples
    image: powerjob/powerjob-worker-samples:latest
    restart: always
    depends_on:
      - powerjob-mysql
      - powerjob-server
    # environment:
    #   PARAMS: "--powerjob.worker.server-address=powerjob-server:7700"
    ports:
      - "8081:8081"
      - "27777:27777"
    volumes:
      - ./powerjob-data/powerjob-worker-samples:/root/powerjob/worker
      - ./others/script/wait-for-it.sh:/wait-for-it.sh
    entrypoint:
      - "sh"
      - "-c"
      - "chmod +x wait-for-it.sh && ./wait-for-it.sh powerjob-server:7700 --strict -- java -Xmx512m -jar /powerjob-worker-samples.jar --powerjob.worker.server-address=powerjob-server:7700"
others/Dockerfile (new file, +11)

@@ -0,0 +1,11 @@
FROM mysql/mysql-server:8.0.30

MAINTAINER dudiao(idudaio@163.com)

ENV TZ=Asia/Shanghai

RUN ln -sf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

COPY sql/01schema.sql /docker-entrypoint-initdb.d

COPY sql/02worker-samples.sql /docker-entrypoint-initdb.d
others/dev/build_test_env.sh (new executable file, +19)

@@ -0,0 +1,19 @@
#!/bin/bash
# Build the PowerJob test environment

echo "================== Stopping all services =================="
docker-compose down
echo "================== Building jars =================="
cd `dirname $0`/../.. || exit
# mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
# -U: force-check snapshot repos  -pl: modules to build (comma-separated)  -am: also build dependent modules (usually used with -pl)  -Pxxx: Maven profile to use
mvn clean package -Pdev -DskipTests
echo "================== Copying jars =================="
/bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
/bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
ls -l powerjob-server/docker/powerjob-server.jar
ls -l powerjob-worker-agent/powerjob-agent.jar

cd others/dev
docker-compose build
docker-compose --compatibility up
others/dev/docker-compose.yml (new file, +109)

@@ -0,0 +1,109 @@
# Build the PowerJob test environment

version: '3.7'
services:
  powerjob-mysql:
    build:
      context: ../
    environment:
      MYSQL_ROOT_HOST: "%"
      MYSQL_ROOT_PASSWORD: No1Bug2Please3!
    deploy:
      resources:
        limits:
          memory: 768M
    restart: always
    container_name: powerjob-mysql
    image: powerjob/powerjob-mysql:test_env
    ports:
      - "3309:3306"
    volumes:
      - ~/powerjob-data/powerjob-mysql:/var/lib/mysql
    command: --lower_case_table_names=1
#  powerjob-mongodb:
#    image: mongo:latest
#    container_name: powerjob-mongodb
#    restart: always
#    deploy:
#      resources:
#        limits:
#          memory: 256M
#    environment:
#      MONGO_INITDB_ROOT_USERNAME: "root"
#      MONGO_INITDB_ROOT_PASSWORD: "No1Bug2Please3!"
#      MONGO_INITDB_DATABASE: "powerjob_daily"
#    ports:
#      - "27017:27017"
#    volumes:
#      - ./testenv/init_mongodb.js:/docker-entrypoint-initdb.d/mongo-init.js:ro
#      - ~/powerjob-data/powerjob-mongodb:/data/db
  powerjob-server:
    build:
      context: ../../powerjob-server/docker
    deploy:
      resources:
        limits:
          memory: 896M
    container_name: powerjob-server
    image: powerjob/powerjob-server:test_env
    restart: always
    depends_on:
      - powerjob-mysql
#      - powerjob-mongodb
    environment:
      PARAMS: "--spring.profiles.active=daily --spring.datasource.core.jdbc-url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai --oms.storage.dfs.mysql_series.url=jdbc:mysql://powerjob-mysql:3306/powerjob-daily?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai"
      JVMOPTIONS: "-server -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/powerjob/server/gc.log"
    ports:
      - "7700:7700"
      - "10086:10086"
      - "10010:10010"
    volumes:
      - ~/powerjob-data/powerjob-server:/root/powerjob/server/
      - ~/.m2:/root/.m2

  powerjob-worker-agent:
    build:
      context: ../../powerjob-worker-agent
    deploy:
      resources:
        limits:
          memory: 384M
    container_name: powerjob-worker-agent
    image: powerjob/powerjob-worker-agent:test_env
    restart: always
    depends_on:
      - powerjob-mysql
      - powerjob-server
    ports:
      - "5002:5005"
      - "10002:10000"
      - "27777:27777"
    volumes:
      - ~/powerjob-data/powerjob-worker-agent:/root
    entrypoint:
      - "sh"
      - "-c"
      - "./wait-for-it.sh powerjob-server:7700 --strict -- java -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -server -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/gc.log -jar /powerjob-agent.jar --app powerjob-worker-samples --server powerjob-server:7700"

  powerjob-worker-agent2:
    deploy:
      resources:
        limits:
          memory: 384M
    container_name: powerjob-worker-agent2
    image: powerjob/powerjob-worker-agent:test_env
    restart: always
    depends_on:
      - powerjob-mysql
      - powerjob-server
    ports:
      - "5003:5005"
      - "10003:10000"
      - "27778:27777"
    volumes:
      - ~/powerjob-data/powerjob-worker-agent2:/root
    entrypoint:
      - "sh"
      - "-c"
      - "./wait-for-it.sh powerjob-server:7700 --strict -- java -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -server -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCDateStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=7 -XX:GCLogFileSize=100M -Xloggc:/root/gc.log -jar /powerjob-agent.jar --app powerjob-worker-samples --server powerjob-server:7700"
@@ -11,10 +11,11 @@ cd `dirname $0`/../.. || exit
 read -r -p "Run the maven build? (y/n):" needmvn
 if [ "$needmvn" = "y" ] || [ "$needmvn" = "Y" ]; then
   echo "================== Building jar =================="
   # mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
+  # -U: force-check snapshot repos  -pl: modules to build, comma-separated  -am: also build required modules, usually combined with -pl  -Pxxx: profile to activate
-  mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
+  mvn clean package -Pdev -DskipTests -U -e
   echo "================== Copying jar =================="
-  /bin/cp -rf powerjob-server/target/*.jar powerjob-server/docker/powerjob-server.jar
+  /bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
   /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
   ls -l powerjob-server/docker/powerjob-server.jar
   ls -l powerjob-worker-agent/powerjob-agent.jar
@@ -32,11 +33,19 @@ read -r -p "Rebuild the images? (y/n):" rebuild
 if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
   echo "================== Removing old images =================="
   docker rmi -f tjqq/powerjob-server:$version
+  docker rmi -f powerjob/powerjob-server:$version
   docker rmi -f tjqq/powerjob-agent:$version
+  docker rmi -f powerjob/powerjob-agent:$version
+  docker rmi -f powerjob/powerjob-mysql:$version
+  docker rmi -f powerjob/powerjob-worker-samples:$version
   echo "================== Building powerjob-server image =================="
   docker build -t tjqq/powerjob-server:$version powerjob-server/docker/. || exit
   echo "================== Building powerjob-agent image =================="
   docker build -t tjqq/powerjob-agent:$version powerjob-worker-agent/. || exit
+  echo "================== Building powerjob-mysql image =================="
+  docker build -t powerjob/powerjob-mysql:$version others/. || exit
+  echo "================== Building powerjob-worker-samples image =================="
+  docker build -t powerjob/powerjob-worker-samples:$version powerjob-worker-samples/. || exit

   read -r -p "Publish these images? (y/n):" needrelease
   if [ "$needrelease" = "y" ] || [ "$needrelease" = "Y" ]; then
@@ -46,6 +55,25 @@ if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
     docker push tjqq/powerjob-server:$version
     echo "================== Pushing agent image to the central registry =================="
     docker push tjqq/powerjob-agent:$version
+    echo "================== Pushing powerjob-mysql image to the central registry =================="
+    docker push powerjob/powerjob-mysql:$version
+    echo "================== Pushing samples image to the central registry =================="
+    docker push powerjob/powerjob-worker-samples:$version
+    echo "================== Dual-namespace push =================="
+    docker tag tjqq/powerjob-server:$version powerjob/powerjob-server:$version
+    docker push powerjob/powerjob-server:$version
+    docker tag tjqq/powerjob-agent:$version powerjob/powerjob-agent:$version
+    docker push powerjob/powerjob-agent:$version
+    echo "================== Updating LATEST tags =================="
+    docker tag powerjob/powerjob-server:$version powerjob/powerjob-server:latest
+    docker push powerjob/powerjob-server:latest
+    docker tag powerjob/powerjob-agent:$version powerjob/powerjob-agent:latest
+    docker push powerjob/powerjob-agent:latest
+    docker tag powerjob/powerjob-mysql:$version powerjob/powerjob-mysql:latest
+    docker push powerjob/powerjob-mysql:latest
+    docker tag powerjob/powerjob-worker-samples:$version powerjob/powerjob-worker-samples:latest
+    docker push powerjob/powerjob-worker-samples:latest
+    echo "================== Docker push complete =================="
   fi
 fi
 fi
@@ -62,8 +90,10 @@ if [ "$startup" = "y" ] || [ "$startup" = "Y" ]; then
   echo "================== Starting powerjob-server =================="
   docker run -d \
     --name powerjob-server \
-    -p 7700:7700 -p 10086:10086 \
+    -p 7700:7700 -p 10086:10086 -p 10010:10010 -p 5001:5005 -p 10001:10000 \
+    -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
-    -e PARAMS="--spring.profiles.active=pre" \
+    -e PARAMS="--spring.profiles.active=pre" \
+    -e TZ="Asia/Shanghai" \
     -v ~/docker/powerjob-server:/root/powerjob-server -v ~/.m2:/root/.m2 \
     tjqq/powerjob-server:$version
   sleep 1
@@ -74,8 +104,21 @@ if [ "$startup" = "y" ] || [ "$startup" = "Y" ]; then
   serverIP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' powerjob-server)
   serverAddress="$serverIP:7700"
   echo "Server address in use: $serverAddress"
-  docker run -d -e PARAMS="--app powerjob-agent-test --server $serverAddress" -p 27777:27777 --name powerjob-agent -v ~/docker/powerjob-agent:/root tjqq/powerjob-agent:$version
-  docker run -d -e PARAMS="--app powerjob-agent-test --server $serverAddress" -p 27778:27777 --name powerjob-agent2 -v ~/docker/powerjob-agent2:/root tjqq/powerjob-agent:$version
+  docker run -d \
+    --name powerjob-agent \
+    -p 27777:27777 -p 5002:5005 -p 10002:10000 \
+    -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
+    -e PARAMS="--app powerjob-agent-test --server $serverAddress" \
+    -v ~/docker/powerjob-agent:/root \
+    tjqq/powerjob-agent:$version
+
+  docker run -d \
+    --name powerjob-agent2 \
+    -p 27778:27777 -p 5003:5005 -p 10003:10000 \
+    -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005" \
+    -e PARAMS="--app powerjob-agent-test --server $serverAddress" \
+    -v ~/docker/powerjob-agent2:/root \
+    tjqq/powerjob-agent:$version

   tail -f -n 100 ~/docker/powerjob-agent/powerjob/logs/powerjob-agent-application.log
 fi
71
others/dev/publish_docker_apple_silicon.sh
Executable file
@@ -0,0 +1,71 @@
#!/bin/bash
echo "A docker image release script for the Apple Silicon device."
# -p: show a prompt string  -r: read raw input, do not interpret escapes
read -r -p "Enter the Docker image version:" version
echo "Server image to build: powerjob-server:$version"
echo "Agent image to build: powerjob-agent:$version"
read -r -p "Press any key to continue:"

# One-click deploy script; do not move this file
cd `dirname $0`/../.. || exit

read -r -p "Run the maven build? (y/n):" needmvn
if [ "$needmvn" = "y" ] || [ "$needmvn" = "Y" ]; then
  echo "================== Building jar =================="
  # mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
  # -U: force-check snapshot repos  -pl: modules to build, comma-separated  -am: also build required modules, usually combined with -pl  -Pxxx: profile to activate
  mvn clean package -Pdev -DskipTests -U -e
  echo "================== Copying jar =================="
  /bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
  /bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
  ls -l powerjob-server/docker/powerjob-server.jar
  ls -l powerjob-worker-agent/powerjob-agent.jar
fi

echo "================== Stopping old containers =================="
docker stop powerjob-server
docker stop powerjob-agent
docker stop powerjob-agent2
echo "================== Removing old containers =================="
docker container rm powerjob-server
docker container rm powerjob-agent
docker container rm powerjob-agent2
read -r -p "Build and publish the images? (y/n):" rebuild
if [ "$rebuild" = "y" ] || [ "$rebuild" = "Y" ]; then
  echo "================== Removing old images =================="
  docker rmi -f tjqq/powerjob-server:$version
  docker rmi -f powerjob/powerjob-server:$version
  docker rmi -f tjqq/powerjob-agent:$version
  docker rmi -f powerjob/powerjob-agent:$version
  docker rmi -f powerjob/powerjob-mysql:$version
  docker rmi -f powerjob/powerjob-worker-samples:$version
  echo "================== Building powerjob-server image (tjqq) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-server:$version powerjob-server/docker/. --push || exit
  echo "================== Building powerjob-server image (powerjob) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-server:$version powerjob-server/docker/. --push || exit
  echo "================== Building powerjob-agent image (tjqq) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-agent:$version powerjob-worker-agent/. --push || exit
  echo "================== Building powerjob-agent image (powerjob) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-agent:$version powerjob-worker-agent/. --push || exit
  echo "================== Building powerjob-mysql image =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-mysql:$version others/. --push || exit
  echo "================== Building powerjob-worker-samples image =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-worker-samples:$version powerjob-worker-samples/. --push || exit
fi

read -r -p "Push LATEST? (y/n):" push_latest
if [ "$push_latest" = "y" ] || [ "$push_latest" = "Y" ]; then

  echo "================== powerjob-server LATEST (tjqq) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-server:latest powerjob-server/docker/. --push || exit
  echo "================== powerjob-server LATEST (powerjob) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-server:latest powerjob-server/docker/. --push || exit
  echo "================== powerjob-agent LATEST (tjqq) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag tjqq/powerjob-agent:latest powerjob-worker-agent/. --push || exit
  echo "================== powerjob-agent LATEST (powerjob) =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-agent:latest powerjob-worker-agent/. --push || exit
  echo "================== powerjob-mysql LATEST =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-mysql:latest others/. --push || exit
  echo "================== powerjob-worker-samples LATEST =================="
  docker buildx build --platform=linux/amd64,linux/arm64 --tag powerjob/powerjob-worker-samples:latest powerjob-worker-samples/. --push || exit
fi
12
others/dev/testenv/init_mongodb.js
Normal file
@@ -0,0 +1,12 @@
db.createUser(
  {
    user: "zqq",
    pwd: "No1Bug2Please3!",
    roles: [
      {
        role: "readWrite",
        db: "powerjob_daily"
      }
    ]
  }
);
BIN
others/images/user.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 209 KiB
@@ -1,105 +0,0 @@
# 2020.4.8 First round of testing
## Test cases
* MapReduce task: http://localhost:7700/job/save?appId=1&concurrency=5&executeType=MAP_REDUCE&groupName=null&instanceRetryNum=3&instanceTimeLimit=4545454545&jobDescription=jobDescription&jobName=testJob&jobParams=%7B%22a%22%3A%22b%22%7D&maxInstanceNum=1&processorInfo=com.github.kfcfans.powerjob.processors.TestMapReduceProcessor&processorType=EMBEDDED_JAVA&status=1&taskRetryNum=3&taskTimeLimit=564465656&timeExpression=0%20*%20*%20*%20*%20%3F%20&timeExpressionType=CRON

## Issue log
#### Task finished successfully, but releasing resources failed
After the first task finished, the resource-release phase (deleting all records from the local H2 database) failed with the following stack:
```text
2020-04-08 10:09:19 INFO - [ProcessorTracker-1586311659084] mission complete, ProcessorTracker already destroyed!
2020-04-08 10:09:19 ERROR - [TaskPersistenceService] deleteAllTasks failed, instanceId=1586311659084.
java.lang.InterruptedException: sleep interrupted
    at java.lang.Thread.sleep(Native Method)
    at CommonUtils.executeWithRetry(CommonUtils.java:34)
    at TaskPersistenceService.execute(TaskPersistenceService.java:297)
    at TaskPersistenceService.deleteAllTasks(TaskPersistenceService.java:269)
    at CommonTaskTracker.destroy(TaskTracker.java:231)
    at CommonTaskTracker$StatusCheckRunnable.innerRun(TaskTracker.java:421)
    at CommonTaskTracker$StatusCheckRunnable.run(TaskTracker.java:467)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
    at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
    at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
2020-04-08 10:09:19 WARN - [TaskTracker-1586311659084] delete tasks from database failed.
2020-04-08 10:09:19 INFO - [TaskTracker-1586311659084] TaskTracker has left the world.
```
The second task dispatched by the server then could not be created either; stack:
```text
2020-04-08 10:10:08 ERROR - [TaskPersistenceService] save task TaskDO{taskId='0', jobId='1', instanceId='1586311804030', taskName='OMS_ROOT_TASK', address='10.37.129.2:2777', status=1, result='null', failedCnt=0, createdTime=1586311808295, lastModifiedTime=1586311808295} failed.
2020-04-08 10:10:08 ERROR - [TaskTracker-1586311804030] create root task failed.
[ERROR] [04/08/2020 10:10:08.511] [oms-akka.actor.internal-dispatcher-20] [akka://oms/user/task_tracker] create root task failed.
java.lang.RuntimeException: create root task failed.
    at CommonTaskTracker.persistenceRootTask(TaskTracker.java:208)
    at CommonTaskTracker.<init>(TaskTracker.java:81)
    at TaskTrackerActor.lambda$onReceiveServerScheduleJobReq$2(TaskTrackerActor.java:138)
    at java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1660)
    at TaskTrackerPool.atomicCreateTaskTracker(TaskTrackerPool.java:30)
    at TaskTrackerActor.onReceiveServerScheduleJobReq(TaskTrackerActor.java:138)
```
***
Cause and fix: destroy() called scheduledPool.shutdownNow(), which force-closed the very thread pool the method was running on, so the method itself was interrupted mid-way. The deletion stopped halfway, the database state was left corrupted, and the subsequent inserts naturally failed as well.
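A minimal, hypothetical Java sketch of that failure mode (invented names, not the PowerJob source): a task that calls shutdownNow() on the very pool it runs in interrupts itself, so any sleep-based retry in the cleanup dies with InterruptedException.

```java
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class SelfShutdownDemo {

    private static final ScheduledExecutorService POOL = Executors.newScheduledThreadPool(1);

    public static void main(String[] args) {
        POOL.schedule(SelfShutdownDemo::destroy, 100, TimeUnit.MILLISECONDS);
    }

    // Runs ON a pool thread. shutdownNow() interrupts every pool thread,
    // including this one, so the sleep-based retry below is killed.
    static void destroy() {
        POOL.shutdownNow();       // interrupts the current thread too
        try {
            Thread.sleep(10);     // stands in for a retry loop like executeWithRetry
            System.out.println("cleanup finished");
        } catch (InterruptedException e) {
            System.err.println("cleanup interrupted: " + e.getMessage());
            // fix: shut the pool down only AFTER the cleanup completes,
            // or run the cleanup outside the pool being shut down
        }
    }
}
```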

# 2020.4.11 "Cluster" testing
#### Task retry mechanism broken
Cause: SQL's now() returns a DATETIME, which cannot be read into an int/bigint...
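For illustration only, a hedged JDBC sketch of that type mismatch (using an in-memory H2 database): the DATETIME has to be fetched as a Timestamp and converted explicitly, never read as a long.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.Timestamp;

public class NowDemo {
    public static void main(String[] args) throws Exception {
        try (Connection c = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement s = c.createStatement();
             ResultSet rs = s.executeQuery("SELECT now()")) {
            rs.next();
            // reading this column as a long/int is what broke the retry logic;
            // fetch it as a Timestamp and convert explicitly instead
            Timestamp ts = rs.getTimestamp(1);
            long millis = ts.getTime();
            System.out.println(millis);
        }
    }
}
```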

#### SystemMetric scoring issue
Problem: java.lang.management.OperatingSystemMXBean#getSystemLoadAverage does not reliably report the current CPU load; it may return a negative value meaning "unavailable"...
Fix: on Windows, getSystemLoadAverage() always returns -1... painful... add a protective check for now and keep testing.
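The protective check might look like this sketch (the 0.5 fallback is an assumption, not necessarily the value PowerJob uses):

```java
import java.lang.management.ManagementFactory;

public class LoadAverageDemo {
    public static void main(String[] args) {
        double load = ManagementFactory.getOperatingSystemMXBean().getSystemLoadAverage();
        // Negative means "not available" (always the case on Windows);
        // fall back to a neutral score instead of propagating -1
        if (load < 0) {
            load = 0.5; // hypothetical fallback value
        }
        System.out.println("system load used for scoring: " + load);
    }
}
```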

#### Unexplained IndexOutOfBounds (possibly a database performance issue)
Problem: on the fourth run of a second-level Broadcast task, when the processor reported its final status the TaskTracker errored; the root cause is that no record for that task could be found in the database...
Scenario: time expression FIX_DELAY, so the TaskTracker is a FrequentTaskTracker

Stack trace
```text
2020-04-16 18:05:09 ERROR - [TaskPersistenceService] getTaskStatus failed, instanceId=1586857062542,taskId=4.
java.lang.IndexOutOfBoundsException: Index: 0, Size: 0
    at java.util.LinkedList.checkElementIndex(LinkedList.java:555)
    at java.util.LinkedList.get(LinkedList.java:476)
    at TaskPersistenceService.lambda$getTaskStatus$10(TaskPersistenceService.java:214)
    at CommonUtils.executeWithRetry(CommonUtils.java:37)
    at TaskPersistenceService.execute(TaskPersistenceService.java:310)
    at TaskPersistenceService.getTaskStatus(TaskPersistenceService.java:212)
    at TaskTracker.updateTaskStatus(TaskTracker.java:107)
    at TaskTracker.broadcast(TaskTracker.java:214)
    at TaskTrackerActor.onReceiveBroadcastTaskPreExecuteFinishedReq(TaskTrackerActor.java:106)
    at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:24)
    at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:20)
    at scala.PartialFunction.applyOrElse(PartialFunction.scala:187)
    at scala.PartialFunction.applyOrElse$(PartialFunction.scala:186)
    at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:20)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:241)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:242)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:242)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:242)
    at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:242)
    at akka.actor.Actor.aroundReceive(Actor.scala:534)
    at akka.actor.Actor.aroundReceive$(Actor.scala:532)
    at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:220)
    at akka.actor.ActorCell.receiveMessage(ActorCell.scala:573)
    at akka.actor.ActorCell.invoke(ActorCell.scala:543)
    at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:269)
    at akka.dispatch.Mailbox.run(Mailbox.scala:230)
    at akka.dispatch.Mailbox.exec(Mailbox.scala:242)
    at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
    at java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056)
    at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692)
    at java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157)
2020-04-16 18:05:09 WARN - [TaskTracker-1586857062542] query TaskStatus from DB failed when try to update new TaskStatus(taskId=4,newStatus=6).
```
Fix: initial suspicion is that under back-to-back updates, database locking makes the row invisible (H2's exact behavior here is unknown). Updates for the same taskId therefore have to be serialized -> synchronized, yes!
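One common way to serialize updates per taskId, sketched with invented names (the actual fix in the code base may differ): take a per-key lock from a ConcurrentHashMap so writers for the same task never interleave.

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class TaskStatusUpdater {

    // one lock object per taskId, created on first use
    private final Map<String, Object> locks = new ConcurrentHashMap<>();

    public void updateTaskStatus(String taskId, int newStatus) {
        Object lock = locks.computeIfAbsent(taskId, id -> new Object());
        synchronized (lock) {
            // the DB read-modify-write happens here; because all writers for
            // this taskId share the lock, the row is never updated concurrently
            persist(taskId, newStatus);
        }
    }

    private void persist(String taskId, int newStatus) {
        System.out.printf("task %s -> status %d%n", taskId, newStatus);
    }
}
```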

# 2020.4.20 Pre-1.0.0-release testing
#### Server & Worker
* Run on designated machines -> passed
* Map/MapReduce/Standalone/Broadcast/Shell/Python processors -> passed
* Timeout failure -> passed
* Destructive test, configuring a non-existent processor -> found a deadlock (TT creates PT; PT creation fails, so it cannot report heartbeats; TT, hearing nothing from PT for a long time, considers it dead (it really is), and has no usable PT left to re-dispatch the task to, so the deadlock is complete T_T). Fixed by guaranteeing that the ProcessorTracker itself is always created successfully; if building the actual processor fails, every task submitted afterwards immediately returns an error (see the sketch after this list).
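A loose, hypothetical sketch of that fail-fast shape (invented names, not the real ProcessorTracker): construction never throws, a broken processor is remembered, and every later submission is rejected immediately instead of silently dropping heartbeats.

```java
public class ProcessorTracker {

    private final Object processor;   // null when construction failed
    private final Exception buildError;

    public ProcessorTracker(String processorInfo) {
        Object p = null;
        Exception err = null;
        try {
            p = Class.forName(processorInfo).getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            err = e;                  // swallow: the tracker itself must still exist
        }
        this.processor = p;
        this.buildError = err;
    }

    /** Heartbeats keep flowing even when the processor is broken. */
    public boolean submitTask(Runnable task) {
        if (processor == null) {
            System.err.println("processor unavailable, task fails fast: " + buildError);
            return false;             // report failure instead of deadlocking
        }
        task.run();
        return true;
    }
}
```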
#### Client
* StopInstance -> success
* FetchInstanceStatus -> success

@@ -1,34 +0,0 @@
# Container test log
## ClassNotFound issue
> Playing with hot class loading wouldn't feel right without a few ClassNotFounds~

Testing a containerized MapReduce task produced the following error:
```text
2020-05-19 09:33:18 ERROR - [ProcessorRunnable-142925055284740224] execute failed, please fix this bug @tjq!
com.esotericsoftware.kryo.KryoException: Unable to find class: cn.edu.zju.oms.container.ContainerMRProcessor$TestSubTask
    at com.esotericsoftware.kryo.util.DefaultClassResolver.readName(DefaultClassResolver.java:182)
    at com.esotericsoftware.kryo.util.DefaultClassResolver.readClass(DefaultClassResolver.java:151)
    at com.esotericsoftware.kryo.Kryo.readClass(Kryo.java:684)
    at com.esotericsoftware.kryo.Kryo.readClassAndObject(Kryo.java:795)
    at SerializerUtils.deSerialized(SerializerUtils.java:48)
    at ProcessorRunnable.innerRun(ProcessorRunnable.java:63)
    at ProcessorRunnable.run(ProcessorRunnable.java:179)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266)
    at java.util.concurrent.FutureTask.run(FutureTask.java)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.lang.ClassNotFoundException: cn.edu.zju.oms.container.ContainerMRProcessor$TestSubTask
    at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:355)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:348)
    at com.esotericsoftware.kryo.util.DefaultClassResolver.readName(DefaultClassResolver.java:176)
    ... 12 common frames omitted
```

* Cause: to chase performance, the (de)serialization code used an **object pool** (archived code: a14f554e0085b6a179375a8ca04665434b73c7bd#SerializerUtils), but Kryo only ever uses one fixed class loader during (de)serialization, namely the loader of the class object that created the Kryo instance (Kryo.class), so it cannot find container classes created by OMS's custom class loader.
* Fix: drop the high-performance object pool and switch to ThreadLocal plus an explicitly set Kryo class loader.
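The ThreadLocal + setClassLoader pattern could look like the following sketch (not the archived SerializerUtils code):

```java
import com.esotericsoftware.kryo.Kryo;

public class KryoHolder {

    // one Kryo instance per thread; Kryo is not thread-safe
    private static final ThreadLocal<Kryo> KRYO = ThreadLocal.withInitial(Kryo::new);

    public static Kryo get() {
        Kryo kryo = KRYO.get();
        // point Kryo at the caller's context class loader so classes loaded
        // by the container's custom class loader can be resolved
        kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
        return kryo;
    }
}
```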
@@ -1,100 +0,0 @@
## V1.0.0
#### Persistence path
1. The client reports status to the server asynchronously, batching through an in-memory queue (sketched after the lists below)
2. The server writes every incoming request straight into its H2 database
3. When the task finishes, the data is streamed into MongoDB for durable storage, maintained as one MongoDB document containing an Array
4. After the sync completes, all local data is deleted
#### Query path
* If the data still exists locally, answer straight from the local database
* Otherwise, go directly to MongoDB, fetch the data, and return it
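Step 1 might be sketched like this (invented names; the batch size and queue capacity are assumptions): worker threads enqueue into a bounded in-memory queue, and a background loop drains it and ships batches.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class LogReporter {

    private final BlockingQueue<String> buffer = new LinkedBlockingQueue<>(10_000);

    /** Called from worker threads: enqueue and return immediately. */
    public void report(String logLine) {
        buffer.offer(logLine); // drop on overflow rather than block the worker
    }

    /** Background loop: drain the queue and ship one batch at a time. */
    public void flushLoop() throws InterruptedException {
        List<String> batch = new ArrayList<>();
        while (!Thread.currentThread().isInterrupted()) {
            String first = buffer.poll(1, TimeUnit.SECONDS);
            if (first == null) {
                continue;
            }
            batch.add(first);
            buffer.drainTo(batch, 999); // up to 1000 lines per request
            sendToServer(batch);
            batch.clear();
        }
    }

    private void sendToServer(List<String> batch) {
        System.out.println("reporting " + batch.size() + " lines");
    }
}
```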

***
The real problem is frontend display: with 1,000,000 rows of test data, the local H2 files take 82 MB; MongoDB's footprint is unknown (the mongo shell refused to work for some reason...), but it's presumably not much smaller. At this size the data cannot even be shipped back to the browser... the design needs a rethink.

```text
org.apache.catalina.connector.ClientAbortException: java.io.IOException: Broken pipe
    at org.apache.catalina.connector.OutputBuffer.realWriteBytes(OutputBuffer.java:351)
    at org.apache.catalina.connector.OutputBuffer.flushByteBuffer(OutputBuffer.java:776)
    at org.apache.catalina.connector.OutputBuffer.append(OutputBuffer.java:681)
    at org.apache.catalina.connector.OutputBuffer.writeBytes(OutputBuffer.java:386)
    at org.apache.catalina.connector.OutputBuffer.write(OutputBuffer.java:364)
    at org.apache.catalina.connector.CoyoteOutputStream.write(CoyoteOutputStream.java:96)
    at com.fasterxml.jackson.core.json.UTF8JsonGenerator._flushBuffer(UTF8JsonGenerator.java:2137)
    at com.fasterxml.jackson.core.json.UTF8JsonGenerator.flush(UTF8JsonGenerator.java:1150)
    at com.fasterxml.jackson.databind.ObjectWriter.writeValue(ObjectWriter.java:923)
    at org.springframework.http.converter.json.AbstractJackson2HttpMessageConverter.writeInternal(AbstractJackson2HttpMessageConverter.java:287)
    at org.springframework.http.converter.AbstractGenericHttpMessageConverter.write(AbstractGenericHttpMessageConverter.java:104)
    at org.springframework.web.servlet.mvc.method.annotation.AbstractMessageConverterMethodProcessor.writeWithMessageConverters(AbstractMessageConverterMethodProcessor.java:287)
    at org.springframework.web.servlet.mvc.method.annotation.RequestResponseBodyMethodProcessor.handleReturnValue(RequestResponseBodyMethodProcessor.java:181)
    at org.springframework.web.method.support.HandlerMethodReturnValueHandlerComposite.handleReturnValue(HandlerMethodReturnValueHandlerComposite.java:82)
    at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:123)
    at org.springframework.web.servlet.mvc.method.annotation.ExceptionHandlerExceptionResolver.doResolveHandlerMethodException(ExceptionHandlerExceptionResolver.java:403)
    at org.springframework.web.servlet.handler.AbstractHandlerMethodExceptionResolver.doResolveException(AbstractHandlerMethodExceptionResolver.java:61)
    at org.springframework.web.servlet.handler.AbstractHandlerExceptionResolver.resolveException(AbstractHandlerExceptionResolver.java:141)
    at org.springframework.web.servlet.handler.HandlerExceptionResolverComposite.resolveException(HandlerExceptionResolverComposite.java:80)
    at org.springframework.web.servlet.DispatcherServlet.processHandlerException(DispatcherServlet.java:1300)
    at org.springframework.web.servlet.DispatcherServlet.processDispatchResult(DispatcherServlet.java:1111)
    at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1057)
    at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:943)
    at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:1006)
    at org.springframework.web.servlet.FrameworkServlet.doGet(FrameworkServlet.java:898)
    at javax.servlet.http.HttpServlet.service(HttpServlet.java:634)
    at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:883)
    at javax.servlet.http.HttpServlet.service(HttpServlet.java:741)
    at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:231)
    at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
    at org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:53)
    at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
    at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
    at org.springframework.web.filter.RequestContextFilter.doFilterInternal(RequestContextFilter.java:100)
    at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119)
    at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
    at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
    at org.springframework.web.filter.FormContentFilter.doFilterInternal(FormContentFilter.java:93)
    at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119)
    at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
    at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
    at org.springframework.web.filter.CharacterEncodingFilter.doFilterInternal(CharacterEncodingFilter.java:201)
    at org.springframework.web.filter.OncePerRequestFilter.doFilter(OncePerRequestFilter.java:119)
    at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
    at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
    at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:202)
    at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:96)
    at org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:541)
    at org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:139)
    at org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:92)
    at org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:74)
    at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:343)
    at org.apache.coyote.http11.Http11Processor.service(Http11Processor.java:373)
    at org.apache.coyote.AbstractProcessorLight.process(AbstractProcessorLight.java:65)
    at org.apache.coyote.AbstractProtocol$ConnectionHandler.process(AbstractProtocol.java:868)
    at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1594)
    at org.apache.tomcat.util.net.SocketProcessorBase.run(SocketProcessorBase.java:49)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)
    at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.IOException: Broken pipe
    at sun.nio.ch.FileDispatcherImpl.write0(Native Method)
    at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:47)
    at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:93)
    at sun.nio.ch.IOUtil.write(IOUtil.java:65)
    at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:471)
    at org.apache.tomcat.util.net.NioChannel.write(NioChannel.java:138)
    at org.apache.tomcat.util.net.NioBlockingSelector.write(NioBlockingSelector.java:101)
    at org.apache.tomcat.util.net.NioSelectorPool.write(NioSelectorPool.java:152)
    at org.apache.tomcat.util.net.NioEndpoint$NioSocketWrapper.doWrite(NioEndpoint.java:1253)
    at org.apache.tomcat.util.net.SocketWrapperBase.doWrite(SocketWrapperBase.java:740)
    at org.apache.tomcat.util.net.SocketWrapperBase.writeBlocking(SocketWrapperBase.java:560)
    at org.apache.tomcat.util.net.SocketWrapperBase.write(SocketWrapperBase.java:504)
    at org.apache.coyote.http11.Http11OutputBuffer$SocketOutputBuffer.doWrite(Http11OutputBuffer.java:538)
    at org.apache.coyote.http11.filters.ChunkedOutputFilter.doWrite(ChunkedOutputFilter.java:110)
    at org.apache.coyote.http11.Http11OutputBuffer.doWrite(Http11OutputBuffer.java:190)
    at org.apache.coyote.Response.doWrite(Response.java:601)
    at org.apache.catalina.connector.OutputBuffer.realWriteBytes(OutputBuffer.java:339)
    ... 60 common frames omitted

```

## V2.0.0
> After a little digging: MongoDB apparently lets users use its file system, GridFS, directly for file storage. So should the sync become file-to-file? When a sync starts, first generate the log file locally, then sync it into MongoDB; a query downloads the file first. Once the complete file is available, pagination becomes easy as well, e.g. the frontend showing 1000 lines at a time~
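A hedged sketch of that idea against the official MongoDB sync driver (database, bucket, and file names are made up):

```java
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.gridfs.GridFSBucket;
import com.mongodb.client.gridfs.GridFSBuckets;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;

public class GridFsLogStore {
    public static void main(String[] args) throws Exception {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            GridFSBucket bucket =
                    GridFSBuckets.create(client.getDatabase("powerjob_daily"), "instance_logs");

            // sync: upload the locally generated log file
            try (InputStream in = new FileInputStream("/tmp/instance-123.log")) {
                bucket.uploadFromStream("instance-123.log", in);
            }

            // query: download the whole file, then paginate locally
            try (OutputStream out = new FileOutputStream("/tmp/instance-123.download.log")) {
                bucket.downloadToStream("instance-123.log", out);
            }
        }
    }
}
```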
@@ -1,197 +0,0 @@
/*
 Navicat Premium Data Transfer

 Source Server Type    : MySQL
 Source Server Version : 50724
 Source Schema         : oms-product

 Target Server Type    : MySQL
 Target Server Version : 50724
 File Encoding         : 65001

 Date: 07/06/2020 11:11:47
*/

SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;

-- ----------------------------
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `app_name` varchar(255) DEFAULT NULL,
  `current_server` varchar(255) DEFAULT NULL,
  `description` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `appNameUK` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `app_id` bigint(20) DEFAULT NULL,
  `container_name` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `last_deploy_time` datetime(6) DEFAULT NULL,
  `source_info` varchar(255) DEFAULT NULL,
  `source_type` int(11) DEFAULT NULL,
  `status` int(11) DEFAULT NULL,
  `version` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `IDX8hixyaktlnwil2w9up6b0p898` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint(20) DEFAULT NULL,
  `app_id` bigint(20) DEFAULT NULL,
  `expected_trigger_time` bigint(20) DEFAULT NULL,
  `finished_time` bigint(20) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `instance_id` bigint(20) DEFAULT NULL,
  `instance_params` text,
  `job_id` bigint(20) DEFAULT NULL,
  `result` text,
  `running_times` bigint(20) DEFAULT NULL,
  `status` int(11) DEFAULT NULL,
  `task_tracker_address` varchar(255) DEFAULT NULL,
  `type` int(11) DEFAULT NULL,
  `wf_instance_id` bigint(20) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `IDX5b1nhpe5je7gc5s1ur200njr7` (`job_id`),
  KEY `IDXjnji5lrr195kswk6f7mfhinrs` (`app_id`),
  KEY `IDXa98hq3yu0l863wuotdjl7noum` (`instance_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `app_id` bigint(20) DEFAULT NULL,
  `concurrency` int(11) DEFAULT NULL,
  `designated_workers` varchar(255) DEFAULT NULL,
  `execute_type` int(11) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `instance_retry_num` int(11) DEFAULT NULL,
  `instance_time_limit` bigint(20) DEFAULT NULL,
  `job_description` varchar(255) DEFAULT NULL,
  `job_name` varchar(255) DEFAULT NULL,
  `job_params` varchar(255) DEFAULT NULL,
  `max_instance_num` int(11) DEFAULT NULL,
  `max_worker_count` int(11) DEFAULT NULL,
  `min_cpu_cores` double NOT NULL,
  `min_disk_space` double NOT NULL,
  `min_memory_space` double NOT NULL,
  `next_trigger_time` bigint(20) DEFAULT NULL,
  `notify_user_ids` varchar(255) DEFAULT NULL,
  `processor_info` text,
  `processor_type` int(11) DEFAULT NULL,
  `status` int(11) DEFAULT NULL,
  `task_retry_num` int(11) DEFAULT NULL,
  `time_expression` varchar(255) DEFAULT NULL,
  `time_expression_type` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `IDXk2xprmn3lldmlcb52i36udll1` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `lock_name` varchar(255) DEFAULT NULL,
  `max_lock_time` bigint(20) DEFAULT NULL,
  `ownerip` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `lockNameUK` (`lock_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `ip` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `UKtk8ytgpl7mpukhnvhbl82kgvy` (`ip`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4;

-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `email` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `phone` varchar(255) DEFAULT NULL,
  `username` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `app_id` bigint(20) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `max_wf_instance_num` int(11) DEFAULT NULL,
  `next_trigger_time` bigint(20) DEFAULT NULL,
  `notify_user_ids` varchar(255) DEFAULT NULL,
  `pedag` text,
  `status` int(11) DEFAULT NULL,
  `time_expression` varchar(255) DEFAULT NULL,
  `time_expression_type` int(11) DEFAULT NULL,
  `wf_description` varchar(255) DEFAULT NULL,
  `wf_name` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `IDX7uo5w0e3beeho3fnx9t7eiol3` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
  `id` bigint(20) NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint(20) DEFAULT NULL,
  `app_id` bigint(20) DEFAULT NULL,
  `dag` text,
  `finished_time` bigint(20) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `result` text,
  `status` int(11) DEFAULT NULL,
  `wf_instance_id` bigint(20) DEFAULT NULL,
  `workflow_id` bigint(20) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

SET FOREIGN_KEY_CHECKS = 1;
328
others/powerjob-mysql.sql
Normal file
@@ -0,0 +1,328 @@
/*
 The official SQL is exported from one specific version (MySQL 8) and is not guaranteed to be compatible with other databases or other versions. This SQL is for reference only.
 If this SQL does not work on your database, it is recommended to use Spring Data JPA's built-in schema generation: point a development environment directly at a test database to auto-create the tables, then export the resulting SQL yourself.
*/

/*
 Navicat Premium Data Transfer

 Source Server         : Local@3306
 Source Server Type    : MySQL
 Source Server Version : 80300 (8.3.0)
 Source Host           : localhost:3306
 Source Schema         : powerjob5

 Target Server Type    : MySQL
 Target Server Version : 80300 (8.3.0)
 File Encoding         : 65001

 Date: 11/08/2024 23:23:30
*/

SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;

-- ----------------------------
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_name` varchar(255) DEFAULT NULL,
  `creator` bigint DEFAULT NULL,
  `current_server` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `modifier` bigint DEFAULT NULL,
  `namespace_id` bigint DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `tags` varchar(255) DEFAULT NULL,
  `title` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint DEFAULT NULL,
  `container_name` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `last_deploy_time` datetime(6) DEFAULT NULL,
  `source_info` varchar(255) DEFAULT NULL,
  `source_type` int DEFAULT NULL,
  `status` int DEFAULT NULL,
  `version` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `expected_trigger_time` bigint DEFAULT NULL,
  `finished_time` bigint DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `instance_id` bigint DEFAULT NULL,
  `instance_params` longtext,
  `job_id` bigint DEFAULT NULL,
  `job_params` longtext,
  `last_report_time` bigint DEFAULT NULL,
  `result` longtext,
  `running_times` bigint DEFAULT NULL,
  `status` int DEFAULT NULL,
  `task_tracker_address` varchar(255) DEFAULT NULL,
  `type` int DEFAULT NULL,
  `wf_instance_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_instance_info` (`job_id`,`status`),
  KEY `idx02_instance_info` (`app_id`,`status`),
  KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `advanced_runtime_config` varchar(255) DEFAULT NULL,
  `alarm_config` varchar(255) DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `concurrency` int DEFAULT NULL,
  `designated_workers` varchar(255) DEFAULT NULL,
  `dispatch_strategy` int DEFAULT NULL,
  `dispatch_strategy_config` varchar(255) DEFAULT NULL,
  `execute_type` int DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `instance_retry_num` int DEFAULT NULL,
  `instance_time_limit` bigint DEFAULT NULL,
  `job_description` varchar(255) DEFAULT NULL,
  `job_name` varchar(255) DEFAULT NULL,
  `job_params` longtext,
  `lifecycle` varchar(255) DEFAULT NULL,
  `log_config` varchar(255) DEFAULT NULL,
  `max_instance_num` int DEFAULT NULL,
  `max_worker_count` int DEFAULT NULL,
  `min_cpu_cores` double NOT NULL,
  `min_disk_space` double NOT NULL,
  `min_memory_space` double NOT NULL,
  `next_trigger_time` bigint DEFAULT NULL,
  `notify_user_ids` varchar(255) DEFAULT NULL,
  `processor_info` varchar(255) DEFAULT NULL,
  `processor_type` int DEFAULT NULL,
  `status` int DEFAULT NULL,
  `tag` varchar(255) DEFAULT NULL,
  `task_retry_num` int DEFAULT NULL,
  `time_expression` varchar(255) DEFAULT NULL,
  `time_expression_type` int DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for namespace
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `code` varchar(255) DEFAULT NULL,
  `creator` bigint DEFAULT NULL,
  `dept` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `modifier` bigint DEFAULT NULL,
  `name` varchar(255) DEFAULT NULL,
  `status` int DEFAULT NULL,
  `tags` varchar(255) DEFAULT NULL,
  `token` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `lock_name` varchar(255) DEFAULT NULL,
  `max_lock_time` bigint DEFAULT NULL,
  `ownerip` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `username` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `ip` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_server_info` (`ip`),
  KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for sundry
-- ----------------------------
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `content` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `pkey` varchar(255) DEFAULT NULL,
  `skey` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `account_type` varchar(255) DEFAULT NULL,
  `email` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `nick` varchar(255) DEFAULT NULL,
  `origin_username` varchar(255) DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `phone` varchar(255) DEFAULT NULL,
  `status` int DEFAULT NULL,
  `token_login_verify_info` varchar(255) DEFAULT NULL,
  `username` varchar(255) DEFAULT NULL,
  `web_hook` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_user_name` (`username`),
  KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `role` int DEFAULT NULL,
  `scope` int DEFAULT NULL,
  `target` bigint DEFAULT NULL,
  `user_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `lifecycle` varchar(255) DEFAULT NULL,
  `max_wf_instance_num` int DEFAULT NULL,
  `next_trigger_time` bigint DEFAULT NULL,
  `notify_user_ids` varchar(255) DEFAULT NULL,
  `pedag` longtext,
  `status` int DEFAULT NULL,
  `time_expression` varchar(255) DEFAULT NULL,
  `time_expression_type` int DEFAULT NULL,
  `wf_description` varchar(255) DEFAULT NULL,
  `wf_name` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `dag` longtext,
  `expected_trigger_time` bigint DEFAULT NULL,
  `finished_time` bigint DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `parent_wf_instance_id` bigint DEFAULT NULL,
  `result` longtext,
  `status` int DEFAULT NULL,
  `wf_context` longtext,
  `wf_init_params` longtext,
  `wf_instance_id` bigint DEFAULT NULL,
  `workflow_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
  KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint NOT NULL,
  `enable` bit(1) NOT NULL,
  `extra` longtext,
  `gmt_create` datetime(6) NOT NULL,
  `gmt_modified` datetime(6) NOT NULL,
  `job_id` bigint DEFAULT NULL,
  `node_name` varchar(255) DEFAULT NULL,
  `node_params` longtext,
  `skip_when_failed` bit(1) NOT NULL,
  `type` int DEFAULT NULL,
  `workflow_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

SET FOREIGN_KEY_CHECKS = 1;
@@ -1,12 +0,0 @@
#!/bin/sh
# One-click deploy script; do not move this file
cd `dirname $0`/../.. || exit
echo "================== Building jar =================="
mvn clean package -DskipTests -Pdev -e -U
echo "================== Copying jar =================="
/bin/cp -rf powerjob-server/target/*.jar others/powerjob-server.jar
ls -l others/powerjob-server.jar
echo "================== Starting in debug mode =================="
nohup java -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -jar others/powerjob-server.jar > powerjob-server.log &
sleep 100
tail --pid=$$ -f -n 1000 others/powerjob-server.log
@@ -1,37 +0,0 @@
#!/bin/bash
cd `dirname $0`/../.. || exit
echo "================== Building jar =================="
mvn clean package -Pdev -DskipTests -U -e -pl powerjob-server,powerjob-worker-agent -am
echo "================== Copying jar =================="
/bin/cp -rf powerjob-server/target/*.jar powerjob-server/docker/powerjob-server.jar
/bin/cp -rf powerjob-worker-agent/target/*.jar powerjob-worker-agent/powerjob-agent.jar
echo "================== Stopping old containers =================="
docker stop powerjob-server
docker stop powerjob-agent
docker stop powerjob-agent2
echo "================== Removing old containers =================="
docker container rm powerjob-server
docker container rm powerjob-agent
docker container rm powerjob-agent2
echo "================== Removing old images =================="
docker rmi -f tjqq/powerjob-server:latest
docker rmi -f tjqq/powerjob-agent:latest
echo "================== Building powerjob-server image =================="
docker build -t tjqq/powerjob-server:latest powerjob-server/docker/. || exit
echo "================== Building powerjob-agent image =================="
docker build -t tjqq/powerjob-agent:latest powerjob-worker-agent/. || exit
echo "================== Starting powerjob-server =================="
docker run -d \
  --name powerjob-server \
  -p 7700:7700 -p 10086:10086 \
  -e PARAMS="--spring.profiles.active=product --spring.datasource.core.jdbc-url=jdbc:mysql://172.27.147.252:3306/oms-product?serverTimezone=Asia/Shanghai&useUnicode=true&characterEncoding=UTF-8 --spring.data.mongodb.uri=mongodb://172.27.147.252:27017/oms-product" \
  -v ~/docker/powerjob-server:/root/powerjob-server -v ~/.m2:/root/.m2 \
  tjqq/powerjob-server:latest
sleep 60
echo "================== Starting powerjob-client =================="
serverIP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' powerjob-server)
serverAddress="$serverIP:7700"
echo "Server address in use: $serverAddress"
docker run -d -e PARAMS="--app powerjob-agent-test --server $serverAddress" -p 27777:27777 --name powerjob-agent -v ~/docker/powerjob-agent:/root tjqq/powerjob-agent:latest
docker run -d -e PARAMS="--app powerjob-agent-test --server $serverAddress" -p 27778:27777 --name powerjob-agent2 -v ~/docker/powerjob-agent2:/root tjqq/powerjob-agent:latest
54
others/script/jenkins_auto_build.sh
Executable file
@@ -0,0 +1,54 @@
|
||||
#!/bin/bash
cd `dirname $0`/../.. || exit
echo "================== Build jar =================="
mvn clean package -Pdev -DskipTests -e
echo "================== Copy jar =================="
/bin/cp -rf powerjob-server/powerjob-server-starter/target/*.jar powerjob-server/docker/powerjob-server.jar
echo "================== Stop old applications =================="
docker stop powerjob-server
docker stop powerjob-worker-samples
docker stop powerjob-worker-samples2
echo "================== Remove old containers =================="
docker container rm powerjob-server
docker container rm powerjob-worker-samples
docker container rm powerjob-worker-samples2
echo "================== Remove old images =================="
docker rmi -f tjqq/powerjob-server:latest
docker rmi -f tjqq/powerjob-worker-samples:latest
echo "================== Build powerjob-server image =================="
docker build -t tjqq/powerjob-server:latest powerjob-server/docker/. || exit
echo "================== Build powerjob-worker-samples image =================="
docker build -t tjqq/powerjob-worker-samples:latest powerjob-worker-samples/. || exit
echo "================== Preparing to start powerjob-server =================="
docker run -d \
    --restart=always \
    --name powerjob-server \
    -p 7700:7700 -p 10086:10086 -p 5001:5005 -p 10001:10000 \
    -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
    -e PARAMS="--oms.swagger.enable=true --spring.profiles.active=product --spring.datasource.core.jdbc-url=jdbc:mysql://remotehost:3306/powerjob-product?useUnicode=true&characterEncoding=UTF-8 --oms.mongodb.enable=false --spring.data.mongodb.uri=mongodb://remotehost:27017/powerjob-product" \
    -v ~/docker/powerjob-server:/root/powerjob/server -v ~/.m2:/root/.m2 \
    tjqq/powerjob-server:latest
sleep 60
echo "================== Preparing to start powerjob-agent =================="
serverIP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' powerjob-server)
serverAddress="$serverIP:7700"
echo "Server address in use: $serverAddress"

docker run -d \
    --restart=always \
    --name powerjob-worker-samples \
    -p 27777:27777 -p 5002:5005 -p 10002:10000 \
    -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
    -e PARAMS="--powerjob.worker.server-address=$serverAddress" \
    -v ~/docker/powerjob-worker-samples:/root \
    tjqq/powerjob-worker-samples:latest

docker run -d \
    --restart=always \
    --name powerjob-worker-samples2 \
    -p 27778:27777 -p 5003:5005 -p 10003:10000 \
    -e JVMOPTIONS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=10000 -Dcom.sun.management.jmxremote.rmi.port=10000 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false" \
    -e PARAMS="--powerjob.worker.server-address=$serverAddress" \
    -v ~/docker/powerjob-worker-samples2:/root \
    tjqq/powerjob-worker-samples:latest
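The Jenkins script above publishes each container's JDWP debug port (container port 5005) and JMX port (container port 10000) on distinct host ports, so standard JDK tools can attach from the host. A sketch under those assumptions (containers on localhost, port mappings as in the script; attach-connector availability may vary by platform):

    # Attach the command-line debugger to powerjob-server's remote-debug port
    jdb -attach localhost:5001
    # Inspect powerjob-server's JMX beans (authentication and SSL are disabled by the flags above)
    jconsole localhost:10001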
182  others/script/wait-for-it.sh  Executable file
@@ -0,0 +1,182 @@
#!/usr/bin/env bash
# Use this script to test if a given TCP host/port are available

WAITFORIT_cmdname=${0##*/}

echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }

usage()
{
    cat << USAGE >&2
Usage:
    $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args]
    -h HOST | --host=HOST       Host or IP under test
    -p PORT | --port=PORT       TCP port under test
                                Alternatively, you can specify the host and port as host:port
    -s | --strict               Only execute subcommand if the test succeeds
    -q | --quiet                Don't output any status messages
    -t TIMEOUT | --timeout=TIMEOUT
                                Timeout in seconds, zero for no timeout
    -- COMMAND ARGS             Execute command with args after the test finishes
USAGE
    exit 1
}

wait_for()
{
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        echoerr "$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    else
        echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout"
    fi
    WAITFORIT_start_ts=$(date +%s)
    while :
    do
        if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then
            nc -z $WAITFORIT_HOST $WAITFORIT_PORT
            WAITFORIT_result=$?
        else
            (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1
            WAITFORIT_result=$?
        fi
        if [[ $WAITFORIT_result -eq 0 ]]; then
            WAITFORIT_end_ts=$(date +%s)
            echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds"
            break
        fi
        sleep 1
    done
    return $WAITFORIT_result
}

wait_for_wrapper()
{
    # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
    if [[ $WAITFORIT_QUIET -eq 1 ]]; then
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    else
        timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT &
    fi
    WAITFORIT_PID=$!
    trap "kill -INT -$WAITFORIT_PID" INT
    wait $WAITFORIT_PID
    WAITFORIT_RESULT=$?
    if [[ $WAITFORIT_RESULT -ne 0 ]]; then
        echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT"
    fi
    return $WAITFORIT_RESULT
}

# process arguments
while [[ $# -gt 0 ]]
do
    case "$1" in
        *:* )
        WAITFORIT_hostport=(${1//:/ })
        WAITFORIT_HOST=${WAITFORIT_hostport[0]}
        WAITFORIT_PORT=${WAITFORIT_hostport[1]}
        shift 1
        ;;
        --child)
        WAITFORIT_CHILD=1
        shift 1
        ;;
        -q | --quiet)
        WAITFORIT_QUIET=1
        shift 1
        ;;
        -s | --strict)
        WAITFORIT_STRICT=1
        shift 1
        ;;
        -h)
        WAITFORIT_HOST="$2"
        if [[ $WAITFORIT_HOST == "" ]]; then break; fi
        shift 2
        ;;
        --host=*)
        WAITFORIT_HOST="${1#*=}"
        shift 1
        ;;
        -p)
        WAITFORIT_PORT="$2"
        if [[ $WAITFORIT_PORT == "" ]]; then break; fi
        shift 2
        ;;
        --port=*)
        WAITFORIT_PORT="${1#*=}"
        shift 1
        ;;
        -t)
        WAITFORIT_TIMEOUT="$2"
        if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi
        shift 2
        ;;
        --timeout=*)
        WAITFORIT_TIMEOUT="${1#*=}"
        shift 1
        ;;
        --)
        shift
        WAITFORIT_CLI=("$@")
        break
        ;;
        --help)
        usage
        ;;
        *)
        echoerr "Unknown argument: $1"
        usage
        ;;
    esac
done

if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then
    echoerr "Error: you need to provide a host and port to test."
    usage
fi

WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15}
WAITFORIT_STRICT=${WAITFORIT_STRICT:-0}
WAITFORIT_CHILD=${WAITFORIT_CHILD:-0}
WAITFORIT_QUIET=${WAITFORIT_QUIET:-0}

# Check to see if timeout is from busybox
WAITFORIT_TIMEOUT_PATH=$(type -p timeout)
WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH)

WAITFORIT_BUSYTIMEFLAG=""
if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then
    WAITFORIT_ISBUSY=1
    # Check if busybox timeout uses -t flag
    # (recent Alpine versions don't support -t anymore)
    if timeout &>/dev/stdout | grep -q -e '-t '; then
        WAITFORIT_BUSYTIMEFLAG="-t"
    fi
else
    WAITFORIT_ISBUSY=0
fi

if [[ $WAITFORIT_CHILD -gt 0 ]]; then
    wait_for
    WAITFORIT_RESULT=$?
    exit $WAITFORIT_RESULT
else
    if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then
        wait_for_wrapper
        WAITFORIT_RESULT=$?
    else
        wait_for
        WAITFORIT_RESULT=$?
    fi
fi

if [[ $WAITFORIT_CLI != "" ]]; then
    if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then
        echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess"
        exit $WAITFORIT_RESULT
    fi
    exec "${WAITFORIT_CLI[@]}"
else
    exit $WAITFORIT_RESULT
fi
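The usage block above documents the options; a brief invocation sketch (the MySQL endpoint and the trailing commands here are placeholders):

    # Wait up to 60 seconds for the port, then run the trailing command
    ./wait-for-it.sh localhost:3306 -t 60 -- echo "mysql is up"
    # Strict mode: run the trailing command only if the port check succeeded
    ./wait-for-it.sh localhost:3306 -s -t 60 -- echo "safe to start dependents"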
2  others/sql/01schema.sql  Normal file
@@ -0,0 +1,2 @@
-- powerjob
create database `powerjob-daily` default character set utf8mb4 collate utf8mb4_general_ci;
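A sketch of applying this schema file (host and credentials are placeholders; adjust to your environment):

    mysql -h127.0.0.1 -uroot -p < others/sql/01schema.sql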
21  others/sql/02worker-samples.sql  Normal file
@@ -0,0 +1,21 @@
USE `powerjob-daily`;

SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;

CREATE TABLE IF NOT EXISTS `app_info` (
  `id` bigint NOT NULL AUTO_INCREMENT COMMENT 'App ID',
  `app_name` varchar(128) not NULL COMMENT 'App name',
  `current_server` varchar(255) default null COMMENT 'Server address: address of the ActorSystem responsible for scheduling this app',
  `gmt_create` datetime not null COMMENT 'Creation time',
  `gmt_modified` datetime not null COMMENT 'Last modified time',
  `password` varchar(255) not null COMMENT 'App password',
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE = InnoDB AUTO_INCREMENT = 1
  DEFAULT CHARSET = utf8mb4
  COLLATE = utf8mb4_general_ci COMMENT ='App info table';

insert into app_info (app_name, gmt_create, gmt_modified, password) select 'powerjob-worker-samples', current_timestamp(), current_timestamp(), 'powerjob123' from dual where not exists ( select * from app_info where app_name = 'powerjob-worker-samples');

SET FOREIGN_KEY_CHECKS = 1;
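The guarded insert-select above makes the seed idempotent: re-running the file never creates a duplicate row, because the WHERE NOT EXISTS clause skips the insert once the app is registered. A quick verification sketch (placeholder credentials):

    mysql -h127.0.0.1 -uroot -p -e "SELECT id, app_name FROM \`powerjob-daily\`.app_info WHERE app_name = 'powerjob-worker-samples'"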
243  others/sql/schema/powerjob_mysql_4.3.9.sql  Normal file
@@ -0,0 +1,243 @@
/*
 Navicat Premium Data Transfer

 Source Server         : Local@3306
 Source Server Type    : MySQL
 Source Server Version : 80300 (8.3.0)
 Source Host           : localhost:3306
 Source Schema         : powerjob4

 Target Server Type    : MySQL
 Target Server Version : 80300 (8.3.0)
 File Encoding         : 65001

 Date: 02/03/2024 18:51:36
*/

SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;

-- ----------------------------
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_name` varchar(255) DEFAULT NULL,
  `current_server` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint DEFAULT NULL,
  `container_name` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `last_deploy_time` datetime(6) DEFAULT NULL,
  `source_info` varchar(255) DEFAULT NULL,
  `source_type` int DEFAULT NULL,
  `status` int DEFAULT NULL,
  `version` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `expected_trigger_time` bigint DEFAULT NULL,
  `finished_time` bigint DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `instance_id` bigint DEFAULT NULL,
  `instance_params` longtext,
  `job_id` bigint DEFAULT NULL,
  `job_params` longtext,
  `last_report_time` bigint DEFAULT NULL,
  `result` longtext,
  `running_times` bigint DEFAULT NULL,
  `status` int DEFAULT NULL,
  `task_tracker_address` varchar(255) DEFAULT NULL,
  `type` int DEFAULT NULL,
  `wf_instance_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_instance_info` (`job_id`,`status`),
  KEY `idx02_instance_info` (`app_id`,`status`),
  KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `advanced_runtime_config` varchar(255) DEFAULT NULL,
  `alarm_config` varchar(255) DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `concurrency` int DEFAULT NULL,
  `designated_workers` varchar(255) DEFAULT NULL,
  `dispatch_strategy` int DEFAULT NULL,
  `dispatch_strategy_config` varchar(255) DEFAULT NULL,
  `execute_type` int DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `instance_retry_num` int DEFAULT NULL,
  `instance_time_limit` bigint DEFAULT NULL,
  `job_description` varchar(255) DEFAULT NULL,
  `job_name` varchar(255) DEFAULT NULL,
  `job_params` longtext,
  `lifecycle` varchar(255) DEFAULT NULL,
  `log_config` varchar(255) DEFAULT NULL,
  `max_instance_num` int DEFAULT NULL,
  `max_worker_count` int DEFAULT NULL,
  `min_cpu_cores` double NOT NULL,
  `min_disk_space` double NOT NULL,
  `min_memory_space` double NOT NULL,
  `next_trigger_time` bigint DEFAULT NULL,
  `notify_user_ids` varchar(255) DEFAULT NULL,
  `processor_info` varchar(255) DEFAULT NULL,
  `processor_type` int DEFAULT NULL,
  `status` int DEFAULT NULL,
  `tag` varchar(255) DEFAULT NULL,
  `task_retry_num` int DEFAULT NULL,
  `time_expression` varchar(255) DEFAULT NULL,
  `time_expression_type` int DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `lock_name` varchar(255) DEFAULT NULL,
  `max_lock_time` bigint DEFAULT NULL,
  `ownerip` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `ip` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_server_info` (`ip`),
  KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `email` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `phone` varchar(255) DEFAULT NULL,
  `username` varchar(255) DEFAULT NULL,
  `web_hook` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `uidx01_user_info` (`username`),
  KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `lifecycle` varchar(255) DEFAULT NULL,
  `max_wf_instance_num` int DEFAULT NULL,
  `next_trigger_time` bigint DEFAULT NULL,
  `notify_user_ids` varchar(255) DEFAULT NULL,
  `pedag` longtext,
  `status` int DEFAULT NULL,
  `time_expression` varchar(255) DEFAULT NULL,
  `time_expression_type` int DEFAULT NULL,
  `wf_description` varchar(255) DEFAULT NULL,
  `wf_name` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `dag` longtext,
  `expected_trigger_time` bigint DEFAULT NULL,
  `finished_time` bigint DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `parent_wf_instance_id` bigint DEFAULT NULL,
  `result` longtext,
  `status` int DEFAULT NULL,
  `wf_context` longtext,
  `wf_init_params` longtext,
  `wf_instance_id` bigint DEFAULT NULL,
  `workflow_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
  KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint NOT NULL,
  `enable` bit(1) NOT NULL,
  `extra` longtext,
  `gmt_create` datetime(6) NOT NULL,
  `gmt_modified` datetime(6) NOT NULL,
  `job_id` bigint DEFAULT NULL,
  `node_name` varchar(255) DEFAULT NULL,
  `node_params` longtext,
  `skip_when_failed` bit(1) NOT NULL,
  `type` int DEFAULT NULL,
  `workflow_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

SET FOREIGN_KEY_CHECKS = 1;
323  others/sql/schema/powerjob_mysql_5.0.1.sql  Normal file
@@ -0,0 +1,323 @@
/*
 Navicat Premium Data Transfer

 Source Server         : Local@3306
 Source Server Type    : MySQL
 Source Server Version : 80300 (8.3.0)
 Source Host           : localhost:3306
 Source Schema         : powerjob5

 Target Server Type    : MySQL
 Target Server Version : 80300 (8.3.0)
 File Encoding         : 65001

 Date: 16/03/2024 22:07:31
*/

SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;

-- ----------------------------
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_name` varchar(255) DEFAULT NULL,
  `creator` bigint DEFAULT NULL,
  `current_server` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `modifier` bigint DEFAULT NULL,
  `namespace_id` bigint DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `tags` varchar(255) DEFAULT NULL,
  `title` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint DEFAULT NULL,
  `container_name` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `last_deploy_time` datetime(6) DEFAULT NULL,
  `source_info` varchar(255) DEFAULT NULL,
  `source_type` int DEFAULT NULL,
  `status` int DEFAULT NULL,
  `version` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `expected_trigger_time` bigint DEFAULT NULL,
  `finished_time` bigint DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `instance_id` bigint DEFAULT NULL,
  `instance_params` longtext,
  `job_id` bigint DEFAULT NULL,
  `job_params` longtext,
  `last_report_time` bigint DEFAULT NULL,
  `result` longtext,
  `running_times` bigint DEFAULT NULL,
  `status` int DEFAULT NULL,
  `task_tracker_address` varchar(255) DEFAULT NULL,
  `type` int DEFAULT NULL,
  `wf_instance_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_instance_info` (`job_id`,`status`),
  KEY `idx02_instance_info` (`app_id`,`status`),
  KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `advanced_runtime_config` varchar(255) DEFAULT NULL,
  `alarm_config` varchar(255) DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `concurrency` int DEFAULT NULL,
  `designated_workers` varchar(255) DEFAULT NULL,
  `dispatch_strategy` int DEFAULT NULL,
  `dispatch_strategy_config` varchar(255) DEFAULT NULL,
  `execute_type` int DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `instance_retry_num` int DEFAULT NULL,
  `instance_time_limit` bigint DEFAULT NULL,
  `job_description` varchar(255) DEFAULT NULL,
  `job_name` varchar(255) DEFAULT NULL,
  `job_params` longtext,
  `lifecycle` varchar(255) DEFAULT NULL,
  `log_config` varchar(255) DEFAULT NULL,
  `max_instance_num` int DEFAULT NULL,
  `max_worker_count` int DEFAULT NULL,
  `min_cpu_cores` double NOT NULL,
  `min_disk_space` double NOT NULL,
  `min_memory_space` double NOT NULL,
  `next_trigger_time` bigint DEFAULT NULL,
  `notify_user_ids` varchar(255) DEFAULT NULL,
  `processor_info` varchar(255) DEFAULT NULL,
  `processor_type` int DEFAULT NULL,
  `status` int DEFAULT NULL,
  `tag` varchar(255) DEFAULT NULL,
  `task_retry_num` int DEFAULT NULL,
  `time_expression` varchar(255) DEFAULT NULL,
  `time_expression_type` int DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for namespace
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `code` varchar(255) DEFAULT NULL,
  `creator` bigint DEFAULT NULL,
  `dept` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `modifier` bigint DEFAULT NULL,
  `name` varchar(255) DEFAULT NULL,
  `status` int DEFAULT NULL,
  `tags` varchar(255) DEFAULT NULL,
  `token` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `lock_name` varchar(255) DEFAULT NULL,
  `max_lock_time` bigint DEFAULT NULL,
  `ownerip` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `username` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `ip` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_server_info` (`ip`),
  KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for sundry
-- ----------------------------
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `content` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `pkey` varchar(255) DEFAULT NULL,
  `skey` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `account_type` varchar(255) DEFAULT NULL,
  `email` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `nick` varchar(255) DEFAULT NULL,
  `origin_username` varchar(255) DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `phone` varchar(255) DEFAULT NULL,
  `status` int DEFAULT NULL,
  `token_login_verify_info` varchar(255) DEFAULT NULL,
  `username` varchar(255) DEFAULT NULL,
  `web_hook` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_user_name` (`username`),
  KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `role` int DEFAULT NULL,
  `scope` int DEFAULT NULL,
  `target` bigint DEFAULT NULL,
  `user_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `lifecycle` varchar(255) DEFAULT NULL,
  `max_wf_instance_num` int DEFAULT NULL,
  `next_trigger_time` bigint DEFAULT NULL,
  `notify_user_ids` varchar(255) DEFAULT NULL,
  `pedag` longtext,
  `status` int DEFAULT NULL,
  `time_expression` varchar(255) DEFAULT NULL,
  `time_expression_type` int DEFAULT NULL,
  `wf_description` varchar(255) DEFAULT NULL,
  `wf_name` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `dag` longtext,
  `expected_trigger_time` bigint DEFAULT NULL,
  `finished_time` bigint DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `parent_wf_instance_id` bigint DEFAULT NULL,
  `result` longtext,
  `status` int DEFAULT NULL,
  `wf_context` longtext,
  `wf_init_params` longtext,
  `wf_instance_id` bigint DEFAULT NULL,
  `workflow_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
  KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint NOT NULL,
  `enable` bit(1) NOT NULL,
  `extra` longtext,
  `gmt_create` datetime(6) NOT NULL,
  `gmt_modified` datetime(6) NOT NULL,
  `job_id` bigint DEFAULT NULL,
  `node_name` varchar(255) DEFAULT NULL,
  `node_params` longtext,
  `skip_when_failed` bit(1) NOT NULL,
  `type` int DEFAULT NULL,
  `workflow_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

SET FOREIGN_KEY_CHECKS = 1;
323  others/sql/schema/powerjob_mysql_5.1.0.sql  Normal file
@@ -0,0 +1,323 @@
/*
 Navicat Premium Data Transfer

 Source Server         : Local@3306
 Source Server Type    : MySQL
 Source Server Version : 80300 (8.3.0)
 Source Host           : localhost:3306
 Source Schema         : powerjob5

 Target Server Type    : MySQL
 Target Server Version : 80300 (8.3.0)
 File Encoding         : 65001

 Date: 11/08/2024 23:23:30
*/

SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;

-- ----------------------------
-- Table structure for app_info
-- ----------------------------
DROP TABLE IF EXISTS `app_info`;
CREATE TABLE `app_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_name` varchar(255) DEFAULT NULL,
  `creator` bigint DEFAULT NULL,
  `current_server` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `modifier` bigint DEFAULT NULL,
  `namespace_id` bigint DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `tags` varchar(255) DEFAULT NULL,
  `title` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_app_info` (`app_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for container_info
-- ----------------------------
DROP TABLE IF EXISTS `container_info`;
CREATE TABLE `container_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint DEFAULT NULL,
  `container_name` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `last_deploy_time` datetime(6) DEFAULT NULL,
  `source_info` varchar(255) DEFAULT NULL,
  `source_type` int DEFAULT NULL,
  `status` int DEFAULT NULL,
  `version` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_container_info` (`app_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for instance_info
-- ----------------------------
DROP TABLE IF EXISTS `instance_info`;
CREATE TABLE `instance_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `expected_trigger_time` bigint DEFAULT NULL,
  `finished_time` bigint DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `instance_id` bigint DEFAULT NULL,
  `instance_params` longtext,
  `job_id` bigint DEFAULT NULL,
  `job_params` longtext,
  `last_report_time` bigint DEFAULT NULL,
  `result` longtext,
  `running_times` bigint DEFAULT NULL,
  `status` int DEFAULT NULL,
  `task_tracker_address` varchar(255) DEFAULT NULL,
  `type` int DEFAULT NULL,
  `wf_instance_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_instance_info` (`job_id`,`status`),
  KEY `idx02_instance_info` (`app_id`,`status`),
  KEY `idx03_instance_info` (`instance_id`,`status`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for job_info
-- ----------------------------
DROP TABLE IF EXISTS `job_info`;
CREATE TABLE `job_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `advanced_runtime_config` varchar(255) DEFAULT NULL,
  `alarm_config` varchar(255) DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `concurrency` int DEFAULT NULL,
  `designated_workers` varchar(255) DEFAULT NULL,
  `dispatch_strategy` int DEFAULT NULL,
  `dispatch_strategy_config` varchar(255) DEFAULT NULL,
  `execute_type` int DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `instance_retry_num` int DEFAULT NULL,
  `instance_time_limit` bigint DEFAULT NULL,
  `job_description` varchar(255) DEFAULT NULL,
  `job_name` varchar(255) DEFAULT NULL,
  `job_params` longtext,
  `lifecycle` varchar(255) DEFAULT NULL,
  `log_config` varchar(255) DEFAULT NULL,
  `max_instance_num` int DEFAULT NULL,
  `max_worker_count` int DEFAULT NULL,
  `min_cpu_cores` double NOT NULL,
  `min_disk_space` double NOT NULL,
  `min_memory_space` double NOT NULL,
  `next_trigger_time` bigint DEFAULT NULL,
  `notify_user_ids` varchar(255) DEFAULT NULL,
  `processor_info` varchar(255) DEFAULT NULL,
  `processor_type` int DEFAULT NULL,
  `status` int DEFAULT NULL,
  `tag` varchar(255) DEFAULT NULL,
  `task_retry_num` int DEFAULT NULL,
  `time_expression` varchar(255) DEFAULT NULL,
  `time_expression_type` int DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_job_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for namespace
-- ----------------------------
DROP TABLE IF EXISTS `namespace`;
CREATE TABLE `namespace` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `code` varchar(255) DEFAULT NULL,
  `creator` bigint DEFAULT NULL,
  `dept` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `modifier` bigint DEFAULT NULL,
  `name` varchar(255) DEFAULT NULL,
  `status` int DEFAULT NULL,
  `tags` varchar(255) DEFAULT NULL,
  `token` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_namespace` (`code`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for oms_lock
-- ----------------------------
DROP TABLE IF EXISTS `oms_lock`;
CREATE TABLE `oms_lock` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `lock_name` varchar(255) DEFAULT NULL,
  `max_lock_time` bigint DEFAULT NULL,
  `ownerip` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_oms_lock` (`lock_name`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for pwjb_user_info
-- ----------------------------
DROP TABLE IF EXISTS `pwjb_user_info`;
CREATE TABLE `pwjb_user_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `username` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for server_info
-- ----------------------------
DROP TABLE IF EXISTS `server_info`;
CREATE TABLE `server_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `ip` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_server_info` (`ip`),
  KEY `idx01_server_info` (`gmt_modified`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for sundry
-- ----------------------------
DROP TABLE IF EXISTS `sundry`;
CREATE TABLE `sundry` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `content` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `pkey` varchar(255) DEFAULT NULL,
  `skey` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_sundry` (`pkey`,`skey`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for user_info
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `account_type` varchar(255) DEFAULT NULL,
  `email` varchar(255) DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `nick` varchar(255) DEFAULT NULL,
  `origin_username` varchar(255) DEFAULT NULL,
  `password` varchar(255) DEFAULT NULL,
  `phone` varchar(255) DEFAULT NULL,
  `status` int DEFAULT NULL,
  `token_login_verify_info` varchar(255) DEFAULT NULL,
  `username` varchar(255) DEFAULT NULL,
  `web_hook` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_user_name` (`username`),
  KEY `uidx02_user_info` (`email`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for user_role
-- ----------------------------
DROP TABLE IF EXISTS `user_role`;
CREATE TABLE `user_role` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `role` int DEFAULT NULL,
  `scope` int DEFAULT NULL,
  `target` bigint DEFAULT NULL,
  `user_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `uidx01_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_info`;
CREATE TABLE `workflow_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint DEFAULT NULL,
  `extra` varchar(255) DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `lifecycle` varchar(255) DEFAULT NULL,
  `max_wf_instance_num` int DEFAULT NULL,
  `next_trigger_time` bigint DEFAULT NULL,
  `notify_user_ids` varchar(255) DEFAULT NULL,
  `pedag` longtext,
  `status` int DEFAULT NULL,
  `time_expression` varchar(255) DEFAULT NULL,
  `time_expression_type` int DEFAULT NULL,
  `wf_description` varchar(255) DEFAULT NULL,
  `wf_name` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_workflow_info` (`app_id`,`status`,`time_expression_type`,`next_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_instance_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_instance_info`;
CREATE TABLE `workflow_instance_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `actual_trigger_time` bigint DEFAULT NULL,
  `app_id` bigint DEFAULT NULL,
  `dag` longtext,
  `expected_trigger_time` bigint DEFAULT NULL,
  `finished_time` bigint DEFAULT NULL,
  `gmt_create` datetime(6) DEFAULT NULL,
  `gmt_modified` datetime(6) DEFAULT NULL,
  `parent_wf_instance_id` bigint DEFAULT NULL,
  `result` longtext,
  `status` int DEFAULT NULL,
  `wf_context` longtext,
  `wf_init_params` longtext,
  `wf_instance_id` bigint DEFAULT NULL,
  `workflow_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  UNIQUE KEY `uidx01_wf_instance` (`wf_instance_id`),
  KEY `idx01_wf_instance` (`workflow_id`,`status`,`app_id`,`expected_trigger_time`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

-- ----------------------------
-- Table structure for workflow_node_info
-- ----------------------------
DROP TABLE IF EXISTS `workflow_node_info`;
CREATE TABLE `workflow_node_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `app_id` bigint NOT NULL,
  `enable` bit(1) NOT NULL,
  `extra` longtext,
  `gmt_create` datetime(6) NOT NULL,
  `gmt_modified` datetime(6) NOT NULL,
  `job_id` bigint DEFAULT NULL,
  `node_name` varchar(255) DEFAULT NULL,
  `node_params` longtext,
  `skip_when_failed` bit(1) NOT NULL,
  `type` int DEFAULT NULL,
  `workflow_id` bigint DEFAULT NULL,
  PRIMARY KEY (`id`),
  KEY `idx01_workflow_node_info` (`workflow_id`,`gmt_create`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;

SET FOREIGN_KEY_CHECKS = 1;
7  others/sql/upgrade/README.md  Normal file
@@ -0,0 +1,7 @@
Because upgrades span different databases and different versions, the official upgrade SQL we can provide is limited. You can generate your own upgrade SQL in either of the following ways:

- [Official scripts] Take the official full-schema table-creation files for each version (in this project under others - sql - schema) and diff the columns yourself.

- [Do-it-yourself] Export the table structure of your current powerjob database, then create a test database and point a 5.x server directly at it so that it auto-creates the tables. With the structure SQL of both versions in hand, use a tool to generate the update SQL (database management tools such as Navicat support structure comparison).

Reference documentation: https://www.yuque.com/powerjob/guidence/upgrade
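A sketch of the do-it-yourself route with mysqldump (database names and credentials here are placeholders; the second database is the one a 5.x server auto-populated):

    # Export structure only (no rows) from both databases, then compare
    mysqldump --no-data -h127.0.0.1 -uroot -p powerjob_current > current_schema.sql
    mysqldump --no-data -h127.0.0.1 -uroot -p powerjob_test_5x > target_schema.sql
    diff -u current_schema.sql target_schema.sql    # or use Navicat's structure comparison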
10  others/sql/upgrade/v4.0.x-v4.1.x.sql  Normal file
@@ -0,0 +1,10 @@
-- Upgrade SQL FROM 4.0.x to 4.1.x
-- ----------------------------
-- Table change for workflow_instance_info
-- ----------------------------
alter table workflow_instance_info
    add parent_wf_instance_id bigint default null null comment 'Parent workflow instance ID';
-- ----------------------------
-- Table change for job_info
-- ----------------------------
alter table job_info add alarm_config varchar(512) comment 'Alarm configuration' default null;
6  others/sql/upgrade/v4.1.x-v4.2.x.sql  Normal file
@@ -0,0 +1,6 @@
-- Upgrade SQL FROM 4.1.x to 4.2.x
-- ----------------------------
-- Table change for job_info
-- ----------------------------
alter table job_info add tag varchar(255) comment 'TAG' default null;
alter table job_info add log_config varchar(255) comment 'logConfig' default null;
6  others/sql/upgrade/v4.3.7-v4.3.8.sql  Normal file
@@ -0,0 +1,6 @@
-- Upgrade SQL FROM 4.3.7 to 4.3.8
-- ----------------------------
-- Table change for job_info
-- ----------------------------
alter table job_info add dispatch_strategy_config varchar(255) comment 'dispatch_strategy_config' default null;
alter table job_info add advanced_runtime_config varchar(255) comment 'advanced_runtime_config' default null;
88  others/sql/upgrade/v4.3.x-v5.0.x.sql  Normal file
@@ -0,0 +1,88 @@
-- Upgrade SQL FROM 4.3.x to 5.0.x
-- ----------------------------
-- Table change for app_info
-- ----------------------------
SET FOREIGN_KEY_CHECKS=0;

ALTER TABLE `app_info` ADD COLUMN `creator` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `extra` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `modifier` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `namespace_id` bigint NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `tags` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `app_info` ADD COLUMN `title` varchar(255) NULL DEFAULT NULL;

-- ----------------------------
-- Table change for user_info
-- ----------------------------
ALTER TABLE `user_info` ADD COLUMN `account_type` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `nick` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `origin_username` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD COLUMN `token_login_verify_info` varchar(255) NULL DEFAULT NULL;
ALTER TABLE `user_info` ADD UNIQUE INDEX `uidx01_user_name`(`username` ASC) USING BTREE;

-- ----------------------------
-- new table 'namespace'
-- ----------------------------
CREATE TABLE `namespace` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `code` varchar(255) NULL DEFAULT NULL,
  `creator` bigint NULL DEFAULT NULL,
  `dept` varchar(255) NULL DEFAULT NULL,
  `extra` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `modifier` bigint NULL DEFAULT NULL,
  `name` varchar(255) NULL DEFAULT NULL,
  `status` int NULL DEFAULT NULL,
  `tags` varchar(255) NULL DEFAULT NULL,
  `token` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uidx01_namespace`(`code` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;


-- ----------------------------
-- new table 'pwjb_user_info'
-- ----------------------------
CREATE TABLE `pwjb_user_info` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `extra` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `password` varchar(255) NULL DEFAULT NULL,
  `username` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uidx01_username`(`username` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;

-- ----------------------------
-- new table 'sundry'
-- ----------------------------
CREATE TABLE `sundry` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `content` varchar(255) NULL DEFAULT NULL,
  `extra` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `pkey` varchar(255) NULL DEFAULT NULL,
  `skey` varchar(255) NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  UNIQUE INDEX `uidx01_sundry`(`pkey` ASC, `skey` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 3 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;


-- ----------------------------
-- new table 'user_role'
-- ----------------------------
CREATE TABLE `user_role` (
  `id` bigint NOT NULL AUTO_INCREMENT,
  `extra` varchar(255) NULL DEFAULT NULL,
  `gmt_create` datetime(6) NULL DEFAULT NULL,
  `gmt_modified` datetime(6) NULL DEFAULT NULL,
  `role` int NULL DEFAULT NULL,
  `scope` int NULL DEFAULT NULL,
  `target` bigint NULL DEFAULT NULL,
  `user_id` bigint NULL DEFAULT NULL,
  PRIMARY KEY (`id`) USING BTREE,
  INDEX `uidx01_user_id`(`user_id` ASC) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 2 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_0900_ai_ci ROW_FORMAT = Dynamic;
44  pom.xml
@@ -4,13 +4,13 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<groupId>com.github.kfcfans</groupId>
<groupId>tech.powerjob</groupId>
<artifactId>powerjob</artifactId>
<version>1.0.0</version>
<version>5.1.1</version>
<packaging>pom</packaging>
<name>powerjob</name>
<url>https://github.com/KFCFans/OhMyScheduler</url>
<description>Distributed scheduling and execution framework</description>
<url>http://www.powerjob.tech</url>
<description>Enterprise job scheduling middleware with distributed computing ability.</description>
<licenses>
<license>
<name>Apache License, Version 2.0</name>
@@ -19,8 +19,8 @@
</license>
</licenses>
<scm>
<url>https://github.com/KFCFans/OhMyScheduler</url>
<connection>https://github.com/KFCFans/OhMyScheduler.git</connection>
<url>https://github.com/PowerJob/PowerJob</url>
<connection>https://github.com/PowerJob/PowerJob.git</connection>
</scm>

<developers>
@@ -40,8 +40,11 @@
<module>powerjob-server</module>
<module>powerjob-common</module>
<module>powerjob-client</module>
<module>powerjob-worker-samples</module>
<module>powerjob-worker-agent</module>
<module>powerjob-worker-spring-boot-starter</module>
<module>powerjob-worker-samples</module>
<module>powerjob-official-processors</module>
<module>powerjob-remote</module>
</modules>

<properties>
@@ -86,7 +89,7 @@
<testTarget>${java.version}</testTarget>
</configuration>
</plugin>
<!-- 打包源码 -->
<!-- Package source codes -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
@@ -120,9 +123,9 @@
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<configuration>
<!-- JavaDoc 编译错误不影响正常构建 -->
<!-- Prevent JavaDoc error from affecting building project. -->
<failOnError>false</failOnError>
<!-- 非严格模式...以后要好好按格式写注释啊... -->
<!-- Non-strict mode -->
<additionalJOption>-Xdoclint:none</additionalJOption>
</configuration>
<executions>
@@ -148,22 +151,33 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.sonatype.plugins</groupId>
<artifactId>nexus-staging-maven-plugin</artifactId>
<version>1.6.7</version>
<extensions>true</extensions>
<configuration>
<serverId>ossrh</serverId>
<nexusUrl>https://s01.oss.sonatype.org/</nexusUrl>
<autoReleaseAfterClose>true</autoReleaseAfterClose>
</configuration>
</plugin>
</plugins>
</build>

<distributionManagement>
<snapshotRepository>
<id>ossrh</id>
<url>https://oss.sonatype.org/content/repositories/snapshots/</url>
<url>https://s01.oss.sonatype.org/content/repositories/snapshots/</url>
</snapshotRepository>
<repository>
<id>ossrh</id>
<url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url>
<url>https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/</url>
</repository>
</distributionManagement>
</profile>

<!-- 本地使用 -->
<!-- Local profile -->
<profile>
<id>dev</id>
<activation>
@@ -172,7 +186,7 @@

<build>
<plugins>
<!-- 编译插件 -->
<!-- Maven compiler plugin -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
@@ -184,7 +198,7 @@
<testTarget>${java.version}</testTarget>
</configuration>
</plugin>
<!-- 编辑 MANIFEST.MF -->
<!-- Edit MANIFEST.MF -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
powerjob-client/pom.xml
@@ -4,35 +4,100 @@
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>powerjob</artifactId>
        <groupId>com.github.kfcfans</groupId>
        <version>1.0.0</version>
        <groupId>tech.powerjob</groupId>
        <version>5.1.1</version>
    </parent>

    <modelVersion>4.0.0</modelVersion>
    <artifactId>powerjob-client</artifactId>
    <version>3.1.0</version>
    <version>5.1.1</version>
    <packaging>jar</packaging>

    <properties>
        <powerjob.common.version>3.1.0</powerjob.common.version>
        <junit.version>5.6.1</junit.version>
        <junit.version>5.9.1</junit.version>
        <logback.version>1.2.13</logback.version>
        <fastjson.version>1.2.83</fastjson.version>
        <powerjob.common.version>5.1.1</powerjob.common.version>

        <mvn.shade.plugin.version>3.2.4</mvn.shade.plugin.version>
    </properties>

    <dependencies>

        <!-- fastJson -->
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>${fastjson.version}</version>
        </dependency>

        <!-- oms-common -->
        <dependency>
            <groupId>com.github.kfcfans</groupId>
            <groupId>tech.powerjob</groupId>
            <artifactId>powerjob-common</artifactId>
            <version>${powerjob.common.version}</version>
        </dependency>

        <!-- Junit 测试 -->
        <!-- Junit tests -->
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter-api</artifactId>
            <version>${junit.version}</version>
            <scope>test</scope>
        </dependency>
        <!-- log for test stage -->
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
            <version>${logback.version}</version>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <!-- Shade is abandoned for now: a shaded jar has to be extremely clean, otherwise it creates bigger pitfalls -->
            <!--
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>${mvn.shade.plugin.version}</version>
                <configuration>
                    <createDependencyReducedPom>false</createDependencyReducedPom>
                    <relocations>
                        <relocation>
                            <pattern>okhttp3</pattern>
                            <shadedPattern>shade.powerjob.okhttp3</shadedPattern>
                        </relocation>
                        <relocation>
                            <pattern>okio</pattern>
                            <shadedPattern>shade.powerjob.okio</shadedPattern>
                        </relocation>
                        <relocation>
                            <pattern>com.google</pattern>
                            <shadedPattern>shade.powerjob.com.google</shadedPattern>
                        </relocation>
                        <relocation>
                            <pattern>org.apache</pattern>
                            <shadedPattern>shade.powerjob.org.apache</shadedPattern>
                        </relocation>
                        <relocation>
                            <pattern>com.alibaba</pattern>
                            <shadedPattern>shade.powerjob.com.alibaba</shadedPattern>
                        </relocation>
                    </relocations>
                </configuration>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            -->
        </plugins>
    </build>

</project>
com/github/kfcfans/powerjob/client/OhMyClient.java (deleted)
@@ -1,387 +0,0 @@
package com.github.kfcfans.powerjob.client;

import com.github.kfcfans.powerjob.common.InstanceStatus;
import com.github.kfcfans.powerjob.common.OmsException;
import com.github.kfcfans.powerjob.common.OpenAPIConstant;
import com.github.kfcfans.powerjob.common.request.http.SaveJobInfoRequest;
import com.github.kfcfans.powerjob.common.request.http.SaveWorkflowRequest;
import com.github.kfcfans.powerjob.common.response.*;
import com.github.kfcfans.powerjob.common.utils.HttpUtils;
import com.github.kfcfans.powerjob.common.utils.JsonUtils;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import okhttp3.FormBody;
import okhttp3.MediaType;
import okhttp3.RequestBody;
import org.apache.commons.lang3.StringUtils;

import java.io.IOException;
import java.util.List;
import java.util.Objects;

/**
 * OpenAPI client.
 *
 * @author tjq
 * @since 2020/4/15
 */
@Slf4j
@SuppressWarnings({"rawtypes", "unchecked"})
public class OhMyClient {

    private Long appId;
    private String currentAddress;
    private List<String> allAddress;

    private static final String URL_PATTERN = "http://%s%s%s";

    /**
     * Initialize the OhMyClient.
     * @param domain e.g. www.oms-server.com (intranet domain; DNS & proxy are up to you)
     * @param appName name of the application
     */
    public OhMyClient(String domain, String appName, String password) {
        this(Lists.newArrayList(domain), appName, password);
    }

    /**
     * Initialize the OhMyClient.
     * @param addressList IP:Port list
     * @param appName name of the application
     */
    public OhMyClient(List<String> addressList, String appName, String password) {

        Objects.requireNonNull(addressList, "domain can't be null!");
        Objects.requireNonNull(appName, "appName can't be null");

        allAddress = addressList;
        for (String addr : addressList) {
            String url = getUrl(OpenAPIConstant.ASSERT, addr);
            try {
                String result = assertApp(appName, password, url);
                if (StringUtils.isNotEmpty(result)) {
                    ResultDTO resultDTO = JsonUtils.parseObject(result, ResultDTO.class);
                    if (resultDTO.isSuccess()) {
                        appId = Long.parseLong(resultDTO.getData().toString());
                        currentAddress = addr;
                        break;
                    }
                }
            } catch (Exception ignore) {
            }
        }

        if (StringUtils.isEmpty(currentAddress)) {
            throw new OmsException("no server available");
        }
        log.info("[OhMyClient] {}'s oms-client bootstrap successfully.", appName);
    }

    private static String assertApp(String appName, String password, String url) throws IOException {
        FormBody.Builder builder = new FormBody.Builder()
                .add("appName", appName);
        if (password != null) {
            builder.add("password", password);
        }
        return HttpUtils.post(url, builder.build());
    }

    private static String getUrl(String path, String address) {
        return String.format(URL_PATTERN, address, OpenAPIConstant.WEB_PATH, path);
    }

    /* ************* Job API ************* */

    /**
     * Save a job (create or update).
     * @param request detailed job parameters
     * @return ID of the created job
     * @throws Exception potential exception
     */
    public ResultDTO<Long> saveJob(SaveJobInfoRequest request) throws Exception {

        request.setAppId(appId);
        MediaType jsonType = MediaType.parse("application/json; charset=utf-8");
        String json = JsonUtils.toJSONStringUnsafe(request);
        String post = postHA(OpenAPIConstant.SAVE_JOB, RequestBody.create(json, jsonType));
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Query job info by jobId.
     * @param jobId job ID
     * @return detailed job info
     * @throws Exception potential exception
     */
    public ResultDTO<JobInfoDTO> fetchJob(Long jobId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("jobId", jobId.toString())
                .add("appId", appId.toString())
                .build();
        String post = postHA(OpenAPIConstant.FETCH_JOB, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Disable a job.
     * @param jobId job ID
     * @return standard result object
     * @throws Exception potential exception
     */
    public ResultDTO<Void> disableJob(Long jobId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("jobId", jobId.toString())
                .add("appId", appId.toString())
                .build();
        String post = postHA(OpenAPIConstant.DISABLE_JOB, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Enable a job.
     * @param jobId job ID
     * @return standard result object
     * @throws Exception potential exception
     */
    public ResultDTO<Void> enableJob(Long jobId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("jobId", jobId.toString())
                .add("appId", appId.toString())
                .build();
        String post = postHA(OpenAPIConstant.ENABLE_JOB, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Delete a job.
     * @param jobId job ID
     * @return standard result object
     * @throws Exception potential exception
     */
    public ResultDTO<Void> deleteJob(Long jobId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("jobId", jobId.toString())
                .add("appId", appId.toString())
                .build();
        String post = postHA(OpenAPIConstant.DELETE_JOB, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Run a job.
     * @param jobId job ID
     * @param instanceParams parameters for this job instance
     * @return job instance ID (instanceId)
     * @throws Exception potential exception
     */
    public ResultDTO<Long> runJob(Long jobId, String instanceParams) throws Exception {
        FormBody.Builder builder = new FormBody.Builder()
                .add("jobId", jobId.toString())
                .add("appId", appId.toString());

        if (StringUtils.isNotEmpty(instanceParams)) {
            builder.add("instanceParams", instanceParams);
        }
        String post = postHA(OpenAPIConstant.RUN_JOB, builder.build());
        return JsonUtils.parseObject(post, ResultDTO.class);
    }
    public ResultDTO<Long> runJob(Long jobId) throws Exception {
        return runJob(jobId, null);
    }

    /* ************* Instance API ************* */
    /**
     * Stop a job instance.
     * @param instanceId job instance ID
     * @return standard result object
     * @throws Exception potential exception
     */
    public ResultDTO<Void> stopInstance(Long instanceId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("instanceId", instanceId.toString())
                .add("appId", appId.toString())
                .build();
        String post = postHA(OpenAPIConstant.STOP_INSTANCE, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Query the status of a job instance.
     * @param instanceId job instance ID
     * @return enum value of {@link InstanceStatus}
     * @throws Exception potential exception
     */
    public ResultDTO<Integer> fetchInstanceStatus(Long instanceId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("instanceId", instanceId.toString())
                .build();
        String post = postHA(OpenAPIConstant.FETCH_INSTANCE_STATUS, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Query detailed info of a job instance.
     * @param instanceId job instance ID
     * @return job instance info
     * @throws Exception potential exception
     */
    public ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("instanceId", instanceId.toString())
                .build();
        String post = postHA(OpenAPIConstant.FETCH_INSTANCE_INFO, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /* ************* Workflow API ************* */
    /**
     * Save a workflow (create or update).
     * @param request request to create/update the workflow
     * @return workflow ID
     * @throws Exception potential exception
     */
    public ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request) throws Exception {
        request.setAppId(appId);
        MediaType jsonType = MediaType.parse("application/json; charset=utf-8");
        String json = JsonUtils.toJSONStringUnsafe(request);
        String post = postHA(OpenAPIConstant.SAVE_WORKFLOW, RequestBody.create(json, jsonType));
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Query workflow info by workflowId.
     * @param workflowId workflowId
     * @return workflow info
     * @throws Exception potential exception
     */
    public ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("workflowId", workflowId.toString())
                .add("appId", appId.toString())
                .build();
        String post = postHA(OpenAPIConstant.FETCH_WORKFLOW, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Disable a workflow.
     * @param workflowId workflow ID
     * @return standard result object
     * @throws Exception potential exception
     */
    public ResultDTO<Void> disableWorkflow(Long workflowId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("workflowId", workflowId.toString())
                .add("appId", appId.toString())
                .build();
        String post = postHA(OpenAPIConstant.DISABLE_WORKFLOW, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Enable a workflow.
     * @param workflowId workflowId
     * @return standard result object
     * @throws Exception potential exception
     */
    public ResultDTO<Void> enableWorkflow(Long workflowId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("workflowId", workflowId.toString())
                .add("appId", appId.toString())
                .build();
        String post = postHA(OpenAPIConstant.ENABLE_WORKFLOW, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Delete a workflow.
     * @param workflowId workflowId
     * @return standard result object
     * @throws Exception potential exception
     */
    public ResultDTO<Void> deleteWorkflow(Long workflowId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("workflowId", workflowId.toString())
                .add("appId", appId.toString())
                .build();
        String post = postHA(OpenAPIConstant.DELETE_WORKFLOW, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Run a workflow.
     * @param workflowId workflowId
     * @return workflow instance ID
     * @throws Exception potential exception
     */
    public ResultDTO<Long> runWorkflow(Long workflowId) throws Exception {
        FormBody.Builder builder = new FormBody.Builder()
                .add("workflowId", workflowId.toString())
                .add("appId", appId.toString());
        String post = postHA(OpenAPIConstant.RUN_WORKFLOW, builder.build());
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /* ************* Workflow Instance API ************* */
    /**
     * Stop a workflow instance.
     * @param wfInstanceId workflow instance ID
     * @return standard result object
     * @throws Exception potential exception
     */
    public ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("wfInstanceId", wfInstanceId.toString())
                .add("appId", appId.toString())
                .build();
        String post = postHA(OpenAPIConstant.STOP_WORKFLOW_INSTANCE, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    /**
     * Query detailed info of a workflow instance.
     * @param wfInstanceId workflow instance ID
     * @return workflow instance info
     * @throws Exception potential exception
     */
    public ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId) throws Exception {
        RequestBody body = new FormBody.Builder()
                .add("wfInstanceId", wfInstanceId.toString())
                .add("appId", appId.toString())
                .build();
        String post = postHA(OpenAPIConstant.FETCH_WORKFLOW_INSTANCE_INFO, body);
        return JsonUtils.parseObject(post, ResultDTO.class);
    }

    private String postHA(String path, RequestBody requestBody) {

        // Try the current (default) address first
        try {
            String res = HttpUtils.post(getUrl(path, currentAddress), requestBody);
            if (StringUtils.isNotEmpty(res)) {
                return res;
            }
        } catch (Exception ignore) {
        }

        // Failed; retry against every known address
        for (String addr : allAddress) {
            try {
                String res = HttpUtils.post(getUrl(path, addr), requestBody);
                if (StringUtils.isNotEmpty(res)) {
                    log.warn("[OhMyClient] server change: from({}) -> to({}).", currentAddress, addr);
                    currentAddress = addr;
                    return res;
                }
            } catch (Exception ignore) {
            }
        }

        log.error("[OhMyClient] no server available in {}.", allAddress);
        throw new OmsException("no server available");
    }
}
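The deleted OhMyClient above is superseded by the PowerJobClient added below; the constructor shape is unchanged, so migration is mostly a rename plus the new tech.powerjob coordinates. A minimal sketch (server address and credentials are placeholders):

// Old (removed above):
//   OhMyClient client = new OhMyClient("powerjob-server.example.com", "my-app", "my-password");

// New (added below):
import tech.powerjob.client.PowerJobClient;

public class MigrationDemo {
    public static void main(String[] args) {
        PowerJobClient client = new PowerJobClient("powerjob-server.example.com", "my-app", "my-password");
        // Method names such as runJob / stopInstance keep the same shape, but now return typed ResultDTOs.
    }
}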
tech/powerjob/client/ClientConfig.java (new file)
@@ -0,0 +1,71 @@
package tech.powerjob.client;

import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import lombok.experimental.Accessors;
import tech.powerjob.client.common.Protocol;
import tech.powerjob.client.extension.ClientExtension;

import java.io.Serializable;
import java.util.List;
import java.util.Map;

/**
 * Client configuration.
 *
 * @author 程序帕鲁
 * @since 2024/2/20
 */
@Getter
@Setter
@ToString
@Accessors(chain = true)
public class ClientConfig implements Serializable {

    /**
     * AppName of the executor
     */
    private String appName;

    /**
     * Password of the executor
     */
    private String password;

    /**
     * Address list, supported formats:
     * - IP:Port, e.g. 192.168.1.1:7700
     * - domain name, e.g. powerjob.apple-inc.com
     */
    private List<String> addressList;

    /**
     * Client communication protocol
     */
    private Protocol protocol = Protocol.HTTP;

    /**
     * Connection timeout
     */
    private Integer connectionTimeout;
    /**
     * Maximum time to wait for the server's response data; more precisely, the read timeout that applies once the server starts returning the response (HTTP headers and body).
     */
    private Integer readTimeout;
    /**
     * Maximum time for sending data to the server, from the moment the client starts writing (e.g. the body of a POST request) until the data is fully sent.
     */
    private Integer writeTimeout;

    /**
     * Request headers attached by default,
     * so that infrastructure can identify the traffic.
     */
    private Map<String, String> defaultHeaders;

    /**
     * Client behavior extension
     */
    private ClientExtension clientExtension;
}
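A minimal wiring sketch for the class above; the address and credentials are placeholders, and the timeout unit follows DEFAULT_TIMEOUT_SECONDS in ClusterRequestService further below (seconds):

import com.google.common.collect.Lists;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.common.Protocol;

public class ClientConfigDemo {
    public static void main(String[] args) {
        // @Accessors(chain = true) makes the setters chainable.
        ClientConfig config = new ClientConfig()
                .setAppName("my-app")                                   // placeholder
                .setPassword("my-password")                             // placeholder
                .setAddressList(Lists.newArrayList("127.0.0.1:7700"))
                .setProtocol(Protocol.HTTPS)
                .setConnectionTimeout(3)
                .setReadTimeout(5)
                .setWriteTimeout(5);
        System.out.println(config);
    }
}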
tech/powerjob/client/IPowerJobClient.java (new file)
@@ -0,0 +1,82 @@
package tech.powerjob.client;

import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.response.*;

import java.util.List;

/**
 * PowerJobClient, the client for OpenAPI.
 *
 * @author tjq
 * @since 2023/3/5
 */
public interface IPowerJobClient {

    /* ************* Job API list ************* */

    ResultDTO<SaveJobInfoRequest> exportJob(Long jobId);

    ResultDTO<Long> saveJob(SaveJobInfoRequest request);

    ResultDTO<Long> copyJob(Long jobId);

    ResultDTO<JobInfoDTO> fetchJob(Long jobId);

    ResultDTO<List<JobInfoDTO>> fetchAllJob();

    ResultDTO<List<JobInfoDTO>> queryJob(JobInfoQuery powerQuery);

    ResultDTO<Void> disableJob(Long jobId);

    ResultDTO<Void> enableJob(Long jobId);

    ResultDTO<Void> deleteJob(Long jobId);

    ResultDTO<Long> runJob(Long jobId, String instanceParams, long delayMS);

    /* ************* Instance API list ************* */

    ResultDTO<Void> stopInstance(Long instanceId);

    ResultDTO<Void> cancelInstance(Long instanceId);

    ResultDTO<Void> retryInstance(Long instanceId);

    ResultDTO<Integer> fetchInstanceStatus(Long instanceId);

    ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId);

    ResultDTO<PageResult<InstanceInfoDTO>> queryInstanceInfo(InstancePageQuery instancePageQuery);

    /* ************* Workflow API list ************* */
    ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request);

    ResultDTO<Long> copyWorkflow(Long workflowId);

    ResultDTO<List<WorkflowNodeInfoDTO>> saveWorkflowNode(List<SaveWorkflowNodeRequest> requestList);

    ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId);

    ResultDTO<Void> disableWorkflow(Long workflowId);

    ResultDTO<Void> enableWorkflow(Long workflowId);

    ResultDTO<Void> deleteWorkflow(Long workflowId);

    ResultDTO<Long> runWorkflow(Long workflowId, String initParams, long delayMS);

    /* ************* Workflow Instance API list ************* */

    ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId);

    ResultDTO<Void> retryWorkflowInstance(Long wfInstanceId);

    ResultDTO<Void> markWorkflowNodeAsSuccess(Long wfInstanceId, Long nodeId);

    ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId);
}
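Call sites are arguably better off depending on this interface than on the concrete PowerJobClient defined next; a small illustrative sketch of the pattern:

import tech.powerjob.client.IPowerJobClient;
import tech.powerjob.common.response.ResultDTO;

// Depending on the interface keeps this logic mockable in tests.
public class JobToggler {

    private final IPowerJobClient client;

    public JobToggler(IPowerJobClient client) {
        this.client = client;
    }

    /** Disables a job and reports whether the server accepted the request. */
    public boolean disable(long jobId) {
        ResultDTO<Void> result = client.disableJob(jobId);
        return result.isSuccess();
    }
}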
tech/powerjob/client/PowerJobClient.java (new file)
@@ -0,0 +1,563 @@
package tech.powerjob.client;

import com.alibaba.fastjson.JSON;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import tech.powerjob.client.module.AppAuthRequest;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.client.service.RequestService;
import tech.powerjob.client.service.impl.ClusterRequestServiceOkHttp3Impl;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.enums.EncryptType;
import tech.powerjob.common.enums.InstanceStatus;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.response.*;
import tech.powerjob.common.serialize.JsonUtils;
import tech.powerjob.common.utils.CommonUtils;
import tech.powerjob.common.utils.DigestUtils;

import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.Map;

import static tech.powerjob.client.TypeStore.*;

/**
 * PowerJobClient, the client for OpenAPI.
 *
 * @author tjq
 * @since 2020/4/15
 */
@Slf4j
public class PowerJobClient implements IPowerJobClient, Closeable {

    private Long appId;

    private final RequestService requestService;

    public PowerJobClient(ClientConfig config) {

        List<String> addressList = config.getAddressList();
        String appName = config.getAppName();

        CommonUtils.requireNonNull(addressList, "addressList can't be null!");
        CommonUtils.requireNonNull(appName, "appName can't be null");

        this.requestService = new ClusterRequestServiceOkHttp3Impl(config);

        AppAuthRequest appAuthRequest = new AppAuthRequest();
        appAuthRequest.setAppName(appName);
        appAuthRequest.setEncryptedPassword(DigestUtils.md5(config.getPassword()));
        appAuthRequest.setEncryptType(EncryptType.MD5.getCode());

        String assertResponse = requestService.request(OpenAPIConstant.AUTH_APP, PowerRequestBody.newJsonRequestBody(appAuthRequest));

        if (StringUtils.isNotEmpty(assertResponse)) {
            ResultDTO<AppAuthResult> resultDTO = JSON.parseObject(assertResponse, APP_AUTH_RESULT_TYPE);
            if (resultDTO.isSuccess()) {
                appId = resultDTO.getData().getAppId();
            } else {
                throw new PowerJobException(resultDTO.getMessage());
            }
        }

        if (appId == null) {
            throw new PowerJobException("appId is null, please check your config");
        }

        log.info("[PowerJobClient] [INIT] {}'s PowerJobClient bootstrap successfully", appName);
    }

    /**
     * Init PowerJobClient with domain, appName and password.
     *
     * @param domain like powerjob-server.apple-inc.com (intranet domain)
     * @param appName name of the application
     * @param password password of the application
     */
    public PowerJobClient(String domain, String appName, String password) {
        this(new ClientConfig().setAppName(appName).setPassword(password).setAddressList(Lists.newArrayList(domain)));
    }

    /**
     * Init PowerJobClient with server address, appName and password.
     *
     * @param addressList IP:Port address list, like 192.168.1.1:7700
     * @param appName name of the application
     * @param password password of the application
     */
    public PowerJobClient(List<String> addressList, String appName, String password) {
        this(new ClientConfig().setAppName(appName).setPassword(password).setAddressList(addressList));
    }

    /* ************* Job API list ************* */

    /**
     * Save one Job.
     * When an ID exists in SaveJobInfoRequest, it is an update operation; otherwise, it is a create operation.
     *
     * @param request Job meta info
     * @return jobId
     */
    @Override
    public ResultDTO<Long> saveJob(SaveJobInfoRequest request) {

        request.setAppId(appId);
        String post = requestService.request(OpenAPIConstant.SAVE_JOB, PowerRequestBody.newJsonRequestBody(request));
        return JSON.parseObject(post, LONG_RESULT_TYPE);
    }

    /**
     * Copy one Job
     *
     * @param jobId Job id
     * @return Id of job copy
     */
    @Override
    public ResultDTO<Long> copyJob(Long jobId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("jobId", jobId.toString());
        param.put("appId", appId.toString());

        String post = requestService.request(OpenAPIConstant.COPY_JOB, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, LONG_RESULT_TYPE);
    }

    @Override
    public ResultDTO<SaveJobInfoRequest> exportJob(Long jobId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("jobId", jobId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.EXPORT_JOB, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, SAVE_JOB_INFO_REQUEST_RESULT_TYPE);
    }

    /**
     * Query JobInfo by jobId
     *
     * @param jobId jobId
     * @return Job meta info
     */
    @Override
    public ResultDTO<JobInfoDTO> fetchJob(Long jobId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("jobId", jobId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.FETCH_JOB, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, JOB_RESULT_TYPE);
    }

    /**
     * Query all JobInfo
     *
     * @return All JobInfo
     */
    @Override
    public ResultDTO<List<JobInfoDTO>> fetchAllJob() {
        Map<String, String> param = Maps.newHashMap();
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.FETCH_ALL_JOB, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, LIST_JOB_RESULT_TYPE);
    }

    /**
     * Query JobInfo by PowerQuery
     *
     * @param powerQuery JobQuery
     * @return JobInfo
     */
    @Override
    public ResultDTO<List<JobInfoDTO>> queryJob(JobInfoQuery powerQuery) {
        powerQuery.setAppIdEq(appId);
        String post = requestService.request(OpenAPIConstant.QUERY_JOB, PowerRequestBody.newJsonRequestBody(powerQuery));
        return JSON.parseObject(post, LIST_JOB_RESULT_TYPE);
    }

    /**
     * Disable one Job by jobId
     *
     * @param jobId jobId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> disableJob(Long jobId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("jobId", jobId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.DISABLE_JOB, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Enable one job by jobId
     *
     * @param jobId jobId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> enableJob(Long jobId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("jobId", jobId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.ENABLE_JOB, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Delete one job by jobId
     *
     * @param jobId jobId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> deleteJob(Long jobId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("jobId", jobId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.DELETE_JOB, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Run a job once
     *
     * @param jobId ID of the job to be run
     * @param instanceParams Runtime parameters of the job (TaskContext#instanceParams)
     * @param delayMS Delay time (milliseconds)
     * @return instanceId
     */
    @Override
    public ResultDTO<Long> runJob(Long jobId, String instanceParams, long delayMS) {

        Map<String, String> param = Maps.newHashMap();
        param.put("jobId", jobId.toString());
        param.put("appId", appId.toString());
        param.put("delay", String.valueOf(delayMS));

        if (StringUtils.isNotEmpty(instanceParams)) {
            param.put("instanceParams", instanceParams);
        }
        String post = requestService.request(OpenAPIConstant.RUN_JOB, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, LONG_RESULT_TYPE);
    }

    public ResultDTO<Long> runJob(Long jobId) {
        return runJob(jobId, null, 0);
    }

    /* ************* Instance API list ************* */

    /**
     * Stop one job instance
     *
     * @param instanceId instanceId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> stopInstance(Long instanceId) {

        Map<String, String> param = Maps.newHashMap();
        param.put("instanceId", instanceId.toString());
        param.put("appId", appId.toString());

        String post = requestService.request(OpenAPIConstant.STOP_INSTANCE, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Cancel a job instance that is not yet running.
     * Note: leave a sufficient interval between the time this API is called and the expected execution time of the instance being cancelled; otherwise cancellation is not guaranteed.
     *
     * @param instanceId instanceId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> cancelInstance(Long instanceId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("instanceId", instanceId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.CANCEL_INSTANCE, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Retry a failed job instance.
     * Note: only job instances in a terminal status (success, failure, manually stopped, cancelled) can be retried, and retries of job instances within workflows are not supported yet.
     *
     * @param instanceId instanceId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> retryInstance(Long instanceId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("instanceId", instanceId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.RETRY_INSTANCE, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Query status about a job instance
     *
     * @param instanceId instanceId
     * @return {@link InstanceStatus}
     */
    @Override
    public ResultDTO<Integer> fetchInstanceStatus(Long instanceId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("instanceId", instanceId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.FETCH_INSTANCE_STATUS, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, INTEGER_RESULT_TYPE);
    }

    /**
     * Query detail about a job instance
     *
     * @param instanceId instanceId
     * @return instance detail
     */
    @Override
    public ResultDTO<InstanceInfoDTO> fetchInstanceInfo(Long instanceId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("instanceId", instanceId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.FETCH_INSTANCE_INFO, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, INSTANCE_RESULT_TYPE);
    }

    @Override
    public ResultDTO<PageResult<InstanceInfoDTO>> queryInstanceInfo(InstancePageQuery instancePageQuery) {
        instancePageQuery.setAppIdEq(appId);
        String post = requestService.request(OpenAPIConstant.QUERY_INSTANCE, PowerRequestBody.newJsonRequestBody(instancePageQuery));
        return JSON.parseObject(post, PAGE_INSTANCE_RESULT_TYPE);
    }

    /* ************* Workflow API list ************* */

    /**
     * Save one workflow.
     * When an ID exists in SaveWorkflowRequest, it is an update operation; otherwise, it is a create operation.
     *
     * @param request Workflow meta info
     * @return workflowId
     */
    @Override
    public ResultDTO<Long> saveWorkflow(SaveWorkflowRequest request) {
        request.setAppId(appId);
        // Pitfall: serializing with FastJSON makes pEWorkflowDAG arrive as null on the server side, hence JsonUtils here
        String json = JsonUtils.toJSONStringUnsafe(request);
        String post = requestService.request(OpenAPIConstant.SAVE_WORKFLOW, PowerRequestBody.newJsonRequestBody(json));
        return JSON.parseObject(post, LONG_RESULT_TYPE);
    }

    /**
     * Copy one workflow
     *
     * @param workflowId Workflow id
     * @return Id of workflow copy
     */
    @Override
    public ResultDTO<Long> copyWorkflow(Long workflowId) {

        Map<String, String> param = Maps.newHashMap();
        param.put("workflowId", workflowId.toString());
        param.put("appId", appId.toString());

        String post = requestService.request(OpenAPIConstant.COPY_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, LONG_RESULT_TYPE);
    }

    /**
     * Save workflow nodes
     *
     * @param requestList Node info list of Workflow
     * @return Standard return object
     */
    @Override
    public ResultDTO<List<WorkflowNodeInfoDTO>> saveWorkflowNode(List<SaveWorkflowNodeRequest> requestList) {
        for (SaveWorkflowNodeRequest saveWorkflowNodeRequest : requestList) {
            saveWorkflowNodeRequest.setAppId(appId);
        }

        String json = JsonUtils.toJSONStringUnsafe(requestList);
        String post = requestService.request(OpenAPIConstant.SAVE_WORKFLOW_NODE, PowerRequestBody.newJsonRequestBody(json));
        return JSON.parseObject(post, WF_NODE_LIST_RESULT_TYPE);
    }

    /**
     * Query Workflow by workflowId
     *
     * @param workflowId workflowId
     * @return Workflow meta info
     */
    @Override
    public ResultDTO<WorkflowInfoDTO> fetchWorkflow(Long workflowId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("workflowId", workflowId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.FETCH_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, WF_RESULT_TYPE);
    }

    /**
     * Disable Workflow by workflowId
     *
     * @param workflowId workflowId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> disableWorkflow(Long workflowId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("workflowId", workflowId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.DISABLE_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Enable Workflow by workflowId
     *
     * @param workflowId workflowId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> enableWorkflow(Long workflowId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("workflowId", workflowId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.ENABLE_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Delete Workflow by workflowId
     *
     * @param workflowId workflowId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> deleteWorkflow(Long workflowId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("workflowId", workflowId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.DELETE_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Run a workflow once
     *
     * @param workflowId workflowId
     * @param initParams workflow startup parameters
     * @param delayMS Delay time (milliseconds)
     * @return workflow instanceId
     */
    @Override
    public ResultDTO<Long> runWorkflow(Long workflowId, String initParams, long delayMS) {

        Map<String, String> param = Maps.newHashMap();
        param.put("workflowId", workflowId.toString());
        param.put("appId", appId.toString());
        param.put("delay", String.valueOf(delayMS));

        if (StringUtils.isNotEmpty(initParams)) {
            param.put("initParams", initParams);
        }
        String post = requestService.request(OpenAPIConstant.RUN_WORKFLOW, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, LONG_RESULT_TYPE);
    }

    public ResultDTO<Long> runWorkflow(Long workflowId) {
        return runWorkflow(workflowId, null, 0);
    }

    /* ************* Workflow Instance API list ************* */

    /**
     * Stop one workflow instance
     *
     * @param wfInstanceId workflow instanceId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> stopWorkflowInstance(Long wfInstanceId) {

        Map<String, String> param = Maps.newHashMap();
        param.put("wfInstanceId", wfInstanceId.toString());
        param.put("appId", appId.toString());

        String post = requestService.request(OpenAPIConstant.STOP_WORKFLOW_INSTANCE, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Retry one workflow instance
     *
     * @param wfInstanceId workflow instanceId
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> retryWorkflowInstance(Long wfInstanceId) {
        Map<String, String> param = Maps.newHashMap();
        param.put("wfInstanceId", wfInstanceId.toString());
        param.put("appId", appId.toString());
        String post = requestService.request(OpenAPIConstant.RETRY_WORKFLOW_INSTANCE, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Mark the workflow node as success
     *
     * @param wfInstanceId workflow instanceId
     * @param nodeId node id
     * @return Standard return object
     */
    @Override
    public ResultDTO<Void> markWorkflowNodeAsSuccess(Long wfInstanceId, Long nodeId) {

        Map<String, String> param = Maps.newHashMap();
        param.put("wfInstanceId", wfInstanceId.toString());
        param.put("appId", appId.toString());
        param.put("nodeId", nodeId.toString());

        String post = requestService.request(OpenAPIConstant.MARK_WORKFLOW_NODE_AS_SUCCESS, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, VOID_RESULT_TYPE);
    }

    /**
     * Query detail about a workflow instance
     *
     * @param wfInstanceId workflow instanceId
     * @return detail about a workflow
     */
    @Override
    public ResultDTO<WorkflowInstanceInfoDTO> fetchWorkflowInstanceInfo(Long wfInstanceId) {

        Map<String, String> param = Maps.newHashMap();
        param.put("wfInstanceId", wfInstanceId.toString());
        param.put("appId", appId.toString());

        String post = requestService.request(OpenAPIConstant.FETCH_WORKFLOW_INSTANCE_INFO, PowerRequestBody.newFormRequestBody(param));
        return JSON.parseObject(post, WF_INSTANCE_RESULT_TYPE);
    }

    @Override
    public void close() throws IOException {
        requestService.close();
    }
}
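An end-to-end sketch of the client above; the address, app credentials and jobId are placeholders. try-with-resources works because PowerJobClient implements Closeable:

import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.response.ResultDTO;

public class OpenApiDemo {
    public static void main(String[] args) throws Exception {
        try (PowerJobClient client = new PowerJobClient("127.0.0.1:7700", "my-app", "my-password")) {
            // Trigger job 1 immediately (delayMS = 0) with some instance parameters.
            ResultDTO<Long> run = client.runJob(1L, "hello-params", 0);
            if (run.isSuccess()) {
                Long instanceId = run.getData();
                // Poll the status code; see tech.powerjob.common.enums.InstanceStatus for its meaning.
                ResultDTO<Integer> status = client.fetchInstanceStatus(instanceId);
                System.out.println("instance " + instanceId + " -> status " + status.getData());
            }
        }
    }
}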
tech/powerjob/client/TypeStore.java (new file)
@@ -0,0 +1,43 @@
package tech.powerjob.client;

import com.alibaba.fastjson.TypeReference;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.response.*;

import java.util.List;

/**
 * TypeReference store.
 *
 * @author tjq
 * @since 11/7/20
 */
public class TypeStore {

    public static final TypeReference<ResultDTO<AppAuthResult>> APP_AUTH_RESULT_TYPE = new TypeReference<ResultDTO<AppAuthResult>>(){};
    public static final TypeReference<ResultDTO<Void>> VOID_RESULT_TYPE = new TypeReference<ResultDTO<Void>>(){};

    public static final TypeReference<ResultDTO<Integer>> INTEGER_RESULT_TYPE = new TypeReference<ResultDTO<Integer>>(){};

    public static final TypeReference<ResultDTO<Long>> LONG_RESULT_TYPE = new TypeReference<ResultDTO<Long>>(){};

    public static final TypeReference<ResultDTO<JobInfoDTO>> JOB_RESULT_TYPE = new TypeReference<ResultDTO<JobInfoDTO>>(){};

    public static final TypeReference<ResultDTO<SaveJobInfoRequest>> SAVE_JOB_INFO_REQUEST_RESULT_TYPE = new TypeReference<ResultDTO<SaveJobInfoRequest>>(){};

    public static final TypeReference<ResultDTO<List<JobInfoDTO>>> LIST_JOB_RESULT_TYPE = new TypeReference<ResultDTO<List<JobInfoDTO>>>(){};

    public static final TypeReference<ResultDTO<InstanceInfoDTO>> INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<InstanceInfoDTO>>() {};

    public static final TypeReference<ResultDTO<List<InstanceInfoDTO>>> LIST_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<List<InstanceInfoDTO>>>(){};

    public static final TypeReference<ResultDTO<PageResult<InstanceInfoDTO>>> PAGE_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<PageResult<InstanceInfoDTO>>>(){};

    public static final TypeReference<ResultDTO<WorkflowInfoDTO>> WF_RESULT_TYPE = new TypeReference<ResultDTO<WorkflowInfoDTO>>() {};

    public static final TypeReference<ResultDTO<WorkflowInstanceInfoDTO>> WF_INSTANCE_RESULT_TYPE = new TypeReference<ResultDTO<WorkflowInstanceInfoDTO>>() {};

    public static final TypeReference<ResultDTO<List<WorkflowNodeInfoDTO>>> WF_NODE_LIST_RESULT_TYPE = new TypeReference<ResultDTO<List<WorkflowNodeInfoDTO>>>() {};

}
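The anonymous TypeReference subclasses above exist to defeat type erasure: fastjson can only rebuild the full generic ResultDTO when the type argument is captured in a TypeReference. A minimal sketch of the difference (the json string is a placeholder payload):

import com.alibaba.fastjson.JSON;
import tech.powerjob.client.TypeStore;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;

public class TypeStoreDemo {
    public static void main(String[] args) {
        String json = "{\"success\":true,\"data\":{}}";  // placeholder response
        // With the raw class literal, 'data' stays an untyped JSONObject:
        ResultDTO raw = JSON.parseObject(json, ResultDTO.class);
        // With the stored TypeReference, 'data' is materialized as JobInfoDTO:
        ResultDTO<JobInfoDTO> typed = JSON.parseObject(json, TypeStore.JOB_RESULT_TYPE);
        System.out.println(raw.isSuccess() + " / " + typed.getData());
    }
}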
tech/powerjob/client/common/Protocol.java (new file)
@@ -0,0 +1,28 @@
package tech.powerjob.client.common;

import lombok.Getter;

/**
 * Protocol
 *
 * @author tjq
 * @since 2024/2/20
 */
@Getter
public enum Protocol {

    HTTP("http"),

    HTTPS("https");

    private final String protocol;

    Protocol(String protocol) {
        this.protocol = protocol;
    }

    @Override
    public String toString() {
        return protocol;
    }
}
tech/powerjob/client/extension/ClientExtension.java (new file)
@@ -0,0 +1,19 @@
package tech.powerjob.client.extension;

import java.util.List;

/**
 * Extension service.
 *
 * @author tjq
 * @since 2024/8/11
 */
public interface ClientExtension {

    /**
     * Provide server addresses dynamically, for scenarios where the server is deployed on a dynamic cluster.
     * @param context context
     * @return addresses, same format as ClientConfig#addressList
     */
    List<String> addressProvider(ExtensionContext context);
}
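A sketch of what an implementation might look like; the environment variable name is hypothetical, and any service-discovery source would do:

import tech.powerjob.client.extension.ClientExtension;
import tech.powerjob.client.extension.ExtensionContext;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Resolves server addresses at request time, useful when the server cluster is rescheduled dynamically.
public class EnvAddressExtension implements ClientExtension {

    @Override
    public List<String> addressProvider(ExtensionContext context) {
        String raw = System.getenv("POWERJOB_SERVER_ADDRESSES"); // hypothetical, e.g. "10.0.0.1:7700,10.0.0.2:7700"
        if (raw == null || raw.isEmpty()) {
            return Collections.emptyList(); // the caller falls back to ClientConfig#addressList (see ClusterRequestService below)
        }
        return Arrays.asList(raw.split(","));
    }
}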
tech/powerjob/client/extension/ExtensionContext.java (new file)
@@ -0,0 +1,10 @@
package tech.powerjob.client.extension;

/**
 * Extension context.
 *
 * @author tjq
 * @since 2024/8/11
 */
public class ExtensionContext {
}
tech/powerjob/client/module/AppAuthRequest.java (new file)
@@ -0,0 +1,39 @@
package tech.powerjob.client.module;

import lombok.Getter;
import lombok.Setter;
import lombok.ToString;

import java.io.Serializable;
import java.util.Map;

/**
 * App auth request.
 *
 * @author tjq
 * @since 2024/2/19
 */
@Getter
@Setter
@ToString
public class AppAuthRequest implements Serializable {

    /**
     * Application name
     */
    private String appName;
    /**
     * Encrypted password
     */
    private String encryptedPassword;

    /**
     * Encryption type
     */
    private String encryptType;

    /**
     * Extra parameters, so developers can pass additional data
     */
    private Map<String, Object> extra;
}
tech/powerjob/client/module/AppAuthResult.java (new file)
@@ -0,0 +1,30 @@
package tech.powerjob.client.module;

import lombok.Getter;
import lombok.Setter;
import lombok.ToString;

import java.io.Serializable;
import java.util.Map;

/**
 * App auth response.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Getter
@Setter
@ToString
public class AppAuthResult implements Serializable {

    private Long appId;

    private String token;

    /**
     * Extra parameters;
     * developers with stricter security requirements can extend via this field
     */
    private Map<String, Object> extra;
}
tech/powerjob/client/service/HttpResponse.java (new file)
@@ -0,0 +1,26 @@
package tech.powerjob.client.service;

import lombok.Data;
import lombok.experimental.Accessors;

import java.io.Serializable;
import java.util.Map;

/**
 * HTTP response.
 *
 * @author tjq
 * @since 2024/8/10
 */
@Data
@Accessors(chain = true)
public class HttpResponse implements Serializable {

    private boolean success;

    private int code;

    private String response;

    private Map<String, String> headers;
}
tech/powerjob/client/service/PowerRequestBody.java (new file)
@@ -0,0 +1,47 @@
package tech.powerjob.client.service;

import com.google.common.collect.Maps;
import lombok.Getter;
import tech.powerjob.common.enums.MIME;

import java.util.Map;

/**
 * Request body.
 *
 * @author tjq
 * @since 2024/8/10
 */
@Getter
public class PowerRequestBody {

    private MIME mime;

    private Object payload;

    private final Map<String, String> headers = Maps.newHashMap();

    private PowerRequestBody() {
    }

    public static PowerRequestBody newJsonRequestBody(Object data) {
        PowerRequestBody powerRequestBody = new PowerRequestBody();
        powerRequestBody.mime = MIME.APPLICATION_JSON;
        powerRequestBody.payload = data;
        return powerRequestBody;
    }

    public static PowerRequestBody newFormRequestBody(Map<String, String> form) {
        PowerRequestBody powerRequestBody = new PowerRequestBody();
        powerRequestBody.mime = MIME.APPLICATION_FORM;
        powerRequestBody.payload = form;
        return powerRequestBody;
    }

    public void addHeaders(Map<String, String> hs) {
        if (hs == null || hs.isEmpty()) {
            return;
        }
        this.headers.putAll(hs);
    }
}
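For orientation, a brief sketch of the two factory methods above (the constructor is private, so they are the only way to obtain an instance); the header name is a made-up example of the kind of tag ClientConfig#defaultHeaders is meant for:

import com.google.common.collect.ImmutableMap;
import tech.powerjob.client.service.PowerRequestBody;

public class RequestBodyDemo {

    static PowerRequestBody buildForm() {
        // Form payload: plain key-value pairs.
        PowerRequestBody body = PowerRequestBody.newFormRequestBody(
                ImmutableMap.of("jobId", "1", "appId", "2"));
        // Hypothetical infrastructure header.
        body.addHeaders(ImmutableMap.of("X-Traffic-Tag", "powerjob-client"));
        return body;
    }

    static PowerRequestBody buildJson(Object dto) {
        // JSON payload: the object is serialized by the transport implementation on send.
        return PowerRequestBody.newJsonRequestBody(dto);
    }
}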
tech/powerjob/client/service/RequestService.java (new file)
@@ -0,0 +1,15 @@
package tech.powerjob.client.service;

import java.io.Closeable;

/**
 * Request service.
 *
 * @author tjq
 * @since 2024/2/20
 */
public interface RequestService extends Closeable {

    String request(String path, PowerRequestBody powerRequestBody);
}
tech/powerjob/client/service/impl/AppAuthClusterRequestService.java (new file)
@@ -0,0 +1,107 @@
package tech.powerjob.client.service.impl;

import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Maps;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.TypeStore;
import tech.powerjob.client.module.AppAuthRequest;
import tech.powerjob.client.module.AppAuthResult;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.common.OpenAPIConstant;
import tech.powerjob.common.enums.EncryptType;
import tech.powerjob.common.exception.PowerJobException;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.utils.DigestUtils;
import tech.powerjob.common.utils.MapUtils;

import java.util.Map;

/**
 * Encapsulates the auth-related logic.
 *
 * @author tjq
 * @since 2024/2/21
 */
@Slf4j
abstract class AppAuthClusterRequestService extends ClusterRequestService {

    protected AppAuthResult appAuthResult;

    public AppAuthClusterRequestService(ClientConfig config) {
        super(config);
    }

    @Override
    public String request(String path, PowerRequestBody powerRequestBody) {
        // If there is no appAuthResult yet, authenticate first
        if (appAuthResult == null) {
            refreshAppAuthResult();
        }

        HttpResponse httpResponse = doRequest(path, powerRequestBody);

        // If auth succeeded, the request is valid; return directly
        String authStatus = MapUtils.getString(httpResponse.getHeaders(), OpenAPIConstant.RESPONSE_HEADER_AUTH_STATUS);
        if (Boolean.TRUE.toString().equalsIgnoreCase(authStatus)) {
            return httpResponse.getResponse();
        }

        // Otherwise the request is invalid: refresh the auth info and send the request again
        log.warn("[PowerJobClient] auth failed[authStatus: {}], try to refresh the auth info", authStatus);
        refreshAppAuthResult();
        httpResponse = doRequest(path, powerRequestBody);

        // As long as the request itself does not fail, return directly (on auth failure the auth error message is returned; the server guarantees the response is never empty)
        return httpResponse.getResponse();
    }

    private HttpResponse doRequest(String path, PowerRequestBody powerRequestBody) {

        // Attach the auth info
        Map<String, String> authHeaders = buildAuthHeader();
        powerRequestBody.addHeaders(authHeaders);

        HttpResponse httpResponse = clusterHaRequest(path, powerRequestBody);

        // Any unsuccessful request fails fast
        if (!httpResponse.isSuccess()) {
            throw new PowerJobException("REMOTE_SERVER_INNER_EXCEPTION");
        }
        return httpResponse;
    }

    private Map<String, String> buildAuthHeader() {
        Map<String, String> authHeader = Maps.newHashMap();
        authHeader.put(OpenAPIConstant.REQUEST_HEADER_APP_ID, String.valueOf(appAuthResult.getAppId()));
        authHeader.put(OpenAPIConstant.REQUEST_HEADER_ACCESS_TOKEN, appAuthResult.getToken());
        return authHeader;
    }

    @SneakyThrows
    private void refreshAppAuthResult() {
        AppAuthRequest appAuthRequest = buildAppAuthRequest();
        HttpResponse httpResponse = clusterHaRequest(OpenAPIConstant.AUTH_APP, PowerRequestBody.newJsonRequestBody(appAuthRequest));
        if (!httpResponse.isSuccess()) {
            throw new PowerJobException("AUTH_APP_EXCEPTION!");
        }
        ResultDTO<AppAuthResult> authResultDTO = JSONObject.parseObject(httpResponse.getResponse(), TypeStore.APP_AUTH_RESULT_TYPE);
        if (!authResultDTO.isSuccess()) {
            throw new PowerJobException("AUTH_FAILED_" + authResultDTO.getMessage());
        }

        log.warn("[PowerJobClient] refresh auth info successfully!");
        this.appAuthResult = authResultDTO.getData();
    }

    protected AppAuthRequest buildAppAuthRequest() {
        AppAuthRequest appAuthRequest = new AppAuthRequest();
        appAuthRequest.setAppName(config.getAppName());
        appAuthRequest.setEncryptedPassword(DigestUtils.md5(config.getPassword()));
        appAuthRequest.setEncryptType(EncryptType.MD5.getCode());
        return appAuthRequest;
    }
}
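The class above is a template method: subclasses only supply the raw HTTP exchange, while auth-header injection and the refresh-then-retry flow live here. A skeletal, non-functional subclass to illustrate the contract (the real transport is ClusterRequestServiceOkHttp3Impl below); it sits in the same package because the superclass is package-private:

package tech.powerjob.client.service.impl;

import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;

import java.io.IOException;

class NoopRequestService extends AppAuthClusterRequestService {

    NoopRequestService(ClientConfig config) {
        super(config);
    }

    @Override
    protected HttpResponse sendHttpRequest(String url, PowerRequestBody body) throws IOException {
        // A real implementation performs the HTTP exchange here and maps it to HttpResponse.
        throw new IOException("transport not implemented in this sketch: " + url);
    }

    @Override
    public void close() {
        // nothing to release in this sketch
    }
}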
@ -0,0 +1,140 @@
|
||||
package tech.powerjob.client.service.impl;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import tech.powerjob.client.ClientConfig;
|
||||
import tech.powerjob.client.extension.ClientExtension;
|
||||
import tech.powerjob.client.extension.ExtensionContext;
|
||||
import tech.powerjob.client.service.HttpResponse;
|
||||
import tech.powerjob.client.service.PowerRequestBody;
|
||||
import tech.powerjob.client.service.RequestService;
|
||||
import tech.powerjob.common.OpenAPIConstant;
|
||||
import tech.powerjob.common.exception.PowerJobException;
|
||||
import tech.powerjob.common.utils.CollectionUtils;
|
||||
|
||||
import javax.net.ssl.X509TrustManager;
|
||||
import java.io.IOException;
|
||||
import java.security.cert.X509Certificate;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* 集群请求服务
|
||||
* 封装网络相关通用逻辑
|
||||
*
|
||||
* @author tjq
|
||||
* @since 2024/2/21
|
||||
*/
|
||||
@Slf4j
|
||||
abstract class ClusterRequestService implements RequestService {
|
||||
|
||||
protected final ClientConfig config;
|
||||
|
||||
/**
|
||||
* 当前地址(上次请求成功的地址)
|
||||
*/
|
||||
protected String currentAddress;
|
||||
|
||||
/**
|
||||
* 地址格式
|
||||
* 协议://域名/OpenAPI/子路径
|
||||
*/
|
||||
protected static final String URL_PATTERN = "%s://%s%s%s";
|
||||
|
||||
/**
|
||||
* 默认超时时间
|
||||
*/
|
||||
protected static final Integer DEFAULT_TIMEOUT_SECONDS = 2;
|
||||
|
||||
protected static final int HTTP_SUCCESS_CODE = 200;
|
||||
|
||||
public ClusterRequestService(ClientConfig config) {
|
||||
this.config = config;
|
||||
this.currentAddress = config.getAddressList().get(0);
|
||||
}
|
||||
|
||||
/**
|
||||
* 具体某一次 HTTP 请求的实现
|
||||
* @param url 完整请求地址
|
||||
* @param body 请求体
|
||||
* @return 响应
|
||||
* @throws IOException 异常
|
||||
*/
|
||||
protected abstract HttpResponse sendHttpRequest(String url, PowerRequestBody body) throws IOException;
|
||||
|
||||
/**
|
||||
* 封装集群请求能力
|
||||
* @param path 请求 PATH
|
||||
* @param powerRequestBody 请求体
|
||||
* @return 响应
|
||||
*/
|
||||
protected HttpResponse clusterHaRequest(String path, PowerRequestBody powerRequestBody) {
|
||||
|
||||
// 先尝试默认地址
|
||||
String url = getUrl(path, currentAddress);
|
||||
try {
|
||||
return sendHttpRequest(url, powerRequestBody);
|
||||
} catch (IOException e) {
|
||||
log.warn("[ClusterRequestService] request url:{} failed, reason is {}.", url, e.toString());
|
||||
}
|
||||
|
||||
List<String> addressList = fetchAddressList();
|
||||
|
||||
// 失败,开始重试
|
||||
for (String addr : addressList) {
|
||||
if (Objects.equals(addr, currentAddress)) {
|
||||
continue;
|
||||
}
|
||||
url = getUrl(path, addr);
|
||||
try {
|
||||
HttpResponse res = sendHttpRequest(url, powerRequestBody);
|
||||
log.warn("[ClusterRequestService] server change: from({}) -> to({}).", currentAddress, addr);
|
||||
currentAddress = addr;
|
||||
return res;
|
||||
} catch (IOException e) {
|
||||
log.warn("[ClusterRequestService] request url:{} failed, reason is {}.", url, e.toString());
|
||||
}
|
||||
}
|
||||
|
||||
log.error("[ClusterRequestService] do post for path: {} failed because of no server available in {}.", path, addressList);
|
||||
throw new PowerJobException("no server available when send post request");
|
||||
}
|
||||
|
||||
private List<String> fetchAddressList() {
|
||||
|
||||
ClientExtension clientExtension = config.getClientExtension();
|
||||
if (clientExtension != null) {
|
||||
List<String> addressList = clientExtension.addressProvider(new ExtensionContext());
|
||||
if (!CollectionUtils.isEmpty(addressList)) {
|
||||
return addressList;
|
||||
}
|
||||
}
|
||||
|
||||
return config.getAddressList();
|
||||
}
|
||||
|
||||
/**
|
||||
* 不验证证书
|
||||
* X.509 是一个国际标准,定义了公钥证书的格式。这个标准是由国际电信联盟(ITU-T)制定的,用于公钥基础设施(PKI)中数字证书的创建和分发。X.509证书主要用于在公开网络上验证实体的身份,如服务器或客户端的身份验证过程中,确保通信双方是可信的。X.509证书广泛应用于多种安全协议中,包括SSL/TLS,它是实现HTTPS的基础。
|
||||
*/
|
||||
protected static class NoVerifyX509TrustManager implements X509TrustManager {
|
||||
@Override
|
||||
public void checkClientTrusted(X509Certificate[] arg0, String arg1) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void checkServerTrusted(X509Certificate[] arg0, String arg1) {
|
||||
// 不验证
|
||||
}
|
||||
|
||||
@Override
|
||||
public X509Certificate[] getAcceptedIssuers() {
|
||||
return new X509Certificate[0];
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private String getUrl(String path, String address) {
|
||||
String protocol = config.getProtocol().getProtocol();
|
||||
return String.format(URL_PATTERN, protocol, address, OpenAPIConstant.WEB_PATH, path);
|
||||
}
|
||||
}
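Note that `fetchAddressList` consults an optional `ClientExtension` before falling back to the static `config.getAddressList()`, which gives callers a hook for service discovery. A sketch of such an extension, assuming `addressProvider(ExtensionContext)` is the interface's only abstract method (as the call site above suggests):

```java
import tech.powerjob.client.extension.ClientExtension;
import tech.powerjob.client.extension.ExtensionContext;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Resolves the server list from an environment variable that an orchestrator
 * keeps up to date, e.g. "10.0.0.1:7700,10.0.0.2:7700".
 */
public class EnvAddressProvider implements ClientExtension {

    @Override
    public List<String> addressProvider(ExtensionContext context) {
        String raw = System.getenv("POWERJOB_SERVER_ADDRESSES");
        if (raw == null || raw.trim().isEmpty()) {
            // an empty result makes the client fall back to config.getAddressList()
            return Collections.emptyList();
        }
        return Arrays.asList(raw.split(","));
    }
}
```

Because `clusterHaRequest` re-fetches the list on every failover pass, a provider like this lets the client pick up topology changes without a restart.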
@@ -0,0 +1,148 @@
package tech.powerjob.client.service.impl;

import com.google.common.collect.Maps;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import okhttp3.*;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.common.Protocol;
import tech.powerjob.client.service.HttpResponse;
import tech.powerjob.client.service.PowerRequestBody;
import tech.powerjob.common.OmsConstant;
import tech.powerjob.common.serialize.JsonUtils;

import javax.net.ssl.*;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;

/**
 * OkHttp3-based implementation of the cluster request service.
 *
 * @author tjq
 * @since 2024/2/20
 */
@Slf4j
public class ClusterRequestServiceOkHttp3Impl extends AppAuthClusterRequestService {

    private final OkHttpClient okHttpClient;


    public ClusterRequestServiceOkHttp3Impl(ClientConfig config) {
        super(config);

        // Initialize the HTTP client
        if (Protocol.HTTPS.equals(config.getProtocol())) {
            okHttpClient = initHttpsNoVerifyClient();
        } else {
            okHttpClient = initHttpClient();
        }
    }

    @Override
    protected HttpResponse sendHttpRequest(String url, PowerRequestBody powerRequestBody) throws IOException {

        // Attach the common headers
        powerRequestBody.addHeaders(config.getDefaultHeaders());

        Object obj = powerRequestBody.getPayload();

        RequestBody requestBody = null;

        switch (powerRequestBody.getMime()) {
            case APPLICATION_JSON:
                MediaType jsonType = MediaType.parse(OmsConstant.JSON_MEDIA_TYPE);
                String body = obj instanceof String ? (String) obj : JsonUtils.toJSONStringUnsafe(obj);
                requestBody = RequestBody.create(jsonType, body);

                break;
            case APPLICATION_FORM:
                FormBody.Builder formBuilder = new FormBody.Builder();
                Map<String, String> formObj = (Map<String, String>) obj;
                formObj.forEach(formBuilder::add);
                requestBody = formBuilder.build();
        }

        Request request = new Request.Builder()
                .post(requestBody)
                .headers(Headers.of(powerRequestBody.getHeaders()))
                .url(url)
                .build();

        try (Response response = okHttpClient.newCall(request).execute()) {

            int code = response.code();
            HttpResponse httpResponse = new HttpResponse()
                    .setCode(code)
                    .setSuccess(code == HTTP_SUCCESS_CODE);

            ResponseBody body = response.body();
            if (body != null) {
                httpResponse.setResponse(body.string());
            }

            Headers respHeaders = response.headers();
            Set<String> headerNames = respHeaders.names();
            Map<String, String> respHeaderMap = Maps.newHashMap();
            headerNames.forEach(hdKey -> respHeaderMap.put(hdKey, respHeaders.get(hdKey)));

            httpResponse.setHeaders(respHeaderMap);

            return httpResponse;
        }
    }

    @SneakyThrows
    private OkHttpClient initHttpClient() {
        OkHttpClient.Builder okHttpBuilder = commonOkHttpBuilder();
        return okHttpBuilder.build();
    }

    @SneakyThrows
    private OkHttpClient initHttpsNoVerifyClient() {

        X509TrustManager trustManager = new NoVerifyX509TrustManager();

        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(null, new TrustManager[]{trustManager}, new SecureRandom());
        SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();

        OkHttpClient.Builder okHttpBuilder = commonOkHttpBuilder();

        // Skip certificate verification
        okHttpBuilder.sslSocketFactory(sslSocketFactory, trustManager);
        // Skip hostname verification of the URL
        okHttpBuilder.hostnameVerifier((String hostname, SSLSession session) -> true);


        return okHttpBuilder.build();
    }

    private OkHttpClient.Builder commonOkHttpBuilder() {
        return new OkHttpClient.Builder()
                // read timeout
                .readTimeout(Optional.ofNullable(config.getReadTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // write timeout
                .writeTimeout(Optional.ofNullable(config.getWriteTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                // connect timeout
                .connectTimeout(Optional.ofNullable(config.getConnectionTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS)
                .callTimeout(Optional.ofNullable(config.getConnectionTimeout()).orElse(DEFAULT_TIMEOUT_SECONDS), TimeUnit.SECONDS);
    }

    @Override
    public void close() throws IOException {

        // Shut down the dispatcher
        okHttpClient.dispatcher().executorService().shutdown();
        // Evict the connection pool
        okHttpClient.connectionPool().evictAll();
        // Close the cache (if one is configured)
        Cache cache = okHttpClient.cache();
        if (cache != null) {
            cache.close();
        }
    }
}
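The `NoVerifyX509TrustManager` path makes HTTPS work against self-signed certificates, at the cost of disabling server authentication entirely. Where that trade-off is unacceptable, a trust store pinned to the cluster's own CA is a middle ground. A sketch using only standard JSSE APIs, not part of this change (the PEM path is a placeholder):

```java
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.KeyStore;
import java.security.SecureRandom;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;

/**
 * Trusts exactly one CA certificate instead of disabling verification.
 */
final class PinnedCaTls {

    static SSLSocketFactory socketFactory;
    static X509TrustManager trustManager;

    static void init(String caPemPath) throws Exception {
        // Load the CA certificate from a PEM/DER file (path is a placeholder)
        CertificateFactory cf = CertificateFactory.getInstance("X.509");
        X509Certificate ca;
        try (InputStream in = Files.newInputStream(Paths.get(caPemPath))) {
            ca = (X509Certificate) cf.generateCertificate(in);
        }

        // Put it into an empty, in-memory key store
        KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
        ks.load(null, null);
        ks.setCertificateEntry("powerjob-ca", ca);

        // Derive a trust manager that only accepts chains rooted in that CA
        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmf.init(ks);
        trustManager = (X509TrustManager) tmf.getTrustManagers()[0];

        SSLContext ctx = SSLContext.getInstance("TLS");
        ctx.init(null, tmf.getTrustManagers(), new SecureRandom());
        socketFactory = ctx.getSocketFactory();
    }
}
```

Both values could then be handed to `okHttpBuilder.sslSocketFactory(socketFactory, trustManager)` in place of the no-verify pair, and the permissive `hostnameVerifier` override dropped.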
@@ -1,90 +0,0 @@
import com.github.kfcfans.powerjob.common.ExecuteType;
import com.github.kfcfans.powerjob.common.ProcessorType;
import com.github.kfcfans.powerjob.common.TimeExpressionType;
import com.github.kfcfans.powerjob.common.request.http.SaveJobInfoRequest;
import com.github.kfcfans.powerjob.common.response.JobInfoDTO;
import com.github.kfcfans.powerjob.common.response.ResultDTO;
import com.github.kfcfans.powerjob.client.OhMyClient;
import com.github.kfcfans.powerjob.common.utils.JsonUtils;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

/**
 * Tests for the client.
 *
 * @author tjq
 * @since 2020/4/15
 */
public class TestClient {

    private static OhMyClient ohMyClient;

    @BeforeAll
    public static void initClient() throws Exception {
        ohMyClient = new OhMyClient("127.0.0.1:7700", "oms-test2", null);
    }

    @Test
    public void testSaveJob() throws Exception {

        SaveJobInfoRequest newJobInfo = new SaveJobInfoRequest();
        // newJobInfo.setId(8L);
        newJobInfo.setJobName("omsOpenAPIJobccccc");
        newJobInfo.setJobDescription("test OpenAPI");
        newJobInfo.setJobParams("{'aa':'bb'}");
        newJobInfo.setTimeExpressionType(TimeExpressionType.CRON);
        newJobInfo.setTimeExpression("0 0 * * * ? ");
        newJobInfo.setExecuteType(ExecuteType.STANDALONE);
        newJobInfo.setProcessorType(ProcessorType.EMBEDDED_JAVA);
        newJobInfo.setProcessorInfo("com.github.kfcfans.oms.server.tester.OmsLogPerformanceTester");
        newJobInfo.setDesignatedWorkers("192.168.1.1:2777");

        newJobInfo.setMinCpuCores(1.1);
        newJobInfo.setMinMemorySpace(1.2);
        newJobInfo.setMinDiskSpace(1.3);

        ResultDTO<Long> resultDTO = ohMyClient.saveJob(newJobInfo);
        System.out.println(JsonUtils.toJSONString(resultDTO));
    }

    @Test
    public void testFetchJob() throws Exception {
        ResultDTO<JobInfoDTO> fetchJob = ohMyClient.fetchJob(1L);
        System.out.println(JsonUtils.toJSONStringUnsafe(fetchJob));
    }

    @Test
    public void testDisableJob() throws Exception {
        System.out.println(ohMyClient.disableJob(7L));
    }

    @Test
    public void testEnableJob() throws Exception {
        System.out.println(ohMyClient.enableJob(7L));
    }

    @Test
    public void testDeleteJob() throws Exception {
        System.out.println(ohMyClient.deleteJob(7L));
    }

    @Test
    public void testRunJob() throws Exception {
        System.out.println(ohMyClient.runJob(8L, "this is instanceParams"));
    }

    @Test
    public void testFetchInstanceInfo() throws Exception {
        System.out.println(ohMyClient.fetchInstanceInfo(141251409466097728L));
    }

    @Test
    public void testStopInstance() throws Exception {
        ResultDTO<Void> res = ohMyClient.stopInstance(141251409466097728L);
        System.out.println(res.toString());
    }

    @Test
    public void testFetchInstanceStatus() throws Exception {
        System.out.println(ohMyClient.fetchInstanceStatus(141251409466097728L));
    }
}
@@ -1,84 +0,0 @@
import com.github.kfcfans.powerjob.client.OhMyClient;
import com.github.kfcfans.powerjob.common.TimeExpressionType;
import com.github.kfcfans.powerjob.common.model.PEWorkflowDAG;
import com.github.kfcfans.powerjob.common.request.http.SaveWorkflowRequest;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import java.util.List;

/**
 * Tests for the client (workflow part).
 *
 * @author tjq
 * @since 2020/6/2
 */
public class TestWorkflow {

    private static OhMyClient ohMyClient;

    @BeforeAll
    public static void initClient() throws Exception {
        ohMyClient = new OhMyClient("127.0.0.1:7700", "oms-test", null);
    }

    @Test
    public void testSaveWorkflow() throws Exception {

        // The DAG
        List<PEWorkflowDAG.Node> nodes = Lists.newLinkedList();
        List<PEWorkflowDAG.Edge> edges = Lists.newLinkedList();

        nodes.add(new PEWorkflowDAG.Node(1L, "node-1"));
        nodes.add(new PEWorkflowDAG.Node(2L, "node-2"));

        edges.add(new PEWorkflowDAG.Edge(1L, 2L));

        PEWorkflowDAG peWorkflowDAG = new PEWorkflowDAG(nodes, edges);
        SaveWorkflowRequest req = new SaveWorkflowRequest();

        req.setWfName("workflow-by-client");
        req.setWfDescription("created by client");
        req.setPEWorkflowDAG(peWorkflowDAG);
        req.setEnable(true);
        req.setTimeExpressionType(TimeExpressionType.API);

        System.out.println(ohMyClient.saveWorkflow(req));
    }

    @Test
    public void testDisableWorkflow() throws Exception {
        System.out.println(ohMyClient.disableWorkflow(4L));
    }

    @Test
    public void testDeleteWorkflow() throws Exception {
        System.out.println(ohMyClient.deleteWorkflow(4L));
    }

    @Test
    public void testEnableWorkflow() throws Exception {
        System.out.println(ohMyClient.enableWorkflow(4L));
    }

    @Test
    public void testFetchWorkflowInfo() throws Exception {
        System.out.println(ohMyClient.fetchWorkflow(5L));
    }

    @Test
    public void testRunWorkflow() throws Exception {
        System.out.println(ohMyClient.runWorkflow(5L));
    }

    @Test
    public void testStopWorkflowInstance() throws Exception {
        System.out.println(ohMyClient.stopWorkflowInstance(149962433421639744L));
    }

    @Test
    public void testFetchWfInstanceInfo() throws Exception {
        System.out.println(ohMyClient.fetchWorkflowInstanceInfo(149962433421639744L));
    }
}
@@ -0,0 +1,22 @@
package tech.powerjob.client.test;

import com.google.common.collect.Lists;
import org.junit.jupiter.api.BeforeAll;
import tech.powerjob.client.IPowerJobClient;
import tech.powerjob.client.PowerJobClient;

/**
 * Initialize the PowerJobClient shared by the tests.
 *
 * @author tjq
 * @since 1/16/21
 */
public class ClientInitializer {

    protected static IPowerJobClient powerJobClient;

    @BeforeAll
    public static void initClient() throws Exception {
        powerJobClient = new PowerJobClient(Lists.newArrayList("127.0.0.1:7700", "127.0.0.1:7701"), "powerjob-worker-samples", "powerjob123");
    }
}
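The string-based constructor above is the shorthand; the request services in this change are driven by a `ClientConfig`. A hedged sketch of config-based setup, assuming `ClientConfig` is a Lombok-style bean whose setters mirror the getters used by `ClusterRequestService` (`setAddressList`, `setAppName`, `setPassword`, `setProtocol`, ...) — setter names are not confirmed by this diff:

```java
import com.google.common.collect.Lists;
import tech.powerjob.client.ClientConfig;
import tech.powerjob.client.common.Protocol;

public class ConfigSketch {
    public static ClientConfig build() {
        ClientConfig config = new ClientConfig();   // hypothetical setters below
        config.setAddressList(Lists.newArrayList("127.0.0.1:7700", "127.0.0.1:7701"));
        config.setAppName("powerjob-worker-samples");
        config.setPassword("powerjob123");
        config.setProtocol(Protocol.HTTP);          // HTTPS would select the no-verify client
        return config;
    }
}
```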
@@ -0,0 +1,172 @@
package tech.powerjob.client.test;

import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.query.InstancePageQuery;
import tech.powerjob.common.response.InstanceInfoDTO;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;

/**
 * Test cases for {@link PowerJobClient}
 *
 * @author tjq
 * @author Echo009
 * @since 2020/4/15
 */
@Slf4j
class TestClient extends ClientInitializer {

    public static final long JOB_ID = 1L;

    @Test
    void testSaveJob() {

        SaveJobInfoRequest newJobInfo = new SaveJobInfoRequest();
        newJobInfo.setId(JOB_ID);
        newJobInfo.setJobName("omsOpenAPIJobccccc" + System.currentTimeMillis());
        newJobInfo.setJobDescription("test OpenAPI" + System.currentTimeMillis());
        newJobInfo.setJobParams("{'aa':'bb'}");
        newJobInfo.setTimeExpressionType(TimeExpressionType.CRON);
        newJobInfo.setTimeExpression("0 0 * * * ? ");
        newJobInfo.setExecuteType(ExecuteType.STANDALONE);
        newJobInfo.setProcessorType(ProcessorType.BUILT_IN);
        newJobInfo.setProcessorInfo("tech.powerjob.samples.processors.StandaloneProcessorDemo");
        newJobInfo.setDesignatedWorkers("");

        newJobInfo.setMinCpuCores(1.1);
        newJobInfo.setMinMemorySpace(1.2);
        newJobInfo.setMinDiskSpace(1.3);

        log.info("[TestClient] [testSaveJob] SaveJobInfoRequest: {}", JSONObject.toJSONString(newJobInfo));

        ResultDTO<Long> resultDTO = powerJobClient.saveJob(newJobInfo);
        log.info("[TestClient] [testSaveJob] result: {}", JSONObject.toJSONString(resultDTO));
        Assertions.assertNotNull(resultDTO);
    }

    @Test
    void testCopyJob() {
        ResultDTO<Long> copyJobRes = powerJobClient.copyJob(JOB_ID);
        System.out.println(JSONObject.toJSONString(copyJobRes));
        Assertions.assertNotNull(copyJobRes);
    }

    @Test
    void testExportJob() {
        ResultDTO<SaveJobInfoRequest> exportJobRes = powerJobClient.exportJob(JOB_ID);
        System.out.println(JSONObject.toJSONString(exportJobRes));
    }

    @Test
    void testFetchJob() {
        ResultDTO<JobInfoDTO> fetchJob = powerJobClient.fetchJob(JOB_ID);
        System.out.println(JSONObject.toJSONString(fetchJob));
        Assertions.assertNotNull(fetchJob);
    }

    @Test
    void testDisableJob() {
        ResultDTO<Void> res = powerJobClient.disableJob(JOB_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testEnableJob() {
        ResultDTO<Void> res = powerJobClient.enableJob(JOB_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testDeleteJob() {
        ResultDTO<Void> res = powerJobClient.deleteJob(JOB_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testRun() {
        ResultDTO<Long> res = powerJobClient.runJob(JOB_ID, null, 0);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testRunJobDelay() {
        ResultDTO<Long> res = powerJobClient.runJob(JOB_ID, "this is instanceParams", 60000);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testFetchInstanceInfo() {
        ResultDTO<InstanceInfoDTO> res = powerJobClient.fetchInstanceInfo(702482902331424832L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testQueryInstanceInfo() {
        InstancePageQuery instancePageQuery = new InstancePageQuery();
        instancePageQuery.setJobIdEq(11L);
        instancePageQuery.setSortBy("actualTriggerTime");
        instancePageQuery.setAsc(true);
        instancePageQuery.setPageSize(3);
        instancePageQuery.setStatusIn(Lists.newArrayList(1, 2, 5));
        TestUtils.output(powerJobClient.queryInstanceInfo(instancePageQuery));
    }

    @Test
    void testStopInstance() {
        ResultDTO<Void> res = powerJobClient.stopInstance(702482902331424832L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testFetchInstanceStatus() {
        ResultDTO<Integer> res = powerJobClient.fetchInstanceStatus(702482902331424832L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testCancelInstanceInTimeWheel() {
        ResultDTO<Long> startRes = powerJobClient.runJob(JOB_ID, "start by OhMyClient", 20000);
        System.out.println("runJob result: " + JSONObject.toJSONString(startRes));
        ResultDTO<Void> cancelRes = powerJobClient.cancelInstance(startRes.getData());
        System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes));
        Assertions.assertTrue(cancelRes.isSuccess());
    }

    // @Test
    // @SneakyThrows
    // void testCancelInstanceInDatabase() {
    //     ResultDTO<Long> startRes = powerJobClient.runJob(15L, "start by OhMyClient", 2000000);
    //     System.out.println("runJob result: " + JSONObject.toJSONString(startRes));
    //
    //     // Restart the server manually and clear all the data in the time wheel.
    //     TimeUnit.MINUTES.sleep(1);
    //
    //     ResultDTO<Void> cancelRes = powerJobClient.cancelInstance(startRes.getData());
    //     System.out.println("cancelJob result: " + JSONObject.toJSONString(cancelRes));
    //     Assertions.assertTrue(cancelRes.isSuccess());
    // }

    @Test
    void testRetryInstance() {
        ResultDTO<Void> res = powerJobClient.retryInstance(169557545206153344L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
}
@@ -0,0 +1,35 @@
package tech.powerjob.client.test;

import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Test;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.utils.CommonUtils;

/**
 * Tests the cluster failover capability.
 *
 * @author tjq
 * @since 2024/8/11
 */
@Slf4j
public class TestClusterHA extends ClientInitializer {

    @Test
    void testHa() {
        // Manually start and stop the servers while this loop runs
        for (int i = 0; i < 1000000; i++) {

            CommonUtils.easySleep(100);

            ResultDTO<JobInfoDTO> jobInfoDTOResultDTO = powerJobClient.fetchJob(1L);

            log.info("[TestClusterHA] response: {}", JSONObject.toJSONString(jobInfoDTOResultDTO));

            if (!jobInfoDTOResultDTO.isSuccess()) {
                throw new RuntimeException("request failed!");
            }
        }
    }
}
@@ -0,0 +1,45 @@
package tech.powerjob.client.test;

import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.response.ResultDTO;
import org.junit.jupiter.api.Test;

import java.util.concurrent.ForkJoinPool;

/**
 * TestConcurrencyControl
 *
 * @author tjq
 * @since 1/16/21
 */
class TestConcurrencyControl extends ClientInitializer {

    @Test
    void testRunJobConcurrencyControl() {

        SaveJobInfoRequest saveJobInfoRequest = new SaveJobInfoRequest();
        saveJobInfoRequest.setJobName("test concurrency control job");
        saveJobInfoRequest.setProcessorType(ProcessorType.SHELL);
        saveJobInfoRequest.setProcessorInfo("pwd");
        saveJobInfoRequest.setExecuteType(ExecuteType.STANDALONE);
        saveJobInfoRequest.setTimeExpressionType(TimeExpressionType.API);
        saveJobInfoRequest.setMaxInstanceNum(1);

        Long jobId = powerJobClient.saveJob(saveJobInfoRequest).getData();

        System.out.println("jobId: " + jobId);

        ForkJoinPool pool = new ForkJoinPool(32);

        for (int i = 0; i < 100; i++) {
            String params = "index-" + i;
            pool.execute(() -> {
                ResultDTO<Long> res = powerJobClient.runJob(jobId, params, 0);
                System.out.println(params + ": " + res);
            });
        }
    }
}
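With `maxInstanceNum` fixed at 1, the server-side guard should reject most of the 100 concurrent `runJob` submissions (the "too much instance" result defined in SystemInstanceResult further down), which is exactly what the console output is meant to show. Note the test returns without awaiting the pool; ForkJoinPool worker threads are daemon threads, so submissions still queued when the JVM exits may be silently dropped.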
@@ -0,0 +1,48 @@
package tech.powerjob.client.test;

import com.alibaba.fastjson.JSON;
import tech.powerjob.common.request.query.JobInfoQuery;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.response.JobInfoDTO;
import tech.powerjob.common.response.ResultDTO;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.time.DateUtils;
import org.junit.jupiter.api.Test;

import java.util.Date;
import java.util.List;

/**
 * Test the query method
 *
 * @author tjq
 * @since 1/16/21
 */
@Slf4j
class TestQuery extends ClientInitializer {

    @Test
    void testFetchAllJob() {
        ResultDTO<List<JobInfoDTO>> allJobRes = powerJobClient.fetchAllJob();
        System.out.println(JSON.toJSONString(allJobRes));
    }

    @Test
    void testQueryJob() {
        JobInfoQuery jobInfoQuery = new JobInfoQuery()
                .setIdGt(-1L)
                .setIdLt(10086L)
                .setJobNameLike("DAG")
                .setGmtModifiedGt(DateUtils.addYears(new Date(), -10))
                .setGmtCreateLt(DateUtils.addDays(new Date(), 10))
                .setExecuteTypeIn(Lists.newArrayList(ExecuteType.STANDALONE.getV(), ExecuteType.BROADCAST.getV(), ExecuteType.MAP_REDUCE.getV()))
                .setProcessorTypeIn(Lists.newArrayList(ProcessorType.BUILT_IN.getV(), ProcessorType.SHELL.getV(), ProcessorType.EXTERNAL.getV()))
                .setProcessorInfoLike("tech.powerjob");

        ResultDTO<List<JobInfoDTO>> jobQueryResult = powerJobClient.queryJob(jobInfoQuery);
        System.out.println(JSON.toJSONString(jobQueryResult));
        System.out.println(jobQueryResult.getData().size());
    }
}
@@ -0,0 +1,17 @@
package tech.powerjob.client.test;

import com.alibaba.fastjson.JSONObject;

/**
 * TestUtils
 *
 * @author tjq
 * @since 2024/11/21
 */
public class TestUtils {

    public static void output(Object v) {
        String str = JSONObject.toJSONString(v);
        System.out.println(str);
    }
}
@@ -0,0 +1,191 @@
package tech.powerjob.client.test;

import com.alibaba.fastjson.JSONObject;
import tech.powerjob.client.PowerJobClient;
import tech.powerjob.common.enums.ExecuteType;
import tech.powerjob.common.enums.ProcessorType;
import tech.powerjob.common.enums.TimeExpressionType;
import tech.powerjob.common.enums.WorkflowNodeType;
import tech.powerjob.common.model.PEWorkflowDAG;
import tech.powerjob.common.request.http.SaveJobInfoRequest;
import tech.powerjob.common.request.http.SaveWorkflowNodeRequest;
import tech.powerjob.common.request.http.SaveWorkflowRequest;
import tech.powerjob.common.response.ResultDTO;
import tech.powerjob.common.response.WorkflowInfoDTO;
import tech.powerjob.common.response.WorkflowInstanceInfoDTO;
import tech.powerjob.common.response.WorkflowNodeInfoDTO;
import com.google.common.collect.Lists;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import java.util.List;

/**
 * Test cases for {@link PowerJobClient} workflow.
 *
 * @author tjq
 * @author Echo009
 * @since 2020/6/2
 */
class TestWorkflow extends ClientInitializer {

    private static final long WF_ID = 2;

    @Test
    void initTestData() {
        SaveJobInfoRequest base = new SaveJobInfoRequest();
        base.setJobName("DAG-Node-");
        base.setTimeExpressionType(TimeExpressionType.WORKFLOW);
        base.setExecuteType(ExecuteType.STANDALONE);
        base.setProcessorType(ProcessorType.BUILT_IN);
        base.setProcessorInfo("tech.powerjob.samples.workflow.WorkflowStandaloneProcessor");

        for (int i = 0; i < 5; i++) {
            SaveJobInfoRequest request = JSONObject.parseObject(JSONObject.toJSONBytes(base), SaveJobInfoRequest.class);
            request.setJobName(request.getJobName() + i);
            ResultDTO<Long> res = powerJobClient.saveJob(request);
            System.out.println(res);
            Assertions.assertNotNull(res);

        }
    }

    @Test
    void testSaveWorkflow() {

        SaveWorkflowRequest req = new SaveWorkflowRequest();

        req.setWfName("workflow-by-client");
        req.setWfDescription("created by client");
        req.setEnable(true);
        req.setTimeExpressionType(TimeExpressionType.API);

        System.out.println("req ->" + JSONObject.toJSON(req));
        ResultDTO<Long> res = powerJobClient.saveWorkflow(req);
        System.out.println(res);
        Assertions.assertNotNull(res);

        req.setId(res.getData());

        // Create the nodes
        SaveWorkflowNodeRequest saveWorkflowNodeRequest1 = new SaveWorkflowNodeRequest();
        saveWorkflowNodeRequest1.setJobId(1L);
        saveWorkflowNodeRequest1.setNodeName("DAG-Node-1");
        saveWorkflowNodeRequest1.setType(WorkflowNodeType.JOB.getCode());

        SaveWorkflowNodeRequest saveWorkflowNodeRequest2 = new SaveWorkflowNodeRequest();
        saveWorkflowNodeRequest2.setJobId(1L);
        saveWorkflowNodeRequest2.setNodeName("DAG-Node-2");
        saveWorkflowNodeRequest2.setType(WorkflowNodeType.JOB.getCode());


        SaveWorkflowNodeRequest saveWorkflowNodeRequest3 = new SaveWorkflowNodeRequest();
        saveWorkflowNodeRequest3.setJobId(1L);
        saveWorkflowNodeRequest3.setNodeName("DAG-Node-3");
        saveWorkflowNodeRequest3.setType(WorkflowNodeType.JOB.getCode());


        List<WorkflowNodeInfoDTO> nodeList = powerJobClient.saveWorkflowNode(Lists.newArrayList(saveWorkflowNodeRequest1, saveWorkflowNodeRequest2, saveWorkflowNodeRequest3)).getData();
        System.out.println(nodeList);
        Assertions.assertNotNull(nodeList);


        // The DAG
        List<PEWorkflowDAG.Node> nodes = Lists.newLinkedList();
        List<PEWorkflowDAG.Edge> edges = Lists.newLinkedList();

        nodes.add(new PEWorkflowDAG.Node(nodeList.get(0).getId()));
        nodes.add(new PEWorkflowDAG.Node(nodeList.get(1).getId()));
        nodes.add(new PEWorkflowDAG.Node(nodeList.get(2).getId()));

        edges.add(new PEWorkflowDAG.Edge(nodeList.get(0).getId(), nodeList.get(1).getId()));
        edges.add(new PEWorkflowDAG.Edge(nodeList.get(1).getId(), nodeList.get(2).getId()));
        PEWorkflowDAG peWorkflowDAG = new PEWorkflowDAG(nodes, edges);

        // Save the complete workflow definition
        req.setDag(peWorkflowDAG);
        res = powerJobClient.saveWorkflow(req);

        System.out.println(res);
        Assertions.assertNotNull(res);

    }

    @Test
    void testCopyWorkflow() {
        ResultDTO<Long> res = powerJobClient.copyWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }


    @Test
    void testDisableWorkflow() {
        ResultDTO<Void> res = powerJobClient.disableWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testDeleteWorkflow() {
        ResultDTO<Void> res = powerJobClient.deleteWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testEnableWorkflow() {
        ResultDTO<Void> res = powerJobClient.enableWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testFetchWorkflowInfo() {
        ResultDTO<WorkflowInfoDTO> res = powerJobClient.fetchWorkflow(WF_ID);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testRunWorkflow() {
        ResultDTO<Long> res = powerJobClient.runWorkflow(WF_ID, null, 0);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testStopWorkflowInstance() {
        ResultDTO<Void> res = powerJobClient.stopWorkflowInstance(149962433421639744L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testRetryWorkflowInstance() {
        ResultDTO<Void> res = powerJobClient.retryWorkflowInstance(149962433421639744L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testMarkWorkflowNodeAsSuccess() {
        ResultDTO<Void> res = powerJobClient.markWorkflowNodeAsSuccess(149962433421639744L, 1L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testFetchWfInstanceInfo() {
        ResultDTO<WorkflowInstanceInfoDTO> res = powerJobClient.fetchWorkflowInstanceInfo(149962433421639744L);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }

    @Test
    void testRunWorkflowPlus() {
        ResultDTO<Long> res = powerJobClient.runWorkflow(WF_ID, "this is init Params 2", 90000);
        System.out.println(res);
        Assertions.assertNotNull(res);
    }
}
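The test wires a simple chain (node1 -> node2 -> node3). Fan-out and fan-in shapes use the same Node/Edge API; a sketch of a diamond-shaped DAG built from four node IDs returned by `saveWorkflowNode` (the helper class and method names are illustrative, not part of this diff):

```java
import com.google.common.collect.Lists;
import tech.powerjob.common.model.PEWorkflowDAG;

import java.util.List;

// Diamond shape: n1 -> (n2, n3) -> n4, using node ids already saved via saveWorkflowNode.
public class DiamondDagSketch {
    public static PEWorkflowDAG build(long n1, long n2, long n3, long n4) {
        List<PEWorkflowDAG.Node> nodes = Lists.newArrayList(
                new PEWorkflowDAG.Node(n1), new PEWorkflowDAG.Node(n2),
                new PEWorkflowDAG.Node(n3), new PEWorkflowDAG.Node(n4));
        List<PEWorkflowDAG.Edge> edges = Lists.newArrayList(
                new PEWorkflowDAG.Edge(n1, n2), new PEWorkflowDAG.Edge(n1, n3),
                new PEWorkflowDAG.Edge(n2, n4), new PEWorkflowDAG.Edge(n3, n4));
        return new PEWorkflowDAG(nodes, edges);
    }
}
```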
@@ -4,23 +4,24 @@
     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
         <artifactId>powerjob</artifactId>
-        <groupId>com.github.kfcfans</groupId>
-        <version>1.0.0</version>
+        <groupId>tech.powerjob</groupId>
+        <version>5.1.1</version>
     </parent>
 
     <modelVersion>4.0.0</modelVersion>
     <artifactId>powerjob-common</artifactId>
-    <version>3.1.0</version>
+    <version>5.1.1</version>
     <packaging>jar</packaging>
 
     <properties>
-        <slf4j.version>1.7.30</slf4j.version>
-        <commons.lang.version>3.10</commons.lang.version>
-        <commons.io.version>2.6</commons.io.version>
-        <guava.version>29.0-jre</guava.version>
-        <okhttp.version>4.4.1</okhttp.version>
-        <akka.version>2.6.4</akka.version>
-        <junit.version>5.6.1</junit.version>
+        <slf4j.version>1.7.36</slf4j.version>
+        <commons.lang.version>3.12.0</commons.lang.version>
+        <commons.io.version>2.11.0</commons.io.version>
+        <guava.version>31.1-jre</guava.version>
+        <okhttp.version>3.14.9</okhttp.version>
+        <kryo.version>5.3.0</kryo.version>
+        <jackson.version>2.14.3</jackson.version>
+        <junit.version>5.9.0</junit.version>
     </properties>
 
     <dependencies>
@@ -52,23 +53,6 @@
             <version>${okhttp.version}</version>
         </dependency>
 
-        <!-- akka remote -->
-        <dependency>
-            <groupId>com.typesafe.akka</groupId>
-            <artifactId>akka-remote_2.13</artifactId>
-            <version>${akka.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.typesafe.akka</groupId>
-            <artifactId>akka-serialization-jackson_2.13</artifactId>
-            <version>${akka.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.typesafe.akka</groupId>
-            <artifactId>akka-slf4j_2.13</artifactId>
-            <version>${akka.version}</version>
-        </dependency>
-
         <!-- commons-io -->
         <dependency>
             <groupId>commons-io</groupId>
@@ -76,7 +60,32 @@
             <version>${commons.io.version}</version>
         </dependency>
 
-        <!-- JUnit tests -->
+        <!-- Kryo, an extremely high-performance serialization framework -->
+        <dependency>
+            <groupId>com.esotericsoftware.kryo</groupId>
+            <artifactId>kryo5</artifactId>
+            <version>${kryo.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+        <!-- Fixes Java 8 date/time type handling, see #869 -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.datatype</groupId>
+            <artifactId>jackson-datatype-jsr310</artifactId>
+            <version>${jackson.version}</version>
+        </dependency>
+
+
+        <!-- Junit tests -->
         <dependency>
             <groupId>org.junit.jupiter</groupId>
             <artifactId>junit-jupiter-api</artifactId>
@@ -1,13 +0,0 @@
package com.github.kfcfans.powerjob.common;

/**
 * Deployment environment
 *
 * @author tjq
 * @since 2020/5/3
 */
public enum Env {
    DAILY,
    PRE,
    PRODUCT
}
@@ -1,15 +0,0 @@
package com.github.kfcfans.powerjob.common;

/**
 * Common constants
 *
 * @author tjq
 * @since 2020/5/31
 */
public class OmsConstant {

    public static final String TIME_PATTERN = "yyyy-MM-dd HH:mm:ss";
    public static final String TIME_PATTERN_PLUS = "yyyy-MM-dd HH:mm:ss.SSS";

    public static final String NONE = "N/A";
}
@@ -1,29 +0,0 @@
package com.github.kfcfans.powerjob.common;

/**
 * OhMyScheduler runtime exception
 *
 * @author tjq
 * @since 2020/5/26
 */
public class OmsException extends RuntimeException {

    public OmsException() {
    }

    public OmsException(String message) {
        super(message);
    }

    public OmsException(String message, Throwable cause) {
        super(message, cause);
    }

    public OmsException(Throwable cause) {
        super(cause);
    }

    public OmsException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
@@ -1,12 +0,0 @@
package com.github.kfcfans.powerjob.common;

import java.io.Serializable;

/**
 * OMS serialization marker interface
 *
 * @author tjq
 * @since 2020/4/16
 */
public interface OmsSerializable extends Serializable {
}
@@ -1,35 +0,0 @@
package com.github.kfcfans.powerjob.common;

/**
 * RemoteConstant
 *
 * @author tjq
 * @since 2020/3/17
 */
public class RemoteConstant {


    /* ************************ AKKA WORKER ************************ */
    public static final int DEFAULT_WORKER_PORT = 27777;

    public static final String WORKER_ACTOR_SYSTEM_NAME = "oms";

    public static final String Task_TRACKER_ACTOR_NAME = "task_tracker";
    public static final String PROCESSOR_TRACKER_ACTOR_NAME = "processor_tracker";
    public static final String WORKER_ACTOR_NAME = "worker";

    public static final String WORKER_AKKA_CONFIG_NAME = "oms-worker.akka.conf";


    /* ************************ AKKA SERVER ************************ */
    public static final String SERVER_ACTOR_SYSTEM_NAME = "oms-server";

    public static final String SERVER_ACTOR_NAME = "server_actor";
    public static final String SERVER_FRIEND_ACTOR_NAME = "friend_actor";
    public static final String SERVER_AKKA_CONFIG_NAME = "oms-server.akka.conf";


    /* ************************ OTHERS ************************ */
    public static final String EMPTY_ADDRESS = "N/A";
    public static final long DEFAULT_TIMEOUT_MS = 3000;
}
@@ -1,35 +0,0 @@
package com.github.kfcfans.powerjob.common;

/**
 * Results of task instances generated by the system.
 *
 * @author tjq
 * @since 2020/4/11
 */
public class SystemInstanceResult {

    /* *********** for normal instances *********** */

    // too many task instances running at the same time
    public static final String TOO_MUCH_INSTANCE = "too much instance(%d>%d)";
    // no worker available
    public static final String NO_WORKER_AVAILABLE = "no worker available";
    // task execution timed out
    public static final String INSTANCE_EXECUTE_TIMEOUT = "instance execute timeout";
    // failed to create the root task
    public static final String TASK_INIT_FAILED = "create root task failed";
    // unknown error
    public static final String UNKNOWN_BUG = "unknown bug";
    // TaskTracker has not reported for a long time
    public static final String REPORT_TIMEOUT = "worker report timeout, maybe TaskTracker down";

    /* *********** for workflows *********** */
    public static final String MIDDLE_JOB_FAILED = "middle job failed";
    public static final String MIDDLE_JOB_STOPPED = "middle job stopped by user";
    public static final String CAN_NOT_FIND_JOB = "can't find some job";

    // stopped manually by the user
    public static final String STOPPED_BY_USER = "stopped by user";


}
@@ -1,37 +0,0 @@
package com.github.kfcfans.powerjob.common;

import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;

import java.util.List;

/**
 * Time expression types
 *
 * @author tjq
 * @since 2020/3/30
 */
@Getter
@AllArgsConstructor
public enum TimeExpressionType {

    API(1),
    CRON(2),
    FIX_RATE(3),
    FIX_DELAY(4),
    WORKFLOW(5);

    int v;

    public static final List<Integer> frequentTypes = Lists.newArrayList(FIX_RATE.v, FIX_DELAY.v);

    public static TimeExpressionType of(int v) {
        for (TimeExpressionType type : values()) {
            if (type.v == v) {
                return type;
            }
        }
        throw new IllegalArgumentException("unknown TimeExpressionType of " + v);
    }
}
@@ -1,27 +0,0 @@
package com.github.kfcfans.powerjob.common.model;

import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Information about a deployed container.
 *
 * @author tjq
 * @since 2020/5/18
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class DeployedContainerInfo implements OmsSerializable {

    // container ID
    private Long containerId;
    // version
    private String version;
    // deployment time
    private long deployedTime;
    // worker address (does not need to be reported)
    private String workerAddress;
}
@@ -1,21 +0,0 @@
package com.github.kfcfans.powerjob.common.model;

import lombok.Data;

/**
 * Git repository info
 *
 * @author tjq
 * @since 2020/5/17
 */
@Data
public class GitRepoInfo {
    // repository address
    private String repo;
    // branch name
    private String branch;
    // username
    private String username;
    // password
    private String password;
}
@@ -1,57 +0,0 @@
package com.github.kfcfans.powerjob.common.model;

import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.Data;
import lombok.NoArgsConstructor;

import java.util.List;

/**
 * Detailed runtime information of a task instance (for external use).
 *
 * @author tjq
 * @since 2020/4/11
 */
@Data
@NoArgsConstructor
public class InstanceDetail implements OmsSerializable {

    // overall start time of the task
    private Long actualTriggerTime;
    // overall finish time of the task (may not exist)
    private Long finishedTime;
    // task status
    private Integer status;
    // task execution result (may not exist)
    private String result;
    // TaskTracker address
    private String taskTrackerAddress;

    // for MapReduce and Broadcast tasks only
    private TaskDetail taskDetail;
    // for second-level (frequent) tasks only
    private List<SubInstanceDetail> subInstanceDetails;

    // number of retries
    private Long runningTimes;

    // extra of second-level tasks -> List<SubInstanceDetail>
    @Data
    @NoArgsConstructor
    public static class SubInstanceDetail implements OmsSerializable {
        private long subInstanceId;
        private String startTime;
        private String finishedTime;
        private String result;
        private String status;
    }

    // extra of MapReduce and Broadcast tasks ->
    @Data
    @NoArgsConstructor
    public static class TaskDetail implements OmsSerializable {
        private long totalTaskNum;
        private long succeedTaskNum;
        private long failedTaskNum;
    }
}
@ -1,26 +0,0 @@
|
||||
package com.github.kfcfans.powerjob.common.model;
|
||||
|
||||
import com.github.kfcfans.powerjob.common.OmsSerializable;
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
/**
|
||||
* 任务实例日志对象
|
||||
*
|
||||
* @author tjq
|
||||
* @since 2020/4/21
|
||||
*/
|
||||
@Data
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class InstanceLogContent implements OmsSerializable {
|
||||
|
||||
// 实例ID
|
||||
private long instanceId;
|
||||
// 日志提交时间
|
||||
private long logTime;
|
||||
// 日志内容
|
||||
private String logContent;
|
||||
}
@@ -1,62 +0,0 @@
package com.github.kfcfans.powerjob.common.model;

import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;

/**
 * Point & Edge DAG representation.
 * Points plus lines: easy to express and to transfer.
 *
 * @author tjq
 * @since 2020/5/26
 */
@Data
@NoArgsConstructor
public class PEWorkflowDAG {

    // the DAG (point-and-line representation)
    private List<Node> nodes;
    private List<Edge> edges;

    // point
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class Node {
        private Long jobId;
        private String jobName;

        // runtime parameters, not needed by the graph definition
        @JsonSerialize(using = ToStringSerializer.class)
        private Long instanceId;
        private Integer status;
        private String result;

        public Node(Long jobId, String jobName) {
            this.jobId = jobId;
            this.jobName = jobName;
        }
    }

    // edge: jobId -> jobId
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    public static class Edge {
        private Long from;
        private Long to;
    }

    public PEWorkflowDAG(@Nonnull List<Node> nodes, @Nullable List<Edge> edges) {
        this.nodes = nodes;
        this.edges = edges == null ? Lists.newLinkedList() : edges;
    }
}
@@ -1,73 +0,0 @@
package com.github.kfcfans.powerjob.common.model;

import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.Data;

/**
 * System metrics
 *
 * @author tjq
 * @since 2020/3/25
 */
@Data
public class SystemMetrics implements OmsSerializable, Comparable<SystemMetrics> {

    // number of CPU cores
    private int cpuProcessors;
    // CPU load (needs to be divided by the number of cores)
    private double cpuLoad;

    // memory (in GB)
    private double jvmUsedMemory;
    private double jvmMaxMemory;
    // memory usage (0.X, not a percentage)
    private double jvmMemoryUsage;

    // disk (in GB)
    private double diskUsed;
    private double diskTotal;
    // disk usage (0.X, not a percentage)
    private double diskUsage;

    // cached score
    private int score;

    @Override
    public int compareTo(SystemMetrics that) {
        return this.calculateScore() - that.calculateScore();
    }

    /**
     * Calculates the score: memory, then CPU, then disk (the disk must have more than 1 GB of free space).
     * @return the score
     */
    public int calculateScore() {

        if (score > 0) {
            return score;
        }

        double availableCPUCores = cpuProcessors * cpuLoad;
        double availableMemory = jvmMaxMemory - jvmUsedMemory;

        // On Windows the number of available CPU cores cannot be obtained; the value is fixed at -1
        cpuLoad = Math.max(0, cpuLoad);

        return (int) (availableMemory * 2 + availableCPUCores);
    }

    /**
     * Whether this machine is available.
     * @param minCPUCores minimum number of available CPU cores
     * @param minMemorySpace minimum available memory
     * @param minDiskSpace minimum available disk space
     * @return availability
     */
    public boolean available(double minCPUCores, double minMemorySpace, double minDiskSpace) {

        double currentCpuCores = Math.max(cpuLoad * cpuProcessors, 0);
        double currentMemory = jvmMaxMemory - jvmUsedMemory;
        double currentDisk = diskTotal - diskUsed;
        return currentCpuCores >= minCPUCores && currentMemory >= minMemorySpace && currentDisk >= minDiskSpace;
    }
}
@@ -1,19 +0,0 @@
package com.github.kfcfans.powerjob.common.request;

import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Request from the server to destroy a container.
 *
 * @author tjq
 * @since 2020/5/18
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ServerDestroyContainerRequest implements OmsSerializable {
    private String containerName;
}
@@ -1,19 +0,0 @@
package com.github.kfcfans.powerjob.common.request;

import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * Server queries an instance's running status; detailed runtime data must be returned.
 *
 * @author tjq
 * @since 2020/4/10
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ServerQueryInstanceStatusReq implements OmsSerializable {
    private Long instanceId;
}
@@ -1,74 +0,0 @@
package com.github.kfcfans.powerjob.common.request;

import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.Data;

import java.util.List;

/**
 * Job scheduling request from the server (the entry point of one task execution).
 *
 * @author tjq
 * @since 2020/3/17
 */
@Data
public class ServerScheduleJobReq implements OmsSerializable {

    // available worker addresses, possibly multi-valued, comma separated
    private List<String> allWorkerAddress;

    /* *********************** task properties *********************** */

    /**
     * Job ID; after a server change the job metadata must be re-queried by jobId
     */
    private Long jobId;

    private Long wfInstanceId;
    /**
     * Basic info
     */
    private Long instanceId;

    /**
     * Processor info for task execution
     */
    // execution type: standalone, broadcast, MapReduce
    private String executeType;
    // processor type (JavaBean, JAR, script, ...)
    private String processorType;
    // processor info
    private String processorInfo;


    /**
     * Timeouts
     */
    // overall timeout of the whole instance
    private long instanceTimeoutMS;

    /**
     * Runtime parameters
     */
    // job-level parameters, comparable to a class's static fields
    private String jobParams;
    // instance-level parameters, comparable to a class's instance fields (API-triggered only, passed in at trigger time)
    private String instanceParams;

    // upper limit of processing threads per machine
    private int threadConcurrency;
    // subtask retry count (the job's own retry mechanism is controlled by the server)
    private int taskRetryNum;

    /**
     * Scheduling info
     */
    // time expression type (CRON/API/FIX_RATE/FIX_DELAY)
    private String timeExpressionType;
    // time expression: CRON/NULL/LONG/LONG (in ms)
    private String timeExpression;

    // maximum number of instances running at the same time, default 1
    private Integer maxInstanceNum;

}
@@ -1,32 +0,0 @@
package com.github.kfcfans.powerjob.common.request;

import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.Data;


/**
 * TaskTracker reports its status to the server.
 *
 * @author tjq
 * @since 2020/3/17
 */
@Data
public class TaskTrackerReportInstanceStatusReq implements OmsSerializable {

    private Long jobId;
    private Long instanceId;
    private Long wfInstanceId;

    private int instanceStatus;

    private String result;

    /* ********* statistics ********* */
    private long totalTaskNum;
    private long succeedTaskNum;
    private long failedTaskNum;

    private long startTime;
    private long reportTime;
    private String sourceAddress;
}
@@ -1,32 +0,0 @@
package com.github.kfcfans.powerjob.common.request;

import com.github.kfcfans.powerjob.common.OmsSerializable;
import com.github.kfcfans.powerjob.common.model.DeployedContainerInfo;
import com.github.kfcfans.powerjob.common.model.SystemMetrics;
import lombok.Data;

import java.util.List;


/**
 * Worker health report (the heartbeat periodically sent by the worker).
 *
 * @author tjq
 * @since 2020/3/25
 */
@Data
public class WorkerHeartbeat implements OmsSerializable {

    // local address -> IP:port
    private String workerAddress;
    // current appName
    private String appName;
    // current appId
    private Long appId;
    // current time
    private long heartbeatTime;
    // currently loaded containers (container name -> container version)
    private List<DeployedContainerInfo> containerInfos;

    private SystemMetrics systemMetrics;
}
@@ -1,79 +0,0 @@
package com.github.kfcfans.powerjob.common.request.http;

import com.github.kfcfans.powerjob.common.ExecuteType;
import com.github.kfcfans.powerjob.common.ProcessorType;
import com.github.kfcfans.powerjob.common.TimeExpressionType;
import lombok.Data;

import java.util.List;

/**
 * Request to create or update a JobInfo.
 *
 * @author tjq
 * @since 2020/3/30
 */
@Data
public class SaveJobInfoRequest {

    // job ID (jobId); null -> insert, otherwise update
    private Long id;
    /* ************************** basic info ************************** */
    // job name
    private String jobName;
    // job description
    private String jobDescription;
    // ID of the application the job belongs to (clients need not fill this in; it is filled automatically)
    private Long appId;
    // the job's own parameters
    private String jobParams;

    /* ************************** scheduling ************************** */
    // time expression type (CRON/API/FIX_RATE/FIX_DELAY)
    private TimeExpressionType timeExpressionType;
    // time expression: CRON/NULL/LONG/LONG
    private String timeExpression;


    /* ************************** execution ************************** */
    // execution type: standalone/broadcast/MapReduce
    private ExecuteType executeType;
    // processor type: Java/Shell
    private ProcessorType processorType;
    // processor info
    private String processorInfo;


    /* ************************** runtime config ************************** */
    // maximum number of instances running at the same time
    private Integer maxInstanceNum = 1;
    // concurrency: number of threads executing at the same time
    private Integer concurrency = 5;
    // overall timeout of the instance
    private Long instanceTimeLimit = 0L;

    /* ************************** retry config ************************** */
    private Integer instanceRetryNum = 0;
    private Integer taskRetryNum = 0;

    /* ************************** busy-machine config ************************** */
    // minimum CPU cores, 0 means unlimited
    private double minCpuCores = 0;
    // minimum memory in GB, 0 means unlimited
    private double minMemorySpace = 0;
    // minimum disk space in GB, 0 means unlimited
    private double minDiskSpace = 0;

    // 1: running normally, 2: stopped (no longer scheduled)
    private boolean enable = true;


    /* ************************** cluster config ************************** */
    // designated workers; empty means unlimited, otherwise only the listed machines run the job (comma separated)
    private String designatedWorkers;
    // maximum number of workers
    private Integer maxWorkerCount = 0;

    // list of user IDs to alert
    private List<Long> notifyUserIds;
}
@@ -1,46 +0,0 @@
package com.github.kfcfans.powerjob.common.request.http;

import com.github.kfcfans.powerjob.common.TimeExpressionType;
import com.github.kfcfans.powerjob.common.model.PEWorkflowDAG;
import com.google.common.collect.Lists;
import lombok.Data;

import java.util.List;

/**
 * Request to create or update a Workflow.
 *
 * @author tjq
 * @since 2020/5/26
 */
@Data
public class SaveWorkflowRequest {

    private Long id;

    // workflow name
    private String wfName;
    // workflow description
    private String wfDescription;

    // ID of the application it belongs to (OpenClient users need not fill this in; it is filled automatically)
    private Long appId;

    // point-and-line representation
    private PEWorkflowDAG pEWorkflowDAG;

    /* ************************** scheduling ************************** */
    // time expression type, only CRON and API are supported
    private TimeExpressionType timeExpressionType;
    // time expression: CRON/NULL/LONG/LONG
    private String timeExpression;

    // maximum number of workflow instances running at the same time, default 1
    private Integer maxWfInstanceNum = 1;

    // ENABLE / DISABLE
    private boolean enable = true;

    // alerts for overall workflow failure
    private List<Long> notifyUserIds = Lists.newLinkedList();
}
|
@@ -1,45 +0,0 @@
package com.github.kfcfans.powerjob.common.response;

import com.github.kfcfans.powerjob.common.InstanceStatus;
import lombok.Data;

import java.util.Date;

/**
 * External DTO for instanceInfo.
 *
 * @author tjq
 * @since 2020/5/14
 */
@Data
public class InstanceInfoDTO {

    // Job ID
    private Long jobId;
    // ID of the owning application (denormalized to speed up queries)
    private Long appId;
    // Instance ID
    private Long instanceId;
    // Instance parameters
    private String instanceParams;
    /**
     * Instance status {@link InstanceStatus}
     */
    private int status;
    // Execution result
    private String result;
    // Expected trigger time
    private Long expectedTriggerTime;
    // Actual trigger time
    private Long actualTriggerTime;
    // Finish time
    private Long finishedTime;
    // TaskTracker address
    private String taskTrackerAddress;

    // Total number of executions (used for retry decisions)
    private Long runningTimes;

    private Date gmtCreate;
    private Date gmtModified;
}
@@ -1,78 +0,0 @@
package com.github.kfcfans.powerjob.common.response;

import lombok.Data;

import java.util.Date;

/**
 * External DTO for jobInfo.
 *
 * @author tjq
 * @since 2020/5/14
 */
@Data
public class JobInfoDTO {

    private Long id;

    /* ************************** Basic job info ************************** */
    // Job name
    private String jobName;
    // Job description
    private String jobDescription;
    // ID of the application the job belongs to
    private Long appId;
    // Parameters carried by the job itself
    private String jobParams;

    /* ************************** Scheduling parameters ************************** */
    // Time expression type (CRON/API/FIX_RATE/FIX_DELAY)
    private Integer timeExpressionType;
    // Time expression: CRON / NULL / LONG / LONG, matching the types above
    private String timeExpression;

    /* ************************** Execution settings ************************** */
    // Execute type: standalone / broadcast / MapReduce
    private Integer executeType;
    // Processor type: Java / Shell
    private Integer processorType;
    // Processor info
    private String processorInfo;

    /* ************************** Runtime configuration ************************** */
    // Maximum number of instances running at the same time, default 1
    private Integer maxInstanceNum;
    // Concurrency: maximum number of threads executing a task simultaneously
    private Integer concurrency;
    // Overall timeout of a job instance
    private Long instanceTimeLimit;

    /* ************************** Retry configuration ************************** */
    private Integer instanceRetryNum;
    private Integer taskRetryNum;

    // 1 running, 2 stopped (no longer scheduled)
    private Integer status;
    // Next trigger time
    private Long nextTriggerTime;

    /* ************************** Busy-machine thresholds ************************** */
    // Minimum number of CPU cores; 0 means unlimited
    private double minCpuCores;
    // Minimum free memory in GB; 0 means unlimited
    private double minMemorySpace;
    // Minimum free disk space in GB; 0 means unlimited
    private double minDiskSpace;

    /* ************************** Cluster configuration ************************** */
    // Designated workers; empty means unrestricted, otherwise only the listed machines run the job (comma-separated)
    private String designatedWorkers;
    // Maximum number of workers
    private Integer maxWorkerCount;

    // IDs of users to alert, comma-separated
    private String notifyUserIds;

    private Date gmtCreate;
    private Date gmtModified;
}
@@ -1,44 +0,0 @@
package com.github.kfcfans.powerjob.common.response;

import com.github.kfcfans.powerjob.common.OmsSerializable;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.apache.commons.lang3.exception.ExceptionUtils;

/**
 * Result object returned by requests.
 *
 * @author tjq
 * @since 2020/3/30
 */
@Getter
@Setter
@ToString
public class ResultDTO<T> implements OmsSerializable {

    private boolean success;
    // Data (present when success is true)
    private T data;
    // Error message (present when success is false)
    private String message;

    public static <T> ResultDTO<T> success(T data) {
        ResultDTO<T> r = new ResultDTO<>();
        r.success = true;
        r.data = data;
        return r;
    }

    public static <T> ResultDTO<T> failed(String message) {
        ResultDTO<T> r = new ResultDTO<>();
        r.success = false;
        r.message = message;
        return r;
    }

    public static <T> ResultDTO<T> failed(Throwable t) {
        return failed(ExceptionUtils.getStackTrace(t));
    }
}
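As a usage illustration (an editor's sketch, not part of this diff), a server-side handler wraps its outcome with the two factory methods; trigger(...) below is a hypothetical scheduling call:

    public ResultDTO<Long> runJob(Long jobId) {
        try {
            Long instanceId = trigger(jobId); // hypothetical scheduling call
            return ResultDTO.success(instanceId);
        } catch (Exception e) {
            // failed(Throwable) puts the full stack trace into "message"
            return ResultDTO.failed(e);
        }
    }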
@@ -1,45 +0,0 @@
package com.github.kfcfans.powerjob.common.response;

import lombok.Data;

import java.util.Date;

/**
 * External DTO for workflowInfo.
 *
 * @author tjq
 * @since 2020/6/2
 */
@Data
public class WorkflowInfoDTO {

    private Long id;
    private String wfName;
    private String wfDescription;

    // Owning application ID
    private Long appId;

    // DAG info of the workflow (JSON of the point-line DAG)
    private String peDAG;

    /* ************************** Scheduling parameters ************************** */
    // Time expression type (CRON/API/FIX_RATE/FIX_DELAY)
    private Integer timeExpressionType;
    // Time expression: CRON / NULL / LONG / LONG, matching the types above
    private String timeExpression;

    // Maximum number of workflow instances running at the same time, default 1
    private Integer maxWfInstanceNum;

    // 1 running, 2 stopped (no longer scheduled)
    private Integer status;
    // Next trigger time
    private Long nextTriggerTime;

    // Users to alert when the workflow as a whole fails
    private String notifyUserIds;

    private Date gmtCreate;
    private Date gmtModified;
}
@@ -1,59 +0,0 @@
package com.github.kfcfans.powerjob.common.utils;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.kfcfans.powerjob.common.OmsException;
import org.apache.commons.lang3.exception.ExceptionUtils;

/**
 * JSON utility class.
 *
 * @author tjq
 * @since 2020/4/16
 */
public class JsonUtils {

    private static final ObjectMapper objectMapper = new ObjectMapper();

    static {
        objectMapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
    }

    public static String toJSONString(Object obj) {
        try {
            return objectMapper.writeValueAsString(obj);
        } catch (Exception ignore) {
        }
        return null;
    }

    public static String toJSONStringUnsafe(Object obj) throws JsonProcessingException {
        return objectMapper.writeValueAsString(obj);
    }

    public static byte[] toBytes(Object obj) {
        try {
            return objectMapper.writeValueAsBytes(obj);
        } catch (Exception ignore) {
        }
        return null;
    }

    public static <T> T parseObject(String json, Class<T> clz) throws JsonProcessingException {
        return objectMapper.readValue(json, clz);
    }

    public static <T> T parseObject(byte[] b, Class<T> clz) throws Exception {
        return objectMapper.readValue(b, clz);
    }

    public static <T> T parseObjectUnsafe(String json, Class<T> clz) {
        try {
            return objectMapper.readValue(json, clz);
        } catch (Exception e) {
            ExceptionUtils.rethrow(e);
        }
        throw new OmsException("impossible");
    }
}
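A brief round-trip sketch (editor's illustration, not part of this diff; PointDTO is a hypothetical POJO, which Jackson can only deserialize if it has a no-arg constructor):

    PointDTO p = new PointDTO(1, 2);                              // hypothetical POJO
    String json = JsonUtils.toJSONString(p);                      // returns null on failure instead of throwing
    PointDTO copy = JsonUtils.parseObject(json, PointDTO.class);  // declares JsonProcessingException
    PointDTO copy2 = JsonUtils.parseObjectUnsafe(json, PointDTO.class); // rethrows any failure unchecked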
@@ -1,454 +0,0 @@
package com.github.kfcfans.powerjob.common.utils;

import lombok.extern.slf4j.Slf4j;

import java.io.IOException;
import java.net.*;
import java.util.Enumeration;
import java.util.Optional;
import java.util.concurrent.ThreadLocalRandom;
import java.util.regex.Pattern;

/**
 * IP and Port Helper for RPC
 *
 * @author tjq (borrowed from dubbo)
 * @since 2020/3/16
 */
@Slf4j
@SuppressWarnings("all")
public class NetUtils {

    private static final String ANYHOST_VALUE = "0.0.0.0";
    private static final String LOCALHOST_KEY = "localhost";
    private static final String LOCALHOST_VALUE = "127.0.0.1";

    // returned port range is [30000, 39999]
    private static final int RND_PORT_START = 30000;
    private static final int RND_PORT_RANGE = 10000;

    // valid port range is (0, 65535]
    private static final int MIN_PORT = 0;
    public static final int MAX_PORT = 65535;

    private static final Pattern ADDRESS_PATTERN = Pattern.compile("^\\d{1,3}(\\.\\d{1,3}){3}\\:\\d{1,5}$");
    private static final Pattern LOCAL_IP_PATTERN = Pattern.compile("127(\\.\\d{1,3}){3}$");
    private static final Pattern IP_PATTERN = Pattern.compile("\\d{1,3}(\\.\\d{1,3}){3,5}$");

    private static volatile InetAddress LOCAL_ADDRESS = null;

    private static final String SPLIT_IPV4_CHARACTER = "\\.";
    private static final String SPLIT_IPV6_CHARACTER = ":";

    public static int getRandomPort() {
        return RND_PORT_START + ThreadLocalRandom.current().nextInt(RND_PORT_RANGE);
    }

    public static int getAvailablePort() {
        try (ServerSocket ss = new ServerSocket()) {
            ss.bind(null);
            return ss.getLocalPort();
        } catch (IOException e) {
            return getRandomPort();
        }
    }

    public static int getAvailablePort(int port) {
        if (port <= 0) {
            return getAvailablePort();
        }
        for (int i = port; i < MAX_PORT; i++) {
            try (ServerSocket ss = new ServerSocket(i)) {
                return i;
            } catch (IOException e) {
                // continue
            }
        }
        return port;
    }

    public static boolean isInvalidPort(int port) {
        return port <= MIN_PORT || port > MAX_PORT;
    }

    public static boolean isValidAddress(String address) {
        return ADDRESS_PATTERN.matcher(address).matches();
    }

    public static boolean isLocalHost(String host) {
        return host != null
                && (LOCAL_IP_PATTERN.matcher(host).matches()
                || host.equalsIgnoreCase(LOCALHOST_KEY));
    }

    public static boolean isAnyHost(String host) {
        return ANYHOST_VALUE.equals(host);
    }

    public static boolean isInvalidLocalHost(String host) {
        return host == null
                || host.length() == 0
                || host.equalsIgnoreCase(LOCALHOST_KEY)
                || host.equals(ANYHOST_VALUE)
                || LOCAL_IP_PATTERN.matcher(host).matches();
    }

    public static boolean isValidLocalHost(String host) {
        return !isInvalidLocalHost(host);
    }

    public static InetSocketAddress getLocalSocketAddress(String host, int port) {
        return isInvalidLocalHost(host) ?
                new InetSocketAddress(port) : new InetSocketAddress(host, port);
    }

    static boolean isValidV4Address(InetAddress address) {
        if (address == null || address.isLoopbackAddress()) {
            return false;
        }
        String name = address.getHostAddress();
        return (name != null
                && IP_PATTERN.matcher(name).matches()
                && !ANYHOST_VALUE.equals(name)
                && !LOCALHOST_VALUE.equals(name));
    }

    /**
     * Check whether the JVM prefers IPv6 addresses.
     *
     * @return true if java.net.preferIPv6Addresses is set
     */
    static boolean isPreferIPV6Address() {
        return Boolean.getBoolean("java.net.preferIPv6Addresses");
    }

    /**
     * normalize the ipv6 Address, convert scope name to scope id.
     * e.g.
     * convert
     * fe80:0:0:0:894:aeec:f37d:23e1%en0
     * to
     * fe80:0:0:0:894:aeec:f37d:23e1%5
     * <p>
     * The %5 after the ipv6 address is called the scope id.
     * See the java doc of {@link Inet6Address} for more details.
     *
     * @param address the input address
     * @return the normalized address, with scope id converted to int
     */
    static InetAddress normalizeV6Address(Inet6Address address) {
        String addr = address.getHostAddress();
        int i = addr.lastIndexOf('%');
        if (i > 0) {
            try {
                return InetAddress.getByName(addr.substring(0, i) + '%' + address.getScopeId());
            } catch (UnknownHostException e) {
                // ignore
                log.debug("Unknown IPV6 address: ", e);
            }
        }
        return address;
    }

    /**
     * Get the IP address of the local machine.
     *
     * @return local IP address
     */
    public static String getLocalHost() {
        InetAddress address = getLocalAddress();
        return address == null ? LOCALHOST_VALUE : address.getHostAddress();
    }

    /**
     * Find first valid IP from local network card
     *
     * @return first valid local IP
     */
    public static InetAddress getLocalAddress() {
        if (LOCAL_ADDRESS != null) {
            return LOCAL_ADDRESS;
        }
        InetAddress localAddress = getLocalAddress0();
        LOCAL_ADDRESS = localAddress;
        return localAddress;
    }

    private static Optional<InetAddress> toValidAddress(InetAddress address) {
        if (address instanceof Inet6Address) {
            Inet6Address v6Address = (Inet6Address) address;
            if (isPreferIPV6Address()) {
                return Optional.ofNullable(normalizeV6Address(v6Address));
            }
        }
        if (isValidV4Address(address)) {
            return Optional.of(address);
        }
        return Optional.empty();
    }

    private static InetAddress getLocalAddress0() {
        InetAddress localAddress = null;
        try {
            localAddress = InetAddress.getLocalHost();
            Optional<InetAddress> addressOp = toValidAddress(localAddress);
            if (addressOp.isPresent()) {
                return addressOp.get();
            }
        } catch (Throwable e) {
            log.warn("[Triple]", e);
        }

        try {
            Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
            if (null == interfaces) {
                return localAddress;
            }
            while (interfaces.hasMoreElements()) {
                try {
                    NetworkInterface network = interfaces.nextElement();
                    if (network.isLoopback() || network.isVirtual() || !network.isUp()) {
                        continue;
                    }
                    Enumeration<InetAddress> addresses = network.getInetAddresses();
                    while (addresses.hasMoreElements()) {
                        try {
                            Optional<InetAddress> addressOp = toValidAddress(addresses.nextElement());
                            if (addressOp.isPresent()) {
                                try {
                                    if (addressOp.get().isReachable(100)) {
                                        return addressOp.get();
                                    }
                                } catch (IOException e) {
                                    // ignore
                                }
                            }
                        } catch (Throwable e) {
                            log.warn("[Triple]", e);
                        }
                    }
                } catch (Throwable e) {
                    log.warn("[Triple]", e);
                }
            }
        } catch (Throwable e) {
            log.warn("[Triple]", e);
        }
        return localAddress;
    }

    public static String getHostName(String address) {
        try {
            int i = address.indexOf(':');
            if (i > -1) {
                address = address.substring(0, i);
            }
            InetAddress inetAddress = InetAddress.getByName(address);
            if (inetAddress != null) {
                return inetAddress.getHostName();
            }
        } catch (Throwable e) {
            // ignore
        }
        return address;
    }

    /**
     * getIpByHost
     *
     * @param hostName hostName
     * @return ip address, or hostName on UnknownHostException
     */
    public static String getIpByHost(String hostName) {
        try {
            return InetAddress.getByName(hostName).getHostAddress();
        } catch (UnknownHostException e) {
            return hostName;
        }
    }

    public static String toAddressString(InetSocketAddress address) {
        return address.getAddress().getHostAddress() + ":" + address.getPort();
    }

    public static InetSocketAddress toAddress(String address) {
        int i = address.indexOf(':');
        String host;
        int port;
        if (i > -1) {
            host = address.substring(0, i);
            port = Integer.parseInt(address.substring(i + 1));
        } else {
            host = address;
            port = 0;
        }
        return new InetSocketAddress(host, port);
    }

    public static String toURL(String protocol, String host, int port, String path) {
        StringBuilder sb = new StringBuilder();
        sb.append(protocol).append("://");
        sb.append(host).append(':').append(port);
        if (path.charAt(0) != '/') {
            sb.append('/');
        }
        sb.append(path);
        return sb.toString();
    }

    public static void joinMulticastGroup(MulticastSocket multicastSocket, InetAddress multicastAddress) throws IOException {
        setInterface(multicastSocket, multicastAddress instanceof Inet6Address);
        multicastSocket.setLoopbackMode(false);
        multicastSocket.joinGroup(multicastAddress);
    }

    public static void setInterface(MulticastSocket multicastSocket, boolean preferIpv6) throws IOException {
        boolean interfaceSet = false;
        Enumeration interfaces = NetworkInterface.getNetworkInterfaces();
        while (interfaces.hasMoreElements()) {
            NetworkInterface i = (NetworkInterface) interfaces.nextElement();
            Enumeration addresses = i.getInetAddresses();
            while (addresses.hasMoreElements()) {
                InetAddress address = (InetAddress) addresses.nextElement();
                if (preferIpv6 && address instanceof Inet6Address) {
                    try {
                        if (address.isReachable(100)) {
                            multicastSocket.setInterface(address);
                            interfaceSet = true;
                            break;
                        }
                    } catch (IOException e) {
                        // ignore
                    }
                } else if (!preferIpv6 && address instanceof Inet4Address) {
                    try {
                        if (address.isReachable(100)) {
                            multicastSocket.setInterface(address);
                            interfaceSet = true;
                            break;
                        }
                    } catch (IOException e) {
                        // ignore
                    }
                }
            }
            if (interfaceSet) {
                break;
            }
        }
    }

    public static boolean matchIpRange(String pattern, String host, int port) throws UnknownHostException {
        if (pattern == null || host == null) {
            throw new IllegalArgumentException("Illegal Argument pattern or hostName. Pattern:" + pattern + ", Host:" + host);
        }
        pattern = pattern.trim();
        if ("*.*.*.*".equals(pattern) || "*".equals(pattern)) {
            return true;
        }

        InetAddress inetAddress = InetAddress.getByName(host);
        boolean isIpv4 = isValidV4Address(inetAddress);
        String[] hostAndPort = getPatternHostAndPort(pattern, isIpv4);
        if (hostAndPort[1] != null && !hostAndPort[1].equals(String.valueOf(port))) {
            return false;
        }
        pattern = hostAndPort[0];

        String splitCharacter = SPLIT_IPV4_CHARACTER;
        if (!isIpv4) {
            splitCharacter = SPLIT_IPV6_CHARACTER;
        }
        String[] mask = pattern.split(splitCharacter);
        // check format of pattern
        checkHostPattern(pattern, mask, isIpv4);

        host = inetAddress.getHostAddress();

        String[] ipAddress = host.split(splitCharacter);
        if (pattern.equals(host)) {
            return true;
        }
        // short name condition
        if (!ipPatternContainExpression(pattern)) {
            InetAddress patternAddress = InetAddress.getByName(pattern);
            return patternAddress.getHostAddress().equals(host);
        }
        for (int i = 0; i < mask.length; i++) {
            if ("*".equals(mask[i]) || mask[i].equals(ipAddress[i])) {
                continue;
            } else if (mask[i].contains("-")) {
                String[] rangeNumStrs = mask[i].split("-");
                if (rangeNumStrs.length != 2) {
                    throw new IllegalArgumentException("Wrong format of IP address: " + mask[i]);
                }
                Integer min = getNumOfIpSegment(rangeNumStrs[0], isIpv4);
                Integer max = getNumOfIpSegment(rangeNumStrs[1], isIpv4);
                Integer ip = getNumOfIpSegment(ipAddress[i], isIpv4);
                if (ip < min || ip > max) {
                    return false;
                }
            } else if ("0".equals(ipAddress[i]) && ("0".equals(mask[i]) || "00".equals(mask[i]) || "000".equals(mask[i]) || "0000".equals(mask[i]))) {
                continue;
            } else if (!mask[i].equals(ipAddress[i])) {
                return false;
            }
        }
        return true;
    }

    private static boolean ipPatternContainExpression(String pattern) {
        return pattern.contains("*") || pattern.contains("-");
    }

    private static void checkHostPattern(String pattern, String[] mask, boolean isIpv4) {
        if (!isIpv4) {
            if (mask.length != 8 && ipPatternContainExpression(pattern)) {
                throw new IllegalArgumentException("If you config an ip expression that contains '*' or '-', please fill a qualified ip pattern like 234e:0:4567:0:0:0:3d:*. ");
            }
            if (mask.length != 8 && !pattern.contains("::")) {
                throw new IllegalArgumentException("The host is ipv6, but the pattern is not an ipv6 pattern: " + pattern);
            }
        } else {
            if (mask.length != 4) {
                throw new IllegalArgumentException("The host is ipv4, but the pattern is not an ipv4 pattern: " + pattern);
            }
        }
    }

    private static String[] getPatternHostAndPort(String pattern, boolean isIpv4) {
        String[] result = new String[2];
        if (pattern.startsWith("[") && pattern.contains("]:")) {
            int end = pattern.indexOf("]:");
            result[0] = pattern.substring(1, end);
            result[1] = pattern.substring(end + 2);
            return result;
        } else if (pattern.startsWith("[") && pattern.endsWith("]")) {
            result[0] = pattern.substring(1, pattern.length() - 1);
            result[1] = null;
            return result;
        } else if (isIpv4 && pattern.contains(":")) {
            int end = pattern.indexOf(":");
            result[0] = pattern.substring(0, end);
            result[1] = pattern.substring(end + 1);
            return result;
        } else {
            result[0] = pattern;
            return result;
        }
    }

    private static Integer getNumOfIpSegment(String ipSegment, boolean isIpv4) {
        if (isIpv4) {
            return Integer.parseInt(ipSegment);
        }
        return Integer.parseInt(ipSegment, 16);
    }
}
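As an illustration of the address helpers above (an editor's sketch, not part of this diff; the IP addresses are placeholders), note that matchIpRange declares UnknownHostException:

    static void netUtilsDemo() throws UnknownHostException {
        // first valid non-loopback IPv4 found on a network card, e.g. "192.168.1.10"
        String self = NetUtils.getLocalHost();
        // per-segment '*' wildcard; the pattern carries no port, so any port matches
        boolean a = NetUtils.matchIpRange("192.168.1.*", "192.168.1.10", 0);              // true
        // per-segment 'min-max' range plus an explicit port that must match exactly
        boolean b = NetUtils.matchIpRange("192.168.1.1-100:8080", "192.168.1.10", 8080);  // true
        InetSocketAddress addr = NetUtils.toAddress("192.168.1.10:27777");
    }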
@@ -1,7 +1,7 @@
-package com.github.kfcfans.powerjob.common;
+package tech.powerjob.common;

/**
- * 容器常量
+ * Container constants.
 *
 * @author tjq
 * @since 2020/5/15
@@ -9,13 +9,16 @@ package com.github.kfcfans.powerjob.common;
public class ContainerConstant {

    /**
-     * spring-context 配置文件名称
+     * Spring-context configuration file name of the container.
     */
    public static final String SPRING_CONTEXT_FILE_NAME = "oms-worker-container-spring-context.xml";

    /**
-     * container 属性文件名称
+     * Property file name of the container.
     */
    public static final String CONTAINER_PROPERTIES_FILE_NAME = "oms-worker-container.properties";
+    /**
+     * Package name of the container.
+     */
    public static final String CONTAINER_PACKAGE_NAME_KEY = "PACKAGE_NAME";
}
@@ -0,0 +1,35 @@
package tech.powerjob.common;

/**
 * Common constants.
 *
 * @author tjq
 * @since 2020/5/31
 */
public class OmsConstant {

    /**
     * package name
     */
    public static final String PACKAGE = "tech.powerjob";

    public static final int SERVER_DEFAULT_AKKA_PORT = 10086;
    public static final int SERVER_DEFAULT_HTTP_PORT = 10010;

    public static final String TIME_PATTERN = "yyyy-MM-dd HH:mm:ss";
    public static final String TIME_PATTERN_PLUS = "yyyy-MM-dd HH:mm:ss.SSS";

    public static final String NONE = "N/A";

    public static final String COMMA = ",";

    public static final String AND = "&";

    public static final String EQUAL = "=";

    public static final String LINE_SEPARATOR = "\r\n";

    public static final String HTTP_HEADER_CONTENT_TYPE = "Content-Type";
    public static final String JSON_MEDIA_TYPE = "application/json; charset=utf-8";

    public static final String NULL = "null";
}
@@ -1,4 +1,4 @@
-package com.github.kfcfans.powerjob.common;
+package tech.powerjob.common;

/**
 * OpenAPI constants
@@ -8,32 +8,62 @@ package com.github.kfcfans.powerjob.common;
 */
public class OpenAPIConstant {

    private OpenAPIConstant(){

    }

    public static final String WEB_PATH = "/openApi";

    public static final String ASSERT = "/assert";

    public static final String AUTH_APP = "/authApp";

    /* ************* JOB section ************* */

    public static final String SAVE_JOB = "/saveJob";
    public static final String COPY_JOB = "/copyJob";
    public static final String EXPORT_JOB = "/exportJob";
    public static final String FETCH_JOB = "/fetchJob";
    public static final String FETCH_ALL_JOB = "/fetchAllJob";
    public static final String QUERY_JOB = "/queryJob";
    public static final String DISABLE_JOB = "/disableJob";
    public static final String ENABLE_JOB = "/enableJob";
    public static final String DELETE_JOB = "/deleteJob";
    public static final String RUN_JOB = "/runJob";

    /* ************* Instance section ************* */

    public static final String STOP_INSTANCE = "/stopInstance";
    public static final String CANCEL_INSTANCE = "/cancelInstance";
    public static final String RETRY_INSTANCE = "/retryInstance";
    public static final String FETCH_INSTANCE_STATUS = "/fetchInstanceStatus";
    public static final String FETCH_INSTANCE_INFO = "/fetchInstanceInfo";
    public static final String QUERY_INSTANCE = "/queryInstance";

    /* ************* Workflow section ************* */

    public static final String SAVE_WORKFLOW = "/saveWorkflow";
    public static final String COPY_WORKFLOW = "/copyWorkflow";
    public static final String FETCH_WORKFLOW = "/fetchWorkflow";
    public static final String DISABLE_WORKFLOW = "/disableWorkflow";
    public static final String ENABLE_WORKFLOW = "/enableWorkflow";
    public static final String DELETE_WORKFLOW = "/deleteWorkflow";
    public static final String RUN_WORKFLOW = "/runWorkflow";
    public static final String SAVE_WORKFLOW_NODE = "/addWorkflowNode";

    /* ************* WorkflowInstance section ************* */

    public static final String STOP_WORKFLOW_INSTANCE = "/stopWfInstance";
    public static final String RETRY_WORKFLOW_INSTANCE = "/retryWfInstance";
    public static final String FETCH_WORKFLOW_INSTANCE_INFO = "/fetchWfInstanceInfo";
    public static final String MARK_WORKFLOW_NODE_AS_SUCCESS = "/markWorkflowNodeAsSuccess";

    /* ************* Authentication ************* */

    public static final String REQUEST_HEADER_ACCESS_TOKEN = "X-POWERJOB-ACCESS-TOKEN";

    public static final String REQUEST_HEADER_APP_ID = "X-POWERJOB-APP-ID";

    public static final String RESPONSE_HEADER_AUTH_STATUS = "X-POWERJOB-AUTH-PASSED";
}
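For orientation (an editor's sketch, not part of this diff): clients compose OpenAPI URLs by appending these paths to WEB_PATH; the server address below is a placeholder.

    String serverAddress = "http://127.0.0.1:7700"; // placeholder PowerJob server endpoint
    String runJobUrl = serverAddress + OpenAPIConstant.WEB_PATH + OpenAPIConstant.RUN_JOB;
    // -> http://127.0.0.1:7700/openApi/runJob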
Some files were not shown because too many files have changed in this diff.