[fix] fix the bug where the task_info table can't be found when using the H2 database in memory mode

This commit is contained in:
朱八 2020-07-31 00:57:02 +08:00
parent da85a2da5a
commit f9a30423f3
14 changed files with 42 additions and 56 deletions

View File

@ -6,7 +6,7 @@
<groupId>com.github.kfcfans</groupId> <groupId>com.github.kfcfans</groupId>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<version>1.0.0</version> <version>2.0.0</version>
<packaging>pom</packaging> <packaging>pom</packaging>
<name>powerjob</name> <name>powerjob</name>
<url>https://github.com/KFCFans/PowerJob</url> <url>https://github.com/KFCFans/PowerJob</url>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>com.github.kfcfans</groupId> <groupId>com.github.kfcfans</groupId>
<version>1.0.0</version> <version>2.0.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>com.github.kfcfans</groupId> <groupId>com.github.kfcfans</groupId>
<version>1.0.0</version> <version>2.0.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>com.github.kfcfans</groupId> <groupId>com.github.kfcfans</groupId>
<version>1.0.0</version> <version>2.0.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -1,16 +1,11 @@
package com.github.kfcfans.powerjob.server; package com.github.kfcfans.powerjob.server;
import com.github.kfcfans.powerjob.server.akka.OhMyServer; import com.github.kfcfans.powerjob.server.akka.OhMyServer;
import com.github.kfcfans.powerjob.server.common.utils.OmsFileUtils;
import com.google.common.annotations.VisibleForTesting;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.springframework.boot.SpringApplication; import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling; import org.springframework.scheduling.annotation.EnableScheduling;
import java.io.File;
/** /**
* SpringBoot 启动入口 * SpringBoot 启动入口
* *
@ -46,19 +41,8 @@ public class OhMyApplication {
} }
} }
@VisibleForTesting private static void pre() {
public static void pre() {
log.info(TIPS); log.info(TIPS);
// 删除历史遗留的 H2 数据库文件
File oldH2 = new File(OmsFileUtils.genH2BasePath());
try {
if (oldH2.exists()) {
FileUtils.forceDelete(oldH2);
}
}catch (Exception e) {
log.warn("[PowerJob] delete h2 workspace({}) failed, if server can't startup successfully, please delete it manually", oldH2, e);
}
} }
} }

View File

@ -3,6 +3,7 @@ package com.github.kfcfans.powerjob.server.persistence.config;
import com.github.kfcfans.powerjob.server.common.utils.OmsFileUtils; import com.github.kfcfans.powerjob.server.common.utils.OmsFileUtils;
import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource; import com.zaxxer.hikari.HikariDataSource;
import org.apache.commons.io.FileUtils;
import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder; import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
@ -10,6 +11,7 @@ import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary; import org.springframework.context.annotation.Primary;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.io.File;
/** /**
* 多重数据源配置 * 多重数据源配置
@ -34,14 +36,21 @@ public class MultiDatasourceConfig {
@Bean("omsLocalDatasource") @Bean("omsLocalDatasource")
public DataSource initOmsLocalDatasource() { public DataSource initOmsLocalDatasource() {
String h2Path = OmsFileUtils.genH2WorkPath();
HikariConfig config = new HikariConfig(); HikariConfig config = new HikariConfig();
config.setDriverClassName(H2_DRIVER_CLASS_NAME); config.setDriverClassName(H2_DRIVER_CLASS_NAME);
config.setJdbcUrl(String.format(H2_JDBC_URL_PATTERN, OmsFileUtils.genH2WorkPath())); config.setJdbcUrl(String.format(H2_JDBC_URL_PATTERN, h2Path));
config.setAutoCommit(true); config.setAutoCommit(true);
// 池中最小空闲连接数量 // 池中最小空闲连接数量
config.setMinimumIdle(H2_MIN_SIZE); config.setMinimumIdle(H2_MIN_SIZE);
// 池中最大连接数量 // 池中最大连接数量
config.setMaximumPoolSize(H2_MAX_ACTIVE_SIZE); config.setMaximumPoolSize(H2_MAX_ACTIVE_SIZE);
// JVM 关闭时删除文件
try {
FileUtils.forceDeleteOnExit(new File(h2Path));
}catch (Exception ignore) {
}
return new HikariDataSource(config); return new HikariDataSource(config);
} }
} }

View File

@ -92,9 +92,4 @@ public class UtilsTest {
System.out.println(StringUtils.containsWhitespace(goodAppName)); System.out.println(StringUtils.containsWhitespace(goodAppName));
System.out.println(StringUtils.containsWhitespace(appName)); System.out.println(StringUtils.containsWhitespace(appName));
} }
@Test
public void testPre() {
OhMyApplication.pre();
}
} }

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>com.github.kfcfans</groupId> <groupId>com.github.kfcfans</groupId>
<version>1.0.0</version> <version>2.0.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>com.github.kfcfans</groupId> <groupId>com.github.kfcfans</groupId>
<version>1.0.0</version> <version>2.0.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>com.github.kfcfans</groupId> <groupId>com.github.kfcfans</groupId>
<version>1.0.0</version> <version>2.0.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent> <parent>
<artifactId>powerjob</artifactId> <artifactId>powerjob</artifactId>
<groupId>com.github.kfcfans</groupId> <groupId>com.github.kfcfans</groupId>
<version>1.0.0</version> <version>2.0.0</version>
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>

View File

@ -12,16 +12,15 @@ import com.github.kfcfans.powerjob.common.utils.CommonUtils;
import com.github.kfcfans.powerjob.common.utils.HttpUtils; import com.github.kfcfans.powerjob.common.utils.HttpUtils;
import com.github.kfcfans.powerjob.common.utils.JsonUtils; import com.github.kfcfans.powerjob.common.utils.JsonUtils;
import com.github.kfcfans.powerjob.common.utils.NetUtils; import com.github.kfcfans.powerjob.common.utils.NetUtils;
import com.github.kfcfans.powerjob.worker.actors.TroubleshootingActor;
import com.github.kfcfans.powerjob.worker.actors.ProcessorTrackerActor; import com.github.kfcfans.powerjob.worker.actors.ProcessorTrackerActor;
import com.github.kfcfans.powerjob.worker.actors.TaskTrackerActor; import com.github.kfcfans.powerjob.worker.actors.TaskTrackerActor;
import com.github.kfcfans.powerjob.worker.actors.TroubleshootingActor;
import com.github.kfcfans.powerjob.worker.actors.WorkerActor; import com.github.kfcfans.powerjob.worker.actors.WorkerActor;
import com.github.kfcfans.powerjob.worker.background.OmsLogHandler; import com.github.kfcfans.powerjob.worker.background.OmsLogHandler;
import com.github.kfcfans.powerjob.worker.background.ServerDiscoveryService; import com.github.kfcfans.powerjob.worker.background.ServerDiscoveryService;
import com.github.kfcfans.powerjob.worker.background.WorkerHealthReporter; import com.github.kfcfans.powerjob.worker.background.WorkerHealthReporter;
import com.github.kfcfans.powerjob.worker.common.OhMyConfig; import com.github.kfcfans.powerjob.worker.common.OhMyConfig;
import com.github.kfcfans.powerjob.worker.common.OmsBannerPrinter; import com.github.kfcfans.powerjob.worker.common.OmsBannerPrinter;
import com.github.kfcfans.powerjob.worker.common.utils.OmsWorkerFileUtils;
import com.github.kfcfans.powerjob.worker.common.utils.SpringUtils; import com.github.kfcfans.powerjob.worker.common.utils.SpringUtils;
import com.github.kfcfans.powerjob.worker.persistence.TaskPersistenceService; import com.github.kfcfans.powerjob.worker.persistence.TaskPersistenceService;
import com.google.common.base.Stopwatch; import com.google.common.base.Stopwatch;
@ -31,7 +30,6 @@ import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory; import com.typesafe.config.ConfigFactory;
import lombok.Getter; import lombok.Getter;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.springframework.beans.BeansException; import org.springframework.beans.BeansException;
import org.springframework.beans.factory.DisposableBean; import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.InitializingBean;
@ -39,7 +37,6 @@ import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware; import org.springframework.context.ApplicationContextAware;
import org.springframework.util.StringUtils; import org.springframework.util.StringUtils;
import java.io.File;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
@ -83,7 +80,6 @@ public class OhMyWorker implements ApplicationContextAware, InitializingBean, Di
Stopwatch stopwatch = Stopwatch.createStarted(); Stopwatch stopwatch = Stopwatch.createStarted();
log.info("[OhMyWorker] start to initialize OhMyWorker..."); log.info("[OhMyWorker] start to initialize OhMyWorker...");
try { try {
pre();
OmsBannerPrinter.print(); OmsBannerPrinter.print();
// 校验 appName // 校验 appName
if (!config.isEnableTestMode()) { if (!config.isEnableTestMode()) {
@ -183,16 +179,4 @@ public class OhMyWorker implements ApplicationContextAware, InitializingBean, Di
public void destroy() throws Exception { public void destroy() throws Exception {
timingPool.shutdownNow(); timingPool.shutdownNow();
} }
private static void pre() {
// 删除历史遗留的 H2 数据库文件
File oldH2File = new File(OmsWorkerFileUtils.getH2BaseDir());
try {
if (oldH2File.exists()) {
FileUtils.forceDelete(oldH2File);
}
}catch (Exception e) {
log.warn("[PowerJob] delete h2 workspace({}) failed, if worker can't startup successfully, please delete it manually", oldH2File, e);
}
}
} }

View File

@ -5,8 +5,12 @@ import com.github.kfcfans.powerjob.worker.common.constants.StoreStrategy;
import com.github.kfcfans.powerjob.worker.common.utils.OmsWorkerFileUtils; import com.github.kfcfans.powerjob.worker.common.utils.OmsWorkerFileUtils;
import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource; import com.zaxxer.hikari.HikariDataSource;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.h2.Driver;
import javax.sql.DataSource; import javax.sql.DataSource;
import java.io.File;
import java.sql.Connection; import java.sql.Connection;
import java.sql.SQLException; import java.sql.SQLException;
@ -16,12 +20,14 @@ import java.sql.SQLException;
* @author tjq * @author tjq
* @since 2020/3/17 * @since 2020/3/17
*/ */
@Slf4j
public class ConnectionFactory { public class ConnectionFactory {
private static volatile DataSource dataSource; private static volatile DataSource dataSource;
private static final String DISK_JDBC_URL = String.format("jdbc:h2:file:%spowerjob_worker_db", OmsWorkerFileUtils.getH2WorkDir()); private static final String H2_PATH = OmsWorkerFileUtils.getH2WorkDir();
private static final String MEMORY_JDBC_URL = String.format("jdbc:h2:mem:%spowerjob_worker_db", OmsWorkerFileUtils.getH2WorkDir()); private static final String DISK_JDBC_URL = String.format("jdbc:h2:file:%spowerjob_worker_db;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false", H2_PATH);
private static final String MEMORY_JDBC_URL = String.format("jdbc:h2:mem:%spowerjob_worker_db;DB_CLOSE_DELAY=-1;DATABASE_TO_UPPER=false", H2_PATH);
public static Connection getConnection() throws SQLException { public static Connection getConnection() throws SQLException {
return getDataSource().getConnection(); return getDataSource().getConnection();
@ -38,7 +44,7 @@ public class ConnectionFactory {
StoreStrategy strategy = OhMyWorker.getConfig() == null ? StoreStrategy.DISK : OhMyWorker.getConfig().getStoreStrategy(); StoreStrategy strategy = OhMyWorker.getConfig() == null ? StoreStrategy.DISK : OhMyWorker.getConfig().getStoreStrategy();
HikariConfig config = new HikariConfig(); HikariConfig config = new HikariConfig();
config.setDriverClassName("org.h2.Driver"); config.setDriverClassName(Driver.class.getName());
config.setJdbcUrl(strategy == StoreStrategy.DISK ? DISK_JDBC_URL : MEMORY_JDBC_URL); config.setJdbcUrl(strategy == StoreStrategy.DISK ? DISK_JDBC_URL : MEMORY_JDBC_URL);
config.setAutoCommit(true); config.setAutoCommit(true);
// 池中最小空闲连接数量 // 池中最小空闲连接数量
@ -46,6 +52,14 @@ public class ConnectionFactory {
// 池中最大连接数量 // 池中最大连接数量
config.setMaximumPoolSize(32); config.setMaximumPoolSize(32);
dataSource = new HikariDataSource(config); dataSource = new HikariDataSource(config);
log.info("[OmsDatasource] init h2 datasource successfully, use url: {}", config.getJdbcUrl());
// JVM 关闭时删除数据库文件
try {
FileUtils.forceDeleteOnExit(new File(H2_PATH));
}catch (Exception ignore) {
}
} }
} }
return dataSource; return dataSource;

View File

@ -36,7 +36,7 @@ public class TaskPersistenceService {
private TaskPersistenceService() { private TaskPersistenceService() {
} }
private TaskDAO taskDAO = new TaskDAOImpl(); private final TaskDAO taskDAO = new TaskDAOImpl();
public void init() throws Exception { public void init() throws Exception {
if (initialized) { if (initialized) {
@ -207,8 +207,8 @@ public class TaskPersistenceService {
Map<TaskStatus, Long> result = Maps.newHashMap(); Map<TaskStatus, Long> result = Maps.newHashMap();
dbRES.forEach(row -> { dbRES.forEach(row -> {
// H2 数据库都是大写... // H2 数据库都是大写...
int status = Integer.parseInt(String.valueOf(row.get("STATUS"))); int status = Integer.parseInt(String.valueOf(row.get("status")));
long num = Long.parseLong(String.valueOf(row.get("NUM"))); long num = Long.parseLong(String.valueOf(row.get("num")));
result.put(TaskStatus.of(status), num); result.put(TaskStatus.of(status), num);
}); });
return result; return result;
@ -238,10 +238,10 @@ public class TaskPersistenceService {
try { try {
SimpleTaskQuery query = genKeyQuery(instanceId, taskId); SimpleTaskQuery query = genKeyQuery(instanceId, taskId);
query.setQueryContent("STATUS"); query.setQueryContent("status");
return execute(() -> { return execute(() -> {
List<Map<String, Object>> rows = taskDAO.simpleQueryPlus(query); List<Map<String, Object>> rows = taskDAO.simpleQueryPlus(query);
return Optional.of(TaskStatus.of((int) rows.get(0).get("STATUS"))); return Optional.of(TaskStatus.of((int) rows.get(0).get("status")));
}); });
}catch (Exception e) { }catch (Exception e) {
log.error("[TaskPersistenceService] getTaskStatus failed, instanceId={},taskId={}.", instanceId, taskId, e); log.error("[TaskPersistenceService] getTaskStatus failed, instanceId={},taskId={}.", instanceId, taskId, e);