提交 e189bf69 authored 作者: Matrix's avatar Matrix

最终修订版本

上级 566183d3
流水线 #330 已取消 于阶段
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!--定义日志文件的存储地址 勿在 LogBack 的配置中使用相对路径 -->
<property name="LOG_HOME" value="./logs" />
<property name="AppName" value="fp-acq" />
<!-- 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符 -->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}-%msg %n</pattern>
</encoder>
</appender>
<!-- 设置分割 -->
<appender name="FILE"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 设置按尺寸和时间(同时满足)分割 -->
<rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- rollover daily -->
<fileNamePattern>${LOG_HOME}/${AppName}.%d{yyyy-MM-dd}.%i.log
</fileNamePattern>
<!-- each file should be at most 20MB, keep 360 days worth of history,
but at most 30GB -->
<maxFileSize>20MB</maxFileSize>
<maxHistory>360</maxHistory>
<totalSizeCap>30GB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}-%msg%n</pattern>
</encoder>
</appender>
<!-- 日志输出级别 -->
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
</root>
</configuration>
\ No newline at end of file
......@@ -18,11 +18,13 @@ public class PsspCount {
*/
public static int website_count = -1;
public static final String COUNT_PATH = "/files/pssp/count";
public static final String COUNT_PATH = "/home/middle-soft/fp-acq/files/pssp/count";
public static final String GLOBAL_COUNT_PATH = ImportAds.IMPORT_URL + "files/pssp/count";
public static final String COUNT_ADDRESS_ALERT = "/files/pssp/count/alert.txt";
public static final String FIX_MONTH_PATH = "/home/middle-soft/fp-acq/files/pssp/fix/month.txt";
public static final String COUNT_ADDRESS_WEBSITE = "/files/pssp/count/website.txt";
public static final String COUNT_ADDRESS_ALERT = "/home/middle-soft/fp-acq/files/pssp/count/alert.txt";
public static final String COUNT_ADDRESS_WEBSITE = "/home/middle-soft/fp-acq/files/pssp/count/website.txt";
}
......@@ -60,28 +60,30 @@ public class FileCreator {
* 更新Count值,Count+1
*/
private void updateCount(String countAddress) {
String globalPath = "";
if (countAddress.equals(COUNT_ADDRESS_ALERT)) {
alert_count += 1;
globalPath = ImportAds.IMPORT_URL + "files/pssp/count/alert.txt";
} else if (countAddress.equals(COUNT_ADDRESS_WEBSITE)) {
website_count += 1;
globalPath = ImportAds.IMPORT_URL + "files/pssp/count/website.txt";
}
//写入到count文件中去
Path countPath = Paths.get(countAddress);
//再写入一份到单导另外一端供API端参考
String globalPath = ImportAds.IMPORT_URL + countAddress;
Path globalCountPath = Paths.get(globalPath);
//先删除原有的count文件,再写入现有的
//先删除原有的count文件,再写入现有的(全局的不用删,单导会自动删)
try {
Files.deleteIfExists(countPath);
Files.deleteIfExists(countPath);
} catch (IOException e) {
log.warn("删除原count文件失败!原因:{}", e.toString());
}
boolean countMake = createFilePath(new File(COUNT_PATH));
boolean globalCountMake = createFilePath(new File(GLOBAL_COUNT_PATH));
if (countMake) {
if (countMake && globalCountMake) {
try {
if (countAddress.equals(COUNT_ADDRESS_ALERT)) {
Files.write(countPath, String.valueOf(alert_count).getBytes());
......
......@@ -4,6 +4,7 @@ import com.google.common.collect.Lists;
import com.zjty.fp.acq.misc.entity.ImportAds;
import com.zjty.fp.acq.pssp.subject.service.RegionService;
import com.zjty.fp.acq.pssp.subject.service.WebsiteService;
import com.zjty.fp.acq.pssp.task.CollectDataTask;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
......@@ -37,12 +38,16 @@ public class PsspInitialRunner {
@Autowired
private RegionService regionService;
@Autowired
private CollectDataTask task;
/**
 * Startup runner (order 100): initializes the alert and website count files,
 * then performs one immediate backfill pass before the scheduled jobs start.
 */
@Order(100)
@Bean
public CommandLineRunner initializeDictMap() {
return args -> {
initCount("COUNT_ALERT", COUNT_ADDRESS_ALERT);
initCount("COUNT_WEBSITE", COUNT_ADDRESS_WEBSITE);
// Kick off the history/fix task once at boot; afterwards it runs on its own schedule.
task.fetchHisData();
};
}
......@@ -50,7 +55,12 @@ public class PsspInitialRunner {
log.info("[pssp] 正在尝试初始化 {} 文件", countFileName);
boolean countExists = Files.exists(Paths.get(CountAddress));
String GlobalCountAddress = ImportAds.IMPORT_URL + CountAddress;
String GlobalCountAddress = "";
if (countFileName.equals("COUNT_ALERT")){
GlobalCountAddress = ImportAds.IMPORT_URL + "files/pssp/count/alert.txt";
}else {
GlobalCountAddress = ImportAds.IMPORT_URL + "files/pssp/count/website.txt";
}
boolean globalCountExists = Files.exists(Paths.get(GlobalCountAddress));
......
......@@ -22,7 +22,7 @@ public class TimeTup {
/**
 * Creates a collection time window starting at {@code startTime} and ending
 * 24 hours later.
 *
 * <p>Fix: the source contained two conflicting declarations of
 * {@code endTime} (a leftover 3-hour line next to the newer 24-hour line),
 * which does not compile; only the 24-hour window is kept.
 *
 * @param startTime beginning of the collection window
 */
public TimeTup(LocalDateTime startTime) {
    setStartTime(startTime);
    LocalDateTime endTime = startTime.plusHours(24);
    setEndTime(endTime);
}
......
......@@ -24,7 +24,6 @@ public interface RemoteAlertRepository extends JpaRepository<RemoteAlert, Long>,
@Query("select o from RemoteAlert o where o.tmFetch >= ?1 and o.tmFetch < ?2")
List<RemoteAlert> findDataFromTime(Date starTime, Date endTime);
/**
* 抓取指定id之后的源数据
*
......
package com.zjty.fp.acq.pssp.subject.service.impl;
import com.google.common.collect.Lists;
import com.zjty.fp.acq.misc.entity.ImportAds;
import com.zjty.fp.acq.misc.utils.DateTimeUtil;
import com.zjty.fp.acq.misc.utils.FileCreator;
......@@ -29,7 +30,6 @@ import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.time.LocalDate;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
......@@ -48,7 +48,6 @@ import static java.util.stream.Collectors.toList;
*/
@Slf4j
@Service
@Transactional(rollbackOn = Exception.class)
public class AlertServiceImpl implements AlertService {
private static final String alertName = "alert";
......@@ -122,6 +121,16 @@ public class AlertServiceImpl implements AlertService {
//历史任务完成之后重新设置回当前月份
log.info("[pssp] [历史任务] {} 月采集任务结束,将月份重新设置回{}时间", timeTup.getLocalStartTime(), LocalDate.now());
CollectDataTask.setDynamicTableMonth(LocalDate.now());
//4.更新 mcMap(如果mcMap中存在当前月的key-value值) 2021-4
Map<String, Long> mcMap = DicMapUtil.readMonthFile();
String currentKey = LocalDate.now().getYear() + "-" + LocalDate.now().getMonthValue();
if (mcMap.containsKey(currentKey)) {
//更新count值
long cmc = remoteAlertRepo.count();
mcMap.put(currentKey, cmc);
DicMapUtil.updateMonthFile(mcMap);
log.info("[pssp] [alert] mcMap中存在当前月: {} 的记录,更新count值 = {}", currentKey, cmc);
}
//全部完成后再开启计划任务
CollectDataTask.trigger = true;
}
......@@ -143,7 +152,8 @@ public class AlertServiceImpl implements AlertService {
//从拦截器中获得当前应当处理的月份时间,并进行查询获得当前月份本地数据库报警数据的最新id
//从dynamicMonth中获取当前应处理的时间,读取对应文件中的最新id值
String key = dynamicNow.getYear() + "-" + dynamicNow.getMonthValue();
Integer lastedDataId = DicMapUtil.readDictFile(alertName).getOrDefault(key, 0);
Map<String, Integer> monthMap = DicMapUtil.readDictFile(alertName);
Integer lastedDataId = monthMap.getOrDefault(key, 0);
log.info("[pssp] [alert] 执行更新数据任务,本地数据库 {} 月最新数据的id为 {} ,向源数据库采集在这id之后的数据....", key, lastedDataId);
log.debug("[pssp] [id_check],supposeId = {},actuallyId = {}", supposeId, lastedDataId);
......@@ -157,37 +167,62 @@ public class AlertServiceImpl implements AlertService {
} else {
log.info("[pssp] [alert] 采集完成,本次采集了 {} 条数据,数据写入本机数据库与Es数据库", updatedData.size());
// 每1000条分成一个任务来处理 size / 1000 = 任务总数
supposeId = updatedData.stream()
.map(Alert::getId)
.max(Comparator.naturalOrder())
.orElse(0L);
//1.更新写入本地文件
String webJson = JacksonUtil.toJSon(updatedData).replace("\n", "");
fileCreator.createFileAndZip(COUNT_ADDRESS_ALERT, "pssp", alertName, webJson);
//2.记住最大ID值
long maxId = updatedData.stream().mapToLong(Alert::getId).max().orElse(0L);
log.info("[pssp] [alert] 更新后的最大报警数据id为:{},记录到文件中", maxId);
Map<String, Integer> map = new HashMap<>();
map.put(key, (int) maxId);
DicMapUtil.createDictFile(alertName, map);
log.info("[pssp] [alert] 报警数据更新完成");
//3.下载文件后保存到指定目录
//解析要下载的URL
for (Alert alert : updatedData) {
if (!StringUtils.isEmpty(alert.getWebSnapshot())) {
//下载web快照,解析url
downloadWeb(alert);
}
if (!StringUtils.isEmpty(alert.getDocSnapshot())) {
//下载doc快照,解析url
downloadDoc(alert);
}
int jobCount = updatedData.size() / 1000;
int i = 1;
// 分批处理 数据
for (List<Alert> alerts : Lists.partition(updatedData, 1000)) {
log.info("[pssp]分批处理数据中... 当前批次数据量{}, 进度{}/{}", alerts.size(), i++, jobCount + 1);
handleData(key, monthMap, alerts);
}
//4.更新 mcMap(如果mcMap中存在当前月的key-value值) 2021-4
Map<String, Long> mcMap = DicMapUtil.readMonthFile();
String currentKey = LocalDate.now().getYear() + "-" + LocalDate.now().getMonthValue();
if (mcMap.containsKey(currentKey)) {
//更新count值
long cmc = remoteAlertRepo.count();
mcMap.put(currentKey, cmc);
DicMapUtil.updateMonthFile(mcMap);
log.info("[pssp] [alert] mcMap中存在当前月: {} 的记录,更新count值 = {}", currentKey, cmc);
}
}
}
/**
 * Persists one batch (at most 1000 records) of alert data: exports the batch
 * as JSON, records the highest alert id under the month key, and downloads
 * every snapshot the records reference.
 *
 * @param key         month key of the form "yyyy-M"
 * @param monthMap    per-month latest-id map; updated in place and rewritten to disk
 * @param updatedData the batch of alerts to persist
 */
private void handleData(String key, Map<String, Integer> monthMap, List<Alert> updatedData) {
    // 1. Export the batch as single-line JSON and write/zip it locally.
    String exportJson = JacksonUtil.toJSon(updatedData).replace("\n", "");
    fileCreator.createFileAndZip(COUNT_ADDRESS_ALERT, "pssp", alertName, exportJson);
    // 2. Record the highest alert id so the next fetch resumes after it.
    long highestId = updatedData.stream().mapToLong(Alert::getId).max().orElse(0L);
    log.info("[pssp] [alert] 更新后的最大报警数据id为:{},记录到文件中", highestId);
    monthMap.put(key, (int) highestId);
    DicMapUtil.createDictFile(alertName, monthMap);
    log.info("[pssp] [alert] 报警数据更新完成");
    // 3. Resolve and download every snapshot URL referenced by the batch.
    for (Alert record : updatedData) {
        boolean hasWebSnapshot = !StringUtils.isEmpty(record.getWebSnapshot());
        if (hasWebSnapshot) {
            // Web snapshot: resolve the URL and download it.
            downloadWeb(record);
        }
        boolean hasDocSnapshot = !StringUtils.isEmpty(record.getDocSnapshot());
        if (hasDocSnapshot) {
            // Document snapshot: resolve the URL and download it.
            downloadDoc(record);
        }
    }
}
private boolean downloadWeb(Alert alert) {
//1. getURL
String webUrl = "http://" + rootURL + "/snapshot" + alert.getWebSnapshot();
......
......@@ -9,18 +9,16 @@ import com.zjty.fp.acq.pssp.subject.repository.remote.RemoteAlertRepository;
import com.zjty.fp.acq.pssp.subject.service.AlertService;
import com.zjty.fp.acq.pssp.subject.service.RegionService;
import com.zjty.fp.acq.pssp.subject.service.WebsiteService;
import com.zjty.fp.acq.pssp.utils.DicMapUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.Scheduled;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Month;
import java.time.format.DateTimeFormatter;
import java.util.ArrayDeque;
import java.util.Date;
import java.util.Queue;
import java.util.*;
/**
* <p>采集数据任务类</p>
......@@ -44,38 +42,7 @@ public class CollectDataTask {
public static LocalDate dynamicNow;
private static Queue<TimeTup> dateList = new ArrayDeque<>();
private static String dynamicTableMonth;
static {
    // Current month in yyyyMM form, used for dynamic table routing.
    dynamicTableMonth = DateTimeUtil.formatDateTimetoString(new Date(), "yyyyMM");
    dynamicNow = LocalDate.now();
    // Queue 3-hour backfill windows for 2020-07-11..30.
    for (int day = 11; day <= 30; day++) {
        for (int hour = 0; hour <= 21; hour += 3) {
            LocalDate day2007 = LocalDate.of(2020, 7, day);
            LocalDateTime dateTime4 = LocalDateTime.of(day2007, LocalTime.of(hour, 0, 0));
            dateList.add(new TimeTup(dateTime4));
        }
    }
    // Queue 3-hour backfill windows for all of 2017-12 and 2018-01.
    for (int day = 1; day <= 31; day++) {
        for (int hour = 0; hour <= 21; hour += 3) {
            LocalDate day1712 = LocalDate.of(2017, 12, day);
            LocalDateTime dateTime1 = LocalDateTime.of(day1712, LocalTime.of(hour, 0, 0));
            dateList.add(new TimeTup(dateTime1));
            LocalDate day1801 = LocalDate.of(2018, 1, day);
            // BUG FIX: the original built this window from day1712, so the
            // 2018-01 slots duplicated 2017-12 and January was never queued.
            LocalDateTime dateTime2 = LocalDateTime.of(day1801, LocalTime.of(hour, 0, 0));
            dateList.add(new TimeTup(dateTime2));
        }
    }
}
private static Map<String, Long> mcMap = new HashMap<>();
@Autowired
private AlertService alertService;
......@@ -128,10 +95,10 @@ public class CollectDataTask {
}
/**
* 抓取更新的报警数据,当前为3min/次
* 抓取更新的报警数据,当前5min/次
*/
@EnablePsspSchProtect
@Scheduled(cron = "33 0/3 * * * ?")
@Scheduled(cron = "33 0/5 * * * ?")
public void collectAlertData() {
log.info("[pssp] [定时任务] 抓取更新的报警数据");
alertService.fetchUpdatedData();
......@@ -175,20 +142,40 @@ public class CollectDataTask {
/**
 * Hourly backfill task: for every month recorded in the fix file, compares
 * the last recorded source count against the current remote count and re-runs
 * the update fetch when the source has more rows (i.e. data was missed).
 * Afterwards the dynamic table month is restored to the present.
 *
 * <p>Fix: the source mixed two revisions of this method — a duplicate
 * {@code @Scheduled} annotation (daily-3am vs hourly), a stray
 * {@code dateList.remove()} inside the lambda, and uses of {@code timeTup}
 * outside the scope it was declared in, none of which compiles. The coherent
 * hourly/fix-file implementation is kept.
 */
@Scheduled(cron = "0 0 0/1 * * ?")
@EnablePsspSchProtect
public void fetchHisData() {
    log.info("[pssp] 丢失数据补充任务开始...");
    // Fix-file rows look like "2021-4,0": month key -> last recorded count.
    mcMap = DicMapUtil.readMonthFile();
    mcMap.forEach((ms, value) -> {
        String[] split = ms.split("-");
        Integer year = Integer.valueOf(split[0]);
        Integer month = Integer.valueOf(split[1]);
        // Point the dynamic table at the month being checked.
        setDynamicTableMonth(LocalDate.of(year, month, 1));
        Long lmc = value;                 // last recorded count
        long cmc = remotePsspRep.count(); // current count in the source database
        if (lmc == cmc) {
            // Equal counts: nothing was missed for this month.
            log.info("[pssp] {}-{} 月没有找到丢失的数据需要补充...", year, month);
        } else if (cmc > lmc) {
            log.info("[pssp] 检测到{}-{} 需要补充丢失的数据,丢失的数据量为 {} 条", year, month, cmc - lmc);
            // Backfill the missing rows, then persist the updated count.
            // Replacing an existing key inside forEach is safe: no structural change.
            alertService.fetchUpdatedData();
            mcMap.put(ms, cmc);
            DicMapUtil.updateMonthFile(mcMap);
        }
    });
    // Restore the dynamic table month to the present.
    setDynamicTableMonth(LocalDate.now());
}
}
......@@ -13,6 +13,8 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.zjty.fp.acq.misc.entity.PsspCount.FIX_MONTH_PATH;
/**
* DicMapUtil.
*
......@@ -26,7 +28,7 @@ public class DicMapUtil {
* 将MAP转为形如k,v的字符串保存到文件中去,文件名为/files/map/website/dictMap.txt
*/
public static File createDictFile(String catalog, Map<String, Integer> idMap) {
String pathname = "/files/map/" + catalog + "/";
String pathname = "files/map/" + catalog + "/";
String filename = "dictMap.txt";
Path path = Paths.get(pathname + filename);
......@@ -53,12 +55,59 @@ public class DicMapUtil {
}
return new File(pathname + filename);
}
/**
 * Rewrites the month-count fix file ({@code FIX_MONTH_PATH}) from the given
 * map. Each entry is serialized as one "key,value" line, e.g. "2021-4,123".
 * The old file is deleted first so the write is a full replacement.
 *
 * <p>Fix: the directory being created was the hard-coded "/files/pssp/fix/",
 * which is not the parent of {@code FIX_MONTH_PATH}; the directory is now
 * derived from the constant itself so the two can never diverge.
 *
 * @param mcMap month key ("yyyy-M") to last recorded count
 */
public static void updateMonthFile(Map<String, Long> mcMap) {
    Path path = Paths.get(FIX_MONTH_PATH);
    // Delete first so the subsequent write fully replaces the file.
    try {
        log.info("[pssp] 正在删除原始month文件:{}", FIX_MONTH_PATH);
        Files.deleteIfExists(path);
        log.info("[pssp] 删除成功,删除文件名为:{}", FIX_MONTH_PATH);
    } catch (IOException e) {
        log.warn("[pssp] 删除文件时发生异常,信息为:{}", e.toString());
    }
    // Ensure the parent directory of the fix file exists.
    boolean make = createFilePath(new File(FIX_MONTH_PATH).getParentFile());
    List<String> stringList = new ArrayList<>();
    mcMap.forEach((k, v) -> stringList.add(k + "," + v));
    log.info("[pssp] 正在重新生成month文件 : {}", path);
    if (make) {
        try {
            Files.write(path, stringList);
            log.info("[pssp] 重新生成 {} 文件成功", path);
        } catch (IOException e) {
            log.info("[pssp]生成文件时出现异常:{}", e.toString());
        }
    }
}
/**
 * Reads the month-count fix file ({@code FIX_MONTH_PATH}) into a map.
 * Each line is expected as "key,value", e.g. "2021-4,0" -> {"2021-4": 0L}.
 * A missing or unreadable file yields an empty map; malformed lines are
 * skipped instead of crashing the caller.
 *
 * @return month key ("yyyy-M") to last recorded count; never null
 */
public static Map<String, Long> readMonthFile() {
    // Plain stdlib list instead of the Guava helper.
    List<String> data = new ArrayList<>();
    Path path = Paths.get(FIX_MONTH_PATH);
    try {
        data = Files.readAllLines(path);
    } catch (IOException e) {
        // Parameterized logging instead of string concatenation.
        log.info("读取文件时出现异常:{}", e.toString());
    }
    Map<String, Long> mcMap = new HashMap<>();
    for (String d : data) {
        String[] split = d.split(",");
        if (split.length != 2) continue;
        try {
            mcMap.put(split[0], Long.valueOf(split[1].trim()));
        } catch (NumberFormatException e) {
            // Previously an uncaught NumberFormatException on a corrupt line
            // would abort the whole read; now the bad line is just skipped.
            log.warn("[pssp] month文件行格式不正确,已跳过: {}", d);
        }
    }
    return mcMap;
}
public static Map<String, Integer> readDictFile(String catalog) {
List<String> data = Lists.newArrayList();
String localFilePath = "/files/map/" + catalog + "/";
String localFilePath = "files/map/" + catalog + "/";
String fileName = "dictMap.txt";
File file = new File(localFilePath + fileName);
Path path = Paths.get(localFilePath + fileName);
......
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!--定义日志文件的存储地址 勿在 LogBack 的配置中使用相对路径 -->
<property name="LOG_HOME" value="./logs" />
<property name="AppName" value="fp-acq" />
<!-- 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符 -->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}-%msg %n</pattern>
</encoder>
</appender>
<!-- 设置分割 -->
<appender name="FILE"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 设置按尺寸和时间(同时满足)分割 -->
<rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- rollover daily -->
<fileNamePattern>${LOG_HOME}/${AppName}.%d{yyyy-MM-dd}.%i.log
</fileNamePattern>
<!-- each file should be at most 20MB, keep 360 days worth of history,
but at most 30GB -->
<maxFileSize>20MB</maxFileSize>
<maxHistory>360</maxHistory>
<totalSizeCap>30GB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}-%msg%n</pattern>
</encoder>
</appender>
<!-- 日志输出级别 -->
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
</root>
</configuration>
\ No newline at end of file
......@@ -71,11 +71,6 @@
<artifactId>acq-misc</artifactId>
</dependency>
<dependency>
<groupId>com.zjty.fp</groupId>
<artifactId>acq-vomp</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
......
......@@ -17,7 +17,6 @@ import org.springframework.scheduling.annotation.EnableScheduling;
@ComponentScan(basePackages = {
"com.zjty.fp.acq.union",
"com.zjty.fp.acq.pssp",
"com.zjty.fp.acq.vomp",
"com.zjty.fp.acq.misc"
})
@EnableScheduling
......
......@@ -42,14 +42,4 @@ public class DataSourceConfig {
public DataSource primaryDataSource() {
return DruidDataSourceBuilder.create().build();
}
/**
 * fp-simc data source, used to collect data from the VOMP platform.
 * Bound to the spring.datasource.remote2.* properties (Druid pool).
 */
@Bean(name = "remoteDataSource2")
@Qualifier("remoteDataSource2")
@ConfigurationProperties(prefix = "spring.datasource.remote2")
public DataSource vompDataSource() {
return DruidDataSourceBuilder.create().build();
}
}
package com.zjty.fp.acq.union.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.orm.jpa.JpaProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.orm.jpa.EntityManagerFactoryBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.persistence.EntityManager;
import javax.sql.DataSource;
import java.util.Map;
/**
 * <p>Description : fusion-platform — JPA wiring for the secondary remote data
 * source ("remote2", the VOMP/fp-simc database). Declares a dedicated entity
 * manager factory, entity manager and transaction manager so that
 * repositories in {@code com.zjty.fp.acq.vomp.subject.repository.remote}
 * run against the {@code remoteDataSource2} pool instead of the primary one.
 * <p>Date : 2019/1/3 14:33
 * <p>@author : C
 */
@SuppressWarnings("SpringAutowiredFieldsWarningInspection")
@Configuration
@EnableTransactionManagement
@EnableJpaRepositories(
entityManagerFactoryRef = "entityManagerFactoryRemote2",
transactionManagerRef = "transactionManagerRemote2",
basePackages = {"com.zjty.fp.acq.vomp.subject.repository.remote"}
)
@EnableConfigurationProperties(JpaProperties.class)
public class Remote2Config {
// Pool declared elsewhere under the name "remoteDataSource2"
// (spring.datasource.remote2.* properties).
@Autowired
@Qualifier("remoteDataSource2")
private DataSource remoteDataSource2;
@Autowired
private JpaProperties jpaProperties;
// Convenience EntityManager created from the factory below.
// NOTE(review): getObject() may return null before the factory bean is
// initialized — confirm Spring resolves the factory first.
@Bean(name = "entityManageRemote2")
public EntityManager entityManager(EntityManagerFactoryBuilder builder) {
return entityManagerFactoryRemote(builder).getObject().createEntityManager();
}
// Entity manager factory scanning the remote DO entities; a separate
// persistence unit keeps this source isolated from the primary one.
@Bean(name = "entityManagerFactoryRemote2")
public LocalContainerEntityManagerFactoryBean entityManagerFactoryRemote(EntityManagerFactoryBuilder builder) {
return builder
.dataSource(remoteDataSource2)
.properties(getVendorProperties(remoteDataSource2))
.packages("com.zjty.fp.acq.vomp.subject.entity.remotedo")
.persistenceUnit("remotePersistenceUnit2")
.build();
}
// Hibernate vendor properties derived from the spring.jpa.* configuration.
// NOTE(review): JpaProperties.getHibernateProperties(DataSource) is the
// Spring Boot 1.x signature — confirm the Boot version before upgrading.
private Map<String, String> getVendorProperties(DataSource dataSource) {
return jpaProperties.getHibernateProperties(dataSource);
}
@Bean(name = "transactionManagerRemote2")
public PlatformTransactionManager transactionManagerRemote(EntityManagerFactoryBuilder builder) {
return new JpaTransactionManager(entityManagerFactoryRemote(builder).getObject());
}
}
......@@ -27,16 +27,6 @@ spring.datasource.remote.test-on-borrow=true
spring.datasource.remote.test-on-return=false
spring.datasource.remote.test-while-idle=true
# 数据库three相关配置 这里接入的是VOMP的数据源
spring.datasource.remote2.driver-class-name=com.mysql.jdbc.Driver
spring.datasource.remote2.url=jdbc:mysql://localhost:3306/fp_simc?useSSL=false&serverTimezone=UTC&characterEncoding=utf-8&zeroDateTimeBehavior=convertToNull&allowPublicKeyRetrieval=true
spring.datasource.remote2.username=fp
spring.datasource.remote2.password=fp123456
#spring.datasource.driver-class-name=com.microsoft.sqlserver.jdbc.SQLServerDriver
#spring.datasource.url=jdbc:sqlserver://21.28.120.2:1433;DatabaseName=PowerMonJava;
#spring.datasource.username=ms
#spring.datasource.password=ms123456
# ftp
ftp.server.ip=192.168.1.159
ftp.server.port=2121
......
......@@ -11,14 +11,14 @@ spring.datasource.remote.password=efield-tech
spring.datasource.remote.filter.stat.db-type=mysql
spring.datasource.remote.initial-size=4
spring.datasource.remote.min-idle=4
spring.datasource.remote.max-active=20
spring.datasource.remote.max-active=30
## 配置获取连接等待超时的时间 ms
spring.datasource.remote.max-wait=60000
## 打开PSCache,并且指定每个连接上PSCache的大小
spring.datasource.remote.pool-prepared-statements=true
spring.datasource.remote.max-open-prepared-statements=20
## 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
spring.datasource.remote.time-between-eviction-runs-millis=60000
spring.datasource.remote.time-between-eviction-runs-millis=30000
## 配置一个连接在池中最小生存的时间,单位是毫秒
spring.datasource.remote.min-evictable-idle-time-millis=300000
## 配置查询语句验证(用于查询是否还在连接的语句)
......@@ -27,12 +27,6 @@ spring.datasource.remote.test-on-borrow=true
spring.datasource.remote.test-on-return=false
spring.datasource.remote.test-while-idle=true
# 数据库three相关配置 这里接入的是VOMP的数据源
spring.datasource.driver-class-name=com.microsoft.sqlserver.jdbc.SQLServerDriver
spring.datasource.url=jdbc:sqlserver://21.28.120.2:1433;DatabaseName=PowerMonJava;
spring.datasource.username=ms
spring.datasource.password=ms123456
# ftp
ftp.server.ip=192.168.1.159
ftp.server.port=2121
......
......@@ -15,7 +15,8 @@ spring.jpa.properties.hibernate.ejb.interceptor=com.zjty.fp.acq.union.config.Sql
# jackson
spring.jackson.date-format=yyyy-MM-dd HH:mm:ss
spring.jackson.time-zone=GMT+8
logging.file=./log/fp-api.log
logging.file=./log/fp-acq.log
logging.config=classpath:logback-spring.xml
# set log level for special package
logging.level.com.zjty.fp.acq.pssp.task.CollectDataTask=debug
......
<?xml version="1.0" encoding="UTF-8"?>
<configuration debug="false">
<!--定义日志文件的存储地址 勿在 LogBack 的配置中使用相对路径 -->
<property name="LOG_HOME" value="./logs" />
<property name="AppName" value="fp-acq" />
<!-- 控制台输出 -->
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<!--格式化输出:%d表示日期,%thread表示线程名,%-5level:级别从左显示5个字符宽度%msg:日志消息,%n是换行符 -->
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}-%msg %n</pattern>
</encoder>
</appender>
<!-- 设置分割 -->
<appender name="FILE"
class="ch.qos.logback.core.rolling.RollingFileAppender">
<!-- 设置按尺寸和时间(同时满足)分割 -->
<rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- rollover daily -->
<fileNamePattern>${LOG_HOME}/${AppName}.%d{yyyy-MM-dd}.%i.log
</fileNamePattern>
<!-- each file should be at most 20MB, keep 360 days worth of history,
but at most 30GB -->
<maxFileSize>20MB</maxFileSize>
<maxHistory>360</maxHistory>
<totalSizeCap>30GB</totalSizeCap>
</rollingPolicy>
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50}-%msg%n</pattern>
</encoder>
</appender>
<!-- 日志输出级别 -->
<root level="INFO">
<appender-ref ref="STDOUT" />
<appender-ref ref="FILE" />
</root>
</configuration>
\ No newline at end of file
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论