fix: 修复定时备份数据库任务的 bug

This commit is contained in:
Bunny 2024-10-20 21:43:46 +08:00
parent d65d066b3e
commit 5d1b983c71
17 changed files with 246 additions and 36 deletions

View File

@ -43,7 +43,7 @@ public class ScheduleExecuteLog extends BaseEntity {
private String executeResult;
@Schema(name = "duration", title = "执行时间")
private Integer duration;
private Long duration;
@Schema(name = "endTime", title = "结束时间")
private LocalDateTime endTime;

View File

@ -6,6 +6,8 @@ import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Map;
@Data
@AllArgsConstructor
@NoArgsConstructor
@ -26,5 +28,5 @@ public class ScheduleExecuteLogJson {
private String operationTime;
@Schema(name = "executeParams", title = "执行任务参数")
private String executeParams;
private Map<String, Object> executeParams;
}

View File

@ -7,4 +7,5 @@ public class LocalDateTimeConstant {
public static final String YYYY_MM_DD = "yyyy-MM-dd";
public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";
public static final String YYYY_MM_DD_HH_MM_SS_SLASH = "yyyy/MM/dd HH:mm:ss";
public static final String YYYY_MM_DD_HH_MM_SS_UNDERLINE = "yyyy-MM-dd HH_mm_ss_SSS";
}

View File

@ -1,6 +1,5 @@
package cn.bunny.dao.vo.log;
import cn.bunny.dao.entity.log.ScheduleExecuteLogJson;
import cn.bunny.dao.vo.common.BaseVo;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.*;
@ -31,7 +30,7 @@ public class QuartzExecuteLogVo extends BaseVo {
private String triggerName;
@Schema(name = "executeResult", title = "执行结果")
private ScheduleExecuteLogJson executeResult;
private String executeResult;
@Schema(name = "duration", title = "执行时间")
private Integer duration;

View File

@ -1,4 +1,4 @@
FROM openjdk:17
FROM openjdk:24-ea-17-jdk-oraclelinux9
MAINTAINER server
#系统编码
@ -18,6 +18,7 @@ COPY target/*.jar /home/server/app.jar
VOLUME /usr/bin/docker
VOLUME ["/var/run/docker.sock"]
VOLUME /etc/docker/daemon.json
VOLUME ["/bunny/docker_data/mysql/slave_3304/backup"]
# 启动容器时的进程
ENTRYPOINT ["java","-jar","/home/server/app.jar"]
@ -25,5 +26,5 @@ ENTRYPOINT ["java","-jar","/home/server/app.jar"]
#暴露 8000 端口
EXPOSE 8000
# mvn clean package -Pprod -DskipTests
# mvn clean package -Pprod -DskipTests

View File

@ -0,0 +1,107 @@
package cn.bunny.services.aop;
import cn.bunny.dao.entity.log.ScheduleExecuteLog;
import cn.bunny.dao.entity.log.ScheduleExecuteLogJson;
import cn.bunny.dao.pojo.constant.LocalDateTimeConstant;
import cn.bunny.services.mapper.ScheduleExecuteLogMapper;
import com.alibaba.fastjson2.JSON;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Map;
@Aspect
@Component
public class JobExecuteAop {

    // DateTimeFormatter is immutable and thread-safe, so a single shared instance is enough
    private static final DateTimeFormatter dateTimeFormatter =
            DateTimeFormatter.ofPattern(LocalDateTimeConstant.YYYY_MM_DD_HH_MM_SS);

    @Autowired
    private ScheduleExecuteLogMapper scheduleExecuteLogMapper;

    /**
     * Around-advice wrapping every Quartz job's {@code execute(...)} method
     * (see {@link #pointCut()}). Inserts a "running" log row before the job
     * body runs, then a "finish" or "error" row (with the duration in
     * seconds) once it completes.
     *
     * @param joinPoint the intercepted call; args[0] is the {@link JobExecutionContext}
     * @return the intercepted method's return value
     */
    @Around(value = "pointCut()")
    public Object aroundMethod(ProceedingJoinPoint joinPoint) {
        Object result;
        Object[] args = joinPoint.getArgs();
        JobExecutionContext context = (JobExecutionContext) args[0];

        // Rows written to the schedule-execute-log table
        ScheduleExecuteLog executeLog = new ScheduleExecuteLog();
        ScheduleExecuteLogJson executeLogJson = new ScheduleExecuteLogJson();

        // Start time; also used later to compute the duration
        LocalDateTime startLocalDateTime = LocalDateTime.now();
        // (typo fixed: was "startExecuteTIme")
        String startExecuteTime = startLocalDateTime.format(dateTimeFormatter);

        // Job metadata placed into the JobDataMap when the job was scheduled
        Map<String, Object> jobDataMap = context.getJobDetail().getJobDataMap().getWrappedMap();
        String jobName = String.valueOf(jobDataMap.get("jobName"));
        String jobGroup = String.valueOf(jobDataMap.get("jobGroup"));
        String cronExpression = String.valueOf(jobDataMap.get("cronExpression"));
        String triggerName = String.valueOf(jobDataMap.get("triggerName"));
        Class<? extends Job> jobClass = context.getJobDetail().getJobClass();

        try {
            // "running" row inserted before the job body executes
            executeLog.setJobName(jobName);
            executeLog.setJobGroup(jobGroup);
            executeLog.setJobClassName(jobClass.getName());
            executeLog.setCronExpression(cronExpression);
            executeLog.setTriggerName(triggerName);

            executeLogJson.setResult("unfinished");
            executeLogJson.setStatus("running");
            executeLogJson.setMessage("running...");
            executeLogJson.setOperationTime(startExecuteTime);
            executeLogJson.setExecuteParams(jobDataMap);
            executeLog.setExecuteResult(JSON.toJSONString(executeLogJson));
            scheduleExecuteLogMapper.insert(executeLog);

            // Run the actual job
            result = joinPoint.proceed();

            // Success row
            executeLogJson.setResult("finish");
            executeLogJson.setStatus("finish");
            executeLogJson.setMessage("finish");
            setEndExecuteLog(executeLogJson, executeLog, startLocalDateTime);
        } catch (Throwable e) {
            // Failure row; the original exception is preserved as the cause
            executeLogJson.setResult("error");
            executeLogJson.setStatus("error");
            executeLogJson.setMessage(e.getMessage());
            setEndExecuteLog(executeLogJson, executeLog, startLocalDateTime);
            throw new RuntimeException(e);
        }
        return result;
    }

    /**
     * Writes the end-of-execution row: refreshes the operation time, computes
     * the duration in seconds, and inserts a NEW row (the id is cleared so
     * the mapper does not overwrite the "running" row).
     */
    private void setEndExecuteLog(ScheduleExecuteLogJson executeLogJson, ScheduleExecuteLog executeLog, LocalDateTime startLocalDateTime) {
        LocalDateTime endLocalDateTime = LocalDateTime.now();
        String endExecuteTime = endLocalDateTime.format(dateTimeFormatter);
        executeLogJson.setOperationTime(endExecuteTime);

        executeLog.setId(null);
        executeLog.setExecuteResult(JSON.toJSONString(executeLogJson));
        executeLog.setDuration(Duration.between(startLocalDateTime, endLocalDateTime).toSeconds());
        scheduleExecuteLogMapper.insert(executeLog);
    }

    /** Matches the {@code execute(...)} method of every class in cn.bunny.services.quartz. */
    @Pointcut("execution(* cn.bunny.services.quartz.*.execute(..))")
    public void pointCut() {
    }
}

View File

@ -10,5 +10,9 @@ import java.lang.annotation.Target;
/**
 * Marks a Quartz job class so the scheduler service can discover it and
 * surface its category and description (SchedulersServiceImpl reads
 * {@code type()} and {@code description()} when listing available jobs).
 */
public @interface QuartzSchedulers {
    String value() default "";

    // Scheduler category, e.g. "backup", "email", "test" (see usages in this commit)
    String type();

    // Human-readable description shown in the scheduler list
    String description();
}

View File

@ -17,10 +17,12 @@ import org.springframework.web.multipart.MultipartFile;
@Component
public class FileFactory {
@Autowired
private FilesMapper filesMapper;
@Value("${spring.servlet.multipart.max-file-size}")
private String maxFileSize;
@Autowired
private FilesMapper filesMapper;
@Autowired
private MinioUtil minioUtil;

View File

@ -0,0 +1,63 @@
package cn.bunny.services.quartz;
import cn.bunny.dao.pojo.constant.LocalDateTimeConstant;
import cn.bunny.services.aop.annotation.QuartzSchedulers;
import lombok.extern.slf4j.Slf4j;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.io.InputStream;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
@Slf4j
@QuartzSchedulers(type = "backup", description = "数据库备份任务")
@Component
public class DatabaseBackupJob implements Job {

    @Value("${spring.datasource.dynamic.datasource.master.username}")
    private String masterUsername;

    @Value("${spring.datasource.dynamic.datasource.master.password}")
    private String masterPassword;

    @Value("${bunny.master.database}")
    private String masterDatabase;

    @Value("${bunny.master.databaseBackupDir}")
    private String databaseBackupDir;

    /**
     * Dumps the master database to a timestamped .sql file inside the
     * bunny_auth_server container's backup directory.
     *
     * @param context Quartz execution context (metadata is logged by JobExecuteAop)
     */
    @Override
    public void execute(JobExecutionContext context) {
        // Timestamp used in the backup file name
        LocalDateTime localStartExecuteTime = LocalDateTime.now();
        DateTimeFormatter sqlTimeFormatter = DateTimeFormatter.ofPattern(LocalDateTimeConstant.YYYY_MM_DD_HH_MM_SS_UNDERLINE);
        String sqlTimeNow = localStartExecuteTime.format(sqlTimeFormatter);

        // NOTE(review): the password appears on the container's command line
        // (visible via `ps`); consider a MySQL option file instead.
        String mysqldumpCommand = "mysqldump -u " + masterUsername + " -p" + masterPassword + " " + masterDatabase
                + " > " + databaseBackupDir + "backup_auth_admin_" + sqlTimeNow + ".sql";

        // Bug fix: ProcessBuilder treats each argument as one literal argv
        // token, so the previous ProcessBuilder(dockerCommand, mysqldumpCommand)
        // never executed anything. Run the dump inside the container through
        // `sh -c` so the shell redirection (>) works. `-it` is dropped: a
        // scheduled job has no TTY to attach.
        ProcessBuilder processBuilder = new ProcessBuilder(
                "docker", "exec", "bunny_auth_server", "sh", "-c", mysqldumpCommand);
        // Merge stderr into stdout so mysqldump errors are captured too
        processBuilder.redirectErrorStream(true);

        try {
            Process process = processBuilder.start();
            String output;
            try (InputStream inputStream = process.getInputStream()) {
                // Platform default charset, matching the original behavior
                output = new String(inputStream.readAllBytes());
            }
            int exitCode = process.waitFor();
            if (exitCode != 0) {
                throw new RuntimeException("Database backup failed, exit code " + exitCode + ": " + output);
            }
            log.info("Database backup finished: {}", output);
        } catch (RuntimeException exception) {
            throw exception;
        } catch (Exception exception) {
            throw new RuntimeException(exception);
        }
    }
}

View File

@ -7,15 +7,10 @@ import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
@Slf4j
@QuartzSchedulers(description = "JobHello任务内容")
@QuartzSchedulers(type = "test", description = "JobHello任务内容")
public class JobHello implements Job {
public void start() {
log.error("执行任务--JobHello。。。。。。。。。");
System.out.print("执行任务--JobHello。。。。。。。。。");
}
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
start();
System.out.print("执行任务--JobHello。。。。。。。。。");
}
}

View File

@ -7,15 +7,10 @@ import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
@Slf4j
@QuartzSchedulers(description = "Demo的类JobHello2")
@QuartzSchedulers(type = "test", description = "Demo的类JobHello2")
public class JobHello2 implements Job {
public void start() {
log.error("执行任务---JobHello2。。。。。。。。。");
System.out.print("执行任务--JobHello2。。。。。。。。。");
}
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
start();
System.out.print("执行任务--JobHello2。。。。。。。。。");
}
}

View File

@ -1,10 +1,12 @@
package cn.bunny.services.quartz;
import cn.bunny.services.aop.annotation.QuartzSchedulers;
import cn.bunny.services.factory.EmailFactory;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
@QuartzSchedulers(type = "email", description = "定时邮件任务")
public class MailingJob implements Job {
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {

View File

@ -86,9 +86,12 @@ public class SchedulersServiceImpl extends ServiceImpl<SchedulersMapper, Schedul
String classReference = cls.getName();
// 调度器详情
String description = cls.getAnnotation(QuartzSchedulers.class).description();
// 调度器类型
String type = cls.getAnnotation(QuartzSchedulers.class).type();
hashMap.put("value", classReference);
hashMap.put("label", description);
hashMap.put("type", type);
return hashMap;
}).toList();
}
@ -100,8 +103,12 @@ public class SchedulersServiceImpl extends ServiceImpl<SchedulersMapper, Schedul
*/
@SuppressWarnings("unchecked")
@Override
@CacheEvict(cacheNames = "schedulers", key = "'allSchedulers'", beforeInvocation = true)
// @CacheEvict(cacheNames = "schedulers", key = "'allSchedulers'", beforeInvocation = true)
public void addSchedulers(@Valid SchedulersAddDto dto) {
String jobName = dto.getJobName();
String jobGroup = dto.getJobGroup();
String cronExpression = dto.getCronExpression();
try {
// 动态创建Class对象
Class<?> className = Class.forName(dto.getJobClassName());
@ -111,18 +118,25 @@ public class SchedulersServiceImpl extends ServiceImpl<SchedulersMapper, Schedul
// 创建任务
JobDetail jobDetail = JobBuilder.newJob((Class<? extends Job>) className)
.withIdentity(dto.getJobName(), dto.getJobGroup())
.withIdentity(jobName, jobGroup)
.withDescription(dto.getDescription())
.build();
jobDetail.getJobDataMap().put("jobMethodName", "execute");
// 执行任务
CronTrigger trigger = TriggerBuilder.newTrigger()
.withIdentity(dto.getJobName(), dto.getJobGroup())
.withIdentity(jobName, jobGroup)
.withDescription(dto.getDescription())
.startNow()
.withSchedule(CronScheduleBuilder.cronSchedule(dto.getCronExpression()))
.withSchedule(CronScheduleBuilder.cronSchedule(cronExpression))
.build();
// 设置任务map值
JobDataMap jobDataMap = jobDetail.getJobDataMap();
jobDataMap.put("jobName", jobName);
jobDataMap.put("jobGroup", jobGroup);
jobDataMap.put("cronExpression", cronExpression);
jobDataMap.put("triggerName", trigger.getKey().getName());
scheduler.scheduleJob(jobDetail, trigger);
} catch (Exception exception) {
throw new BunnyException(exception.getMessage());

View File

@ -40,12 +40,13 @@ mybatis-plus:
# max-attempts: 3 # 最大重试次数
bunny:
datasource1:
master:
host: 192.168.3.98
port: 3304
sqlData: auth_admin
database: auth_admin
username: root
password: "02120212"
databaseBackupDir: "/home/backup/"
redis:
host: 192.168.3.98

View File

@ -24,12 +24,15 @@ knife4j:
production: true
bunny:
datasource1:
host: 106.15.251.123
port: 3306
sqlData: auth_admin
master:
# host: 106.15.251.123
# port: 3306
host: 192.168.3.98
port: 3304
database: auth_admin
username: root
password: "02120212"
databaseBackupDir: "/home/backup/"
redis:
host: 47.120.65.66

View File

@ -21,9 +21,9 @@ spring:
master:
type: com.zaxxer.hikari.HikariDataSource
driver-class-name: com.mysql.cj.jdbc.Driver
url: jdbc:mysql://${bunny.datasource1.host}:${bunny.datasource1.port}/${bunny.datasource1.sqlData}?serverTimezone=GMT%2B8&useSSL=false&characterEncoding=utf-8&allowPublicKeyRetrieval=true
username: ${bunny.datasource1.username}
password: ${bunny.datasource1.password}
url: jdbc:mysql://${bunny.master.host}:${bunny.master.port}/${bunny.master.database}?serverTimezone=GMT%2B8&useSSL=false&characterEncoding=utf-8&allowPublicKeyRetrieval=true
username: ${bunny.master.username}
password: ${bunny.master.password}
aop:
enabled: true

View File

@ -0,0 +1,21 @@
package cn.bunny.services.backup;
import cn.bunny.dao.pojo.constant.LocalDateTimeConstant;
import lombok.SneakyThrows;
import org.junit.jupiter.api.Test;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
/**
 * Sanity check for the timestamp format used in backup file names.
 */
public class MysqlBackupTest {

    /** Prints the current time rendered with the underline backup pattern. */
    @SneakyThrows
    @Test
    void testMysqlBackup() {
        DateTimeFormatter backupStampFormat =
                DateTimeFormatter.ofPattern(LocalDateTimeConstant.YYYY_MM_DD_HH_MM_SS_UNDERLINE);
        String stamped = LocalDateTime.now().format(backupStampFormat);
        System.out.println(stamped);
    }
}