<parent>
<groupId>org.onap.aai.aai-common</groupId>
<artifactId>aai-parent</artifactId>
- <version>1.15.1</version>
+ <version>1.15.2</version>
</parent>
<groupId>org.onap.aai.graphadmin</groupId>
<artifactId>aai-graphadmin</artifactId>
- <version>1.15.1-SNAPSHOT</version>
+ <version>1.15.2-SNAPSHOT</version>
<properties>
<docker.push.registry>localhost:5000</docker.push.registry>
<aai.docker.version>1.0.0</aai.docker.version>
<aai.schema.service.version>1.12.5</aai.schema.service.version>
- <aai.common.version>1.15.1</aai.common.version>
+ <aai.common.version>1.15.2</aai.common.version>
<aai.build.directory>${project.build.directory}/${project.artifactId}-${project.version}-build/
</aai.build.directory>
<aai.docker.namespace>onap</aai.docker.namespace>
<janusgraph.version>1.0.0</janusgraph.version>
<!-- fix the driver version to match the one defined in janusgraph-cql
- spring-boot (2.4) is otherwise downgrading it to 4.9.0 -->
- <!-- see https://github.com/spring-projects/spring-boot/blob/d336a96b7f204a398b8237560c5dfa7095c53460/spring-boot-project/spring-boot-dependencies/build.gradle#L163 -->
+ spring-boot (2.6) is otherwise downgrading it to 4.13.0 -->
+ <!-- see https://github.com/spring-projects/spring-boot/blob/f8c9fee3b0c8ff9ef48cf12fb4a9f8a51630a485/spring-boot-project/spring-boot-dependencies/build.gradle#L170 -->
<!-- see https://github.com/JanusGraph/janusgraph/blob/6105d67f412def90ed6e704fa01cbf656602e6c9/pom.xml#L112 -->
<cassandra-driver.version>4.17.0</cassandra-driver.version>
<datastax.native-protocol.version>1.5.1</datastax.native-protocol.version>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
- <version>25.0-jre</version>
+ <version>33.3.1-jre</version>
</dependency>
<!-- cassandra driver -->
<dependency>
</dependencyManagement>
<dependencies>
<!-- Start of graphadmin metric collection dependencies -->
- <dependency>
- <groupId>org.projectlombok</groupId>
- <artifactId>lombok</artifactId>
- <version>1.18.30</version>
- <scope>provided</scope>
- </dependency>
+ <dependency>
+ <groupId>org.projectlombok</groupId>
+ <artifactId>lombok</artifactId>
+ <version>1.18.30</version>
+ <scope>provided</scope>
+ </dependency>
<dependency>
<groupId>io.micrometer</groupId>
<artifactId>micrometer-core</artifactId>
<dependency>
<groupId>org.codehaus.janino</groupId>
<artifactId>janino</artifactId>
+ <version>3.1.12</version>
</dependency>
<dependency>
<groupId>net.logstash.logback</groupId>
<version>${netty.handler.version}</version>
</dependency>
<!-- End of Netty Dependencies -->
- <!-- Only used for the WebTestClient -->
+ <!-- Only used for the WebTestClient -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.PropertySource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
@Component
@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
public class DataCleanupTasks {
-
+
+ @Value("#{new Boolean('${datagroomingcleanup.enabled:true}')}")
+ private Boolean groomingCleanupEnabled;
+
+ @Value("#{new Boolean('${datasnapshotcleanup.enabled:true}')}")
+ private Boolean snapshotCleanupEnabled;
+
@Autowired
- private AaiScheduledTaskAuditLog auditLog;
-
+ private AaiScheduledTaskAuditLog auditLog;
+
private static final Logger logger = LoggerFactory.getLogger(DataCleanupTasks.class);
private final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMdd");
*/
@Scheduled(cron = "${datagroomingcleanup.cron}" )
public void dataGroomingCleanup() {
+ if(groomingCleanupEnabled != null && !groomingCleanupEnabled) {
+ logger.info("Skipping the scheduled grooming cleanup task since datagroomingcleanup.enabled=false");
+ return;
+ }
+
auditLog.logBefore("dataGroomingCleanup", ONAPComponents.AAI.toString() );
-
+
logger.debug("Started cron job dataGroomingCleanup @ " + simpleDateFormat.format(new Date()));
-
+
try {
String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
String dataGroomingDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataGrooming";
String archiveDir = dataGroomingDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
- String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming";
+ String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming";
File path = new File(dataGroomingDir);
File archivepath = new File(archiveDir);
File dataGroomingPath = new File(dataGroomingArcDir);
-
+
logger.debug("The logDir is " + logDir);
logger.debug("The dataGroomingDir is " + dataGroomingDir);
logger.debug("The archiveDir is " + archiveDir );
logger.debug("The dataGroomingArcDir is " + dataGroomingArcDir );
-
+
boolean exists = directoryExists(logDir);
logger.debug("Directory" + logDir + "exists: " + exists);
if(!exists)
logger.debug("The directory" + logDir +"does not exists");
-
+
Integer ageZip = AAIConfig.getInt("aai.datagrooming.agezip");
Integer ageDelete = AAIConfig.getInt("aai.datagrooming.agedelete");
-
+
Date newAgeZip = getZipDate(ageZip);
-
+
//Iterate through the dataGroomingDir
- File[] listFiles = path.listFiles();
+ File[] listFiles = path.listFiles();
if(listFiles != null) {
for(File listFile : listFiles) {
if (listFile.toString().contains("ARCHIVE")){
continue;
}
if(listFile.isFile()){
- logger.debug("The file name in dataGrooming: " +listFile.getName());
+ logger.debug("The file name in dataGrooming: " +listFile.getName());
Date fileCreateDate = fileCreationMonthDate(listFile);
logger.debug("The fileCreateDate in dataGrooming is " + fileCreateDate);
if( fileCreateDate.compareTo(newAgeZip) < 0) {
- archive(listFile,archiveDir,dataGroomingArcDir);
+ archive(listFile,archiveDir,dataGroomingArcDir);
}
}
}
}
-
+
Date newAgeDelete = getZipDate(ageDelete);
//Iterate through the archive/dataGrooming dir
- File[] listFilesArchive = dataGroomingPath.listFiles();
+ File[] listFilesArchive = dataGroomingPath.listFiles();
if(listFilesArchive != null) {
- for(File listFileArchive : listFilesArchive) {
+ for(File listFileArchive : listFilesArchive) {
if(listFileArchive.isFile()) {
- logger.debug("The file name in ARCHIVE/dataGrooming: " +listFileArchive.getName());
+ logger.debug("The file name in ARCHIVE/dataGrooming: " +listFileArchive.getName());
Date fileCreateDate = fileCreationMonthDate(listFileArchive);
logger.debug("The fileCreateDate in ARCHIVE/dataGrooming is " + fileCreateDate);
if(fileCreateDate.compareTo(newAgeDelete) < 0) {
delete(listFileArchive);
}
- }
+ }
}
}
}
logger.debug("Ended cron job dataGroomingCleanup @ " + simpleDateFormat.format(new Date()));
auditLog.logAfter();
}
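
For illustration, a minimal sketch (hypothetical class and field names, not part of this change) of the enable/disable toggle introduced above: Spring also converts a plain "${...:true}" placeholder straight to a boolean, so the SpEL new Boolean(...) form used here is one of several equivalent spellings, and the flag defaults to true when datatoolscrons.properties carries no datagroomingcleanup.enabled entry.

import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

// Sketch only: same skip-when-disabled pattern as the change above.
@Component
public class CleanupToggleSketch {

    // Defaults to true when the property is missing from datatoolscrons.properties.
    @Value("${datagroomingcleanup.enabled:true}")
    private boolean groomingCleanupEnabled;

    @Scheduled(cron = "${datagroomingcleanup.cron}")
    public void dataGroomingCleanup() {
        if (!groomingCleanupEnabled) {
            return; // operator disabled the job via configuration
        }
        // ... archive and delete old dataGrooming output files ...
    }
}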
-
+
/**
* This method checks if the directory exists
* @param dir the Directory
- *
+ *
*/
public boolean directoryExists(String dir) {
File path = new File(dir);
boolean exists = path.exists();
- return exists;
+ return exists;
}
-
+
public Date getZipDate(Integer days) {
return getZipDate(days, new Date());
}
-
+
public Date getZipDate(Integer days, Date date) {
-
+
Calendar cal = Calendar.getInstance();
logger.debug("The current date is " + date );
- cal.setTime(date);
+ cal.setTime(date);
cal.add(Calendar.DATE, -days);
Date newAgeZip = cal.getTime();
logger.debug("The newAgeDate is " +newAgeZip);
- return newAgeZip;
+ return newAgeZip;
}
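
A short usage sketch of getZipDate as it is applied in the cleanup loops above (someFile stands in for the loop variable; illustrative only): the returned cutoff is simply "now minus N days", and anything created before it is archived.

// Files created before "today minus aai.datagrooming.agezip days"
// are zipped into the ARCHIVE directory by the loop above.
Date cutoff = getZipDate(AAIConfig.getInt("aai.datagrooming.agezip"));
Date created = fileCreationMonthDate(someFile);
if (created.compareTo(cutoff) < 0) {
    archive(someFile, archiveDir, dataGroomingArcDir);
}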
-
-
+
+
public Date fileCreationMonthDate (File file) throws Exception {
BasicFileAttributes attr = Files.readAttributes(file.toPath(),
String formatted = simpleDateFormat.format( new Date( time.toMillis() ) );
return simpleDateFormat.parse(formatted);
}
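
The method above truncates the file's creation timestamp to day precision by round-tripping it through the yyyyMMdd formatter; a condensed sketch of that idea, with the lines elided from the excerpt treated as assumptions:

// Round-trip through "yyyyMMdd" so only the calendar day survives the comparison.
SimpleDateFormat dayFormat = new SimpleDateFormat("yyyyMMdd");
BasicFileAttributes attr = Files.readAttributes(file.toPath(), BasicFileAttributes.class);
Date creationDay = dayFormat.parse(dayFormat.format(new Date(attr.creationTime().toMillis())));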
-
+
/**
* This method will zip the files and add it to the archive folder
* Checks if the archive folder exists, if not then creates one
* @throws Exception
*/
public void archive(File file, String archiveDir, String afterArchiveDir) throws Exception {
-
- logger.debug("Inside the archive folder");
+
+ logger.debug("Inside the archive folder");
String filename = file.getName();
logger.debug("file name is " +filename);
-
+
String zipFile = afterArchiveDir + AAIConstants.AAI_FILESEP + filename;
-
+
File dataGroomingPath = new File(afterArchiveDir);
-
+
boolean exists = directoryExists(archiveDir);
- logger.debug("Directory" + archiveDir + "exists: " + exists);
+ logger.debug("Directory" + archiveDir + "exists: " + exists);
if(!exists) {
logger.debug("The directory" + archiveDir +"does not exists so will create a new archive folder");
- //Create an archive folder if does not exists
+ //Create an archive folder if does not exists
boolean flag = dataGroomingPath.mkdirs();
if(!flag)
- logger.debug("Failed to create ARCHIVE folder");
+ logger.debug("Failed to create ARCHIVE folder");
}
try(FileOutputStream outputstream = new FileOutputStream(zipFile + ".zip");
ZipOutputStream zoutputstream = new ZipOutputStream(outputstream);
int len;
while ((len = inputstream.read(buffer)) > 0) {
zoutputstream.write(buffer,0,len);
- }
+ }
//close all the sources
zoutputstream.closeEntry();
//Delete the file after been added to archive folder
delete(file);
logger.debug("The file archived is " + file + " at " + afterArchiveDir );
- }
+ }
}
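
For reference, a condensed sketch of the zip-and-move flow implemented by archive(); the stream setup elided from the excerpt is filled in here only as an assumption about its shape, not as the exact code:

// Assumed shape of the archive flow: stream the file into <afterArchiveDir>/<name>.zip,
// then remove the original once it has been added to ARCHIVE.
try (FileOutputStream outputstream = new FileOutputStream(zipFile + ".zip");
     ZipOutputStream zoutputstream = new ZipOutputStream(outputstream);
     FileInputStream inputstream = new FileInputStream(file)) {
    zoutputstream.putNextEntry(new ZipEntry(filename));
    byte[] buffer = new byte[1024];
    int len;
    while ((len = inputstream.read(buffer)) > 0) {
        zoutputstream.write(buffer, 0, len);
    }
    zoutputstream.closeEntry();
}
delete(file);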
-
+
/**
* This method will delete all the files from the archive folder that are older than 60 days
* @param file
*/
public static void delete(File file) {
-
+
logger.debug("Deleting the file " + file);
boolean deleteStatus = file.delete();
if(!deleteStatus){
- logger.debug("Failed to delete the file" +file);
+ logger.debug("Failed to delete the file" +file);
}
}
-
+
/**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.
logDir is the {project_home}/logs
archiveDir is the ARCHIVE directory where the files will be stored after 5 days.
*/
@Scheduled(cron = "${datasnapshotcleanup.cron}" )
public void dataSnapshotCleanup() {
-
- auditLog.logBefore("dataSnapshotCleanup", ONAPComponents.AAI.toString() );
-
- logger.debug("Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
-
+ if(snapshotCleanupEnabled != null && !snapshotCleanupEnabled) {
+ logger.info("Skipping the scheduled snapshot cleanup task since datasnapshotcleanup.enabled=false");
+ return;
+ }
+
+ auditLog.logBefore("dataSnapshotCleanup", ONAPComponents.AAI.toString() );
+
+ logger.debug("Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
+
try {
String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
String dataSnapshotDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataSnapshots";
String archiveDir = dataSnapshotDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
- String dataSnapshotArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataSnapshots";
+ String dataSnapshotArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataSnapshots";
File path = new File(dataSnapshotDir);
File dataSnapshotPath = new File(dataSnapshotArcDir);
-
+
logger.debug("The logDir is " + logDir);
logger.debug("The dataSnapshotDir is " + dataSnapshotDir);
logger.debug("The archiveDir is " + archiveDir );
logger.debug("The dataSnapshotArcDir is " + dataSnapshotArcDir );
-
+
boolean exists = directoryExists(logDir);
logger.debug("Directory" + logDir + "exists: " + exists);
if(!exists)
logger.debug("The directory" + logDir +"does not exists");
-
+
Integer ageZipSnapshot = AAIConfig.getInt("aai.datasnapshot.agezip");
Integer ageDeleteSnapshot = AAIConfig.getInt("aai.datasnapshot.agedelete");
-
+
Date newAgeZip = getZipDate(ageZipSnapshot);
-
+
//Iterate through the dataGroomingDir
- File[] listFiles = path.listFiles();
+ File[] listFiles = path.listFiles();
if(listFiles != null) {
for(File listFile : listFiles) {
if (listFile.toString().contains("ARCHIVE")){
continue;
}
if(listFile.isFile()){
- logger.debug("The file name in dataSnapshot: " +listFile.getName());
+ logger.debug("The file name in dataSnapshot: " +listFile.getName());
Date fileCreateDate = fileCreationMonthDate(listFile);
logger.debug("The fileCreateDate in dataSnapshot is " + fileCreateDate);
if( fileCreateDate.compareTo(newAgeZip) < 0) {
- archive(listFile,archiveDir,dataSnapshotArcDir);
+ archive(listFile,archiveDir,dataSnapshotArcDir);
}
}
}
}
-
+
Date newAgeDelete = getZipDate(ageDeleteSnapshot);
//Iterate through the archive/dataSnapshots dir
- File[] listFilesArchive = dataSnapshotPath.listFiles();
+ File[] listFilesArchive = dataSnapshotPath.listFiles();
if(listFilesArchive != null) {
- for(File listFileArchive : listFilesArchive) {
+ for(File listFileArchive : listFilesArchive) {
if(listFileArchive.isFile()) {
- logger.debug("The file name in ARCHIVE/dataSnapshot: " +listFileArchive.getName());
+ logger.debug("The file name in ARCHIVE/dataSnapshot: " +listFileArchive.getName());
Date fileCreateDate = fileCreationMonthDate(listFileArchive);
logger.debug("The fileCreateDate in ARCHIVE/dataSnapshot is " + fileCreateDate);
if(fileCreateDate.compareTo(newAgeDelete) < 0) {
delete(listFileArchive);
}
- }
+ }
}
}
dmaapEventsDataCleanup(newAgeDelete);
}
logger.debug("Ended cron dmaapEventsDataCleanup @ " + simpleDateFormat.format(new Date()));
}
-
+
public void dataMigrationCleanup() throws AAIException {
Integer ageDeleteSnapshot = AAIConfig.getInt("aai.datamigration.agedelete");
-
+
Date deleteAge = getZipDate(ageDeleteSnapshot);
-
+
logger.debug("Started dataMigrationCleanup @ " + simpleDateFormat.format(new Date()));
-
+
try {
String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
String dataMigrationCleanupDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "migration-input-files";
File path = new File(dataMigrationCleanupDir);
-
+
logger.debug("The logDir is " + logDir);
logger.debug("The migrationInputFilesDir is " + dataMigrationCleanupDir);