Upgrade spring boot to 2.6
author     Fiete Ostkamp <Fiete.Ostkamp@telekom.de>
           Fri, 8 Nov 2024 13:40:46 +0000 (14:40 +0100)
committer  Fiete Ostkamp <Fiete.Ostkamp@telekom.de>
           Mon, 11 Nov 2024 07:22:03 +0000 (08:22 +0100)
- upgrade spring-boot (2.5.15 -> 2.6.15)
- upgrade vulnerable guava (25.0 -> 33.3.1) and janino (3.1.9 -> 3.1.12) dependencies
- bump snapshot version to 1.15.2-SNAPSHOT

Issue-ID: AAI-4048
Change-Id: I34d8c62894c6398c5a51906469890c2f76f4a745
Signed-off-by: Fiete Ostkamp <Fiete.Ostkamp@telekom.de>
pom.xml
src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java
src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java
src/main/resources/application.properties
src/main/resources/etc/appprops/datatoolscrons.properties
version.properties

diff --git a/pom.xml b/pom.xml
index 563d7dd..020d16f 100755 (executable)
--- a/pom.xml
+++ b/pom.xml
     <parent>
         <groupId>org.onap.aai.aai-common</groupId>
         <artifactId>aai-parent</artifactId>
-        <version>1.15.1</version>
+        <version>1.15.2</version>
     </parent>
     <groupId>org.onap.aai.graphadmin</groupId>
     <artifactId>aai-graphadmin</artifactId>
-    <version>1.15.1-SNAPSHOT</version>
+    <version>1.15.2-SNAPSHOT</version>
 
     <properties>
 
@@ -55,7 +55,7 @@
         <docker.push.registry>localhost:5000</docker.push.registry>
         <aai.docker.version>1.0.0</aai.docker.version>
         <aai.schema.service.version>1.12.5</aai.schema.service.version>
-        <aai.common.version>1.15.1</aai.common.version>
+        <aai.common.version>1.15.2</aai.common.version>
         <aai.build.directory>${project.build.directory}/${project.artifactId}-${project.version}-build/
         </aai.build.directory>
         <aai.docker.namespace>onap</aai.docker.namespace>
         <janusgraph.version>1.0.0</janusgraph.version>
 
         <!-- fix the driver version to match the one defined in janusgraph-cql
-            spring-boot (2.4) is otherwise downgrading it to 4.9.0 -->
-        <!-- see https://github.com/spring-projects/spring-boot/blob/d336a96b7f204a398b8237560c5dfa7095c53460/spring-boot-project/spring-boot-dependencies/build.gradle#L163 -->
+            spring-boot (2.6) is otherwise downgrading it to 4.13.0 -->
+        <!-- see https://github.com/spring-projects/spring-boot/blob/f8c9fee3b0c8ff9ef48cf12fb4a9f8a51630a485/spring-boot-project/spring-boot-dependencies/build.gradle#L170 -->
         <!-- see https://github.com/JanusGraph/janusgraph/blob/6105d67f412def90ed6e704fa01cbf656602e6c9/pom.xml#L112 -->
         <cassandra-driver.version>4.17.0</cassandra-driver.version>
         <datastax.native-protocol.version>1.5.1</datastax.native-protocol.version>
             <dependency>
                 <groupId>com.google.guava</groupId>
                 <artifactId>guava</artifactId>
-                <version>25.0-jre</version>
+                <version>33.3.1-jre</version>
             </dependency>
             <!-- cassandra driver -->
             <dependency>
     </dependencyManagement>
     <dependencies>
         <!-- Start of graphadmin metric collection dependencies -->
-               <dependency>
-                       <groupId>org.projectlombok</groupId>
-                       <artifactId>lombok</artifactId>
-                       <version>1.18.30</version>
-                       <scope>provided</scope>
-               </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <version>1.18.30</version>
+            <scope>provided</scope>
+        </dependency>
         <dependency>
             <groupId>io.micrometer</groupId>
             <artifactId>micrometer-core</artifactId>
         <dependency>
             <groupId>org.codehaus.janino</groupId>
             <artifactId>janino</artifactId>
+            <version>3.1.12</version>
         </dependency>
         <dependency>
             <groupId>net.logstash.logback</groupId>
             <version>${netty.handler.version}</version>
         </dependency>
         <!-- End of Netty Dependencies -->
-               <!-- Only used for the WebTestClient -->
+        <!-- Only used for the WebTestClient -->
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-webflux</artifactId>
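
Note on the pom.xml hunk above: cassandra-driver.version stays pinned to 4.17.0 because the Spring Boot 2.6 dependency management would otherwise pull the driver down to 4.13.0, while JanusGraph 1.0.0 expects the newer version. A minimal, hypothetical check (not part of this change; it assumes the driver jar exposes an Implementation-Version manifest entry) to confirm which driver actually ends up on the runtime classpath:

import com.datastax.oss.driver.api.core.CqlSession;

public class DriverVersionCheck {
    public static void main(String[] args) {
        // Read the driver version from the jar manifest; may be null if the
        // Implementation-Version attribute is missing (assumption, not guaranteed).
        Package driverPackage = CqlSession.class.getPackage();
        String version = (driverPackage != null) ? driverPackage.getImplementationVersion() : null;
        System.out.println("java-driver-core on classpath: "
                + (version != null ? version : "unknown (no Implementation-Version in manifest)"));
        // With the pin above this should report 4.17.0 rather than the 4.13.0 managed by Spring Boot 2.6.
    }
}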
diff --git a/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java b/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
index df79335..2a4052a 100644 (file)
--- a/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
+++ b/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
@@ -40,6 +40,7 @@ import org.onap.logging.filter.base.ONAPComponents;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Component;
@@ -47,10 +48,16 @@ import org.springframework.stereotype.Component;
 @Component
 @PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
 public class DataCleanupTasks {
-    
+
+       @Value("#{new Boolean('${datagroomingcleanup.enabled:true}')}")
+       private Boolean groomingCleanupEnabled;
+
+       @Value("#{new Boolean('${datasnapshotcleanup.enabled:true}')}")
+       private Boolean snapshotCleanupEnabled;
+
        @Autowired
-    private AaiScheduledTaskAuditLog auditLog;
-       
+  private AaiScheduledTaskAuditLog auditLog;
+
        private static final Logger logger = LoggerFactory.getLogger(DataCleanupTasks.class);
        private final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMdd");
 
@@ -62,65 +69,70 @@ public class DataCleanupTasks {
        */
        @Scheduled(cron = "${datagroomingcleanup.cron}" )
        public void dataGroomingCleanup() {
+               if(groomingCleanupEnabled != null && !groomingCleanupEnabled) {
+                       logger.info("Skipping the scheduled grooming cleanup task since datagroomingcleanup.enabled=false");
+                       return;
+               }
+
                auditLog.logBefore("dataGroomingCleanup", ONAPComponents.AAI.toString() );
-               
+
                logger.debug("Started cron job dataGroomingCleanup @ " + simpleDateFormat.format(new Date()));
-               
+
                try {
                        String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
                        String dataGroomingDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataGrooming";
                        String archiveDir = dataGroomingDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
-                       String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming";             
+                       String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming";
                        File path = new File(dataGroomingDir);
                        File archivepath = new File(archiveDir);
                        File dataGroomingPath = new File(dataGroomingArcDir);
-               
+
                        logger.debug("The logDir is " + logDir);
                        logger.debug("The dataGroomingDir is " + dataGroomingDir);
                        logger.debug("The archiveDir is " + archiveDir );
                        logger.debug("The dataGroomingArcDir is " + dataGroomingArcDir );
-               
+
                        boolean exists = directoryExists(logDir);
                        logger.debug("Directory" + logDir + "exists: " + exists);
                        if(!exists)
                                logger.debug("The directory" + logDir +"does not exists");
-               
+
                        Integer ageZip = AAIConfig.getInt("aai.datagrooming.agezip");
                        Integer ageDelete = AAIConfig.getInt("aai.datagrooming.agedelete");
-                                                       
+
                        Date newAgeZip = getZipDate(ageZip);
-                                                               
+
                        //Iterate through the dataGroomingDir
-                       File[] listFiles = path.listFiles();  
+                       File[] listFiles = path.listFiles();
                        if(listFiles != null) {
                                for(File listFile : listFiles) {
                                        if (listFile.toString().contains("ARCHIVE")){
                                                continue;
                                        }
                                        if(listFile.isFile()){
-                                               logger.debug("The file name in dataGrooming: " +listFile.getName()); 
+                                               logger.debug("The file name in dataGrooming: " +listFile.getName());
                                                Date fileCreateDate = fileCreationMonthDate(listFile);
                                                logger.debug("The fileCreateDate in dataGrooming is " + fileCreateDate);
                                                if( fileCreateDate.compareTo(newAgeZip) < 0) {
-                                               archive(listFile,archiveDir,dataGroomingArcDir);                                                
+                                               archive(listFile,archiveDir,dataGroomingArcDir);
                                                }
                                        }
                                }
                        }
-               
+
                        Date newAgeDelete = getZipDate(ageDelete);
                        //Iterate through the archive/dataGrooming dir
-                       File[] listFilesArchive = dataGroomingPath.listFiles(); 
+                       File[] listFilesArchive = dataGroomingPath.listFiles();
                        if(listFilesArchive != null) {
-                               for(File listFileArchive : listFilesArchive) { 
+                               for(File listFileArchive : listFilesArchive) {
                                        if(listFileArchive.isFile()) {
-                               logger.debug("The file name in ARCHIVE/dataGrooming: " +listFileArchive.getName()); 
+                               logger.debug("The file name in ARCHIVE/dataGrooming: " +listFileArchive.getName());
                                Date fileCreateDate = fileCreationMonthDate(listFileArchive);
                                logger.debug("The fileCreateDate in ARCHIVE/dataGrooming is " + fileCreateDate);
                                if(fileCreateDate.compareTo(newAgeDelete) < 0) {
                                        delete(listFileArchive);
                                        }
-                               }       
+                               }
                        }
                        }
                }
@@ -131,34 +143,34 @@ public class DataCleanupTasks {
                logger.debug("Ended cron job dataGroomingCleanup @ " + simpleDateFormat.format(new Date()));
                auditLog.logAfter();
        }
-       
+
     /**
      * This method checks if the directory exists
      * @param dir the Directory
-     * 
+     *
      */
     public boolean directoryExists(String dir) {
        File path = new File(dir);
                boolean exists = path.exists();
-               return exists;  
+               return exists;
     }
-    
+
     public Date getZipDate(Integer days) {
        return getZipDate(days, new Date());
     }
-    
+
     public Date getZipDate(Integer days, Date date) {
-       
+
        Calendar cal = Calendar.getInstance();
        logger.debug("The current date is " + date );
-       cal.setTime(date);      
+       cal.setTime(date);
        cal.add(Calendar.DATE, -days);
        Date newAgeZip = cal.getTime();
                logger.debug("The newAgeDate is " +newAgeZip);
-               return newAgeZip;               
+               return newAgeZip;
     }
-    
-    
+
+
     public Date fileCreationMonthDate (File file) throws Exception {
 
         BasicFileAttributes attr = Files.readAttributes(file.toPath(),
@@ -167,7 +179,7 @@ public class DataCleanupTasks {
            String formatted = simpleDateFormat.format( new Date( time.toMillis() ) );
            return simpleDateFormat.parse(formatted);
     }
-    
+
     /**
      * This method will zip the files and add it to the archive folder
      * Checks if the archive folder exists, if not then creates one
@@ -175,23 +187,23 @@ public class DataCleanupTasks {
      * @throws Exception
      */
     public void archive(File file, String archiveDir, String afterArchiveDir) throws Exception {
-               
-       logger.debug("Inside the archive folder");  
+
+       logger.debug("Inside the archive folder");
        String filename = file.getName();
        logger.debug("file name is " +filename);
-               
+
                String zipFile = afterArchiveDir + AAIConstants.AAI_FILESEP + filename;
-               
+
                File dataGroomingPath = new File(afterArchiveDir);
-       
+
                boolean exists = directoryExists(archiveDir);
-               logger.debug("Directory" + archiveDir + "exists: " + exists);           
+               logger.debug("Directory" + archiveDir + "exists: " + exists);
                if(!exists) {
                        logger.debug("The directory" + archiveDir +"does not exists so will create a new archive folder");
-                       //Create an archive folder if does not exists           
+                       //Create an archive folder if does not exists
                        boolean flag = dataGroomingPath.mkdirs();
                        if(!flag)
-                               logger.debug("Failed to create ARCHIVE folder");                
+                               logger.debug("Failed to create ARCHIVE folder");
                }
                try(FileOutputStream outputstream = new FileOutputStream(zipFile + ".zip");
                                ZipOutputStream zoutputstream = new ZipOutputStream(outputstream);
@@ -202,28 +214,28 @@ public class DataCleanupTasks {
                        int len;
                        while ((len = inputstream.read(buffer)) > 0) {
                                zoutputstream.write(buffer,0,len);
-                       }                       
+                       }
                        //close all the sources
                        zoutputstream.closeEntry();
                        //Delete the file after been added to archive folder
                        delete(file);
                        logger.debug("The file archived is " + file + " at " + afterArchiveDir );
-               }       
+               }
     }
-    
+
     /**
      * This method will delete all the files from the archive folder that are older than 60 days
      * @param file
      */
     public static void delete(File file) {
-       
+
        logger.debug("Deleting the file " + file);
        boolean deleteStatus = file.delete();
                if(!deleteStatus){
-                       logger.debug("Failed to delete the file" +file);                        
+                       logger.debug("Failed to delete the file" +file);
                }
     }
-    
+
     /**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.
        logDir is the {project_home}/logs
        archiveDir is the ARCHIVE directory where the files will be stored after 5 days.
@@ -232,65 +244,69 @@ public class DataCleanupTasks {
 */
     @Scheduled(cron = "${datasnapshotcleanup.cron}" )
     public void dataSnapshotCleanup() {
-               
-               auditLog.logBefore("dataSnapshotCleanup", ONAPComponents.AAI.toString() );
-       
-               logger.debug("Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
-       
+                       if(snapshotCleanupEnabled != null && !snapshotCleanupEnabled) {
+                               logger.info("Skipping the scheduled snapshot cleanup task since datasnapshotcleanup.enabled=false");
+                               return;
+                       }
+
+                       auditLog.logBefore("dataSnapshotCleanup", ONAPComponents.AAI.toString() );
+
+                       logger.debug("Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
+
        try {
                String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
                String dataSnapshotDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataSnapshots";
                String archiveDir = dataSnapshotDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
-               String dataSnapshotArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataSnapshots";            
+               String dataSnapshotArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataSnapshots";
                File path = new File(dataSnapshotDir);
                File dataSnapshotPath = new File(dataSnapshotArcDir);
-       
+
                logger.debug("The logDir is " + logDir);
                logger.debug("The dataSnapshotDir is " + dataSnapshotDir);
                logger.debug("The archiveDir is " + archiveDir );
                logger.debug("The dataSnapshotArcDir is " + dataSnapshotArcDir );
-       
+
                boolean exists = directoryExists(logDir);
                logger.debug("Directory" + logDir + "exists: " + exists);
                if(!exists)
                        logger.debug("The directory" + logDir +"does not exists");
-       
+
                Integer ageZipSnapshot = AAIConfig.getInt("aai.datasnapshot.agezip");
                Integer ageDeleteSnapshot = AAIConfig.getInt("aai.datasnapshot.agedelete");
-               
+
                Date newAgeZip = getZipDate(ageZipSnapshot);
-                                       
+
                //Iterate through the dataGroomingDir
-               File[] listFiles = path.listFiles();  
+               File[] listFiles = path.listFiles();
                if(listFiles != null) {
                        for(File listFile : listFiles) {
                                if (listFile.toString().contains("ARCHIVE")){
                                        continue;
                                }
                                if(listFile.isFile()){
-                                       logger.debug("The file name in dataSnapshot: " +listFile.getName()); 
+                                       logger.debug("The file name in dataSnapshot: " +listFile.getName());
                                        Date fileCreateDate = fileCreationMonthDate(listFile);
                                        logger.debug("The fileCreateDate in dataSnapshot is " + fileCreateDate);
                                        if( fileCreateDate.compareTo(newAgeZip) < 0) {
-                                               archive(listFile,archiveDir,dataSnapshotArcDir);                                                
+                                               archive(listFile,archiveDir,dataSnapshotArcDir);
                                        }
                                }
                        }
                }
-       
+
                Date newAgeDelete = getZipDate(ageDeleteSnapshot);
                //Iterate through the archive/dataSnapshots dir
-               File[] listFilesArchive = dataSnapshotPath.listFiles(); 
+               File[] listFilesArchive = dataSnapshotPath.listFiles();
                if(listFilesArchive != null) {
-                       for(File listFileArchive : listFilesArchive) { 
+                       for(File listFileArchive : listFilesArchive) {
                                if(listFileArchive.isFile()) {
-                                       logger.debug("The file name in ARCHIVE/dataSnapshot: " +listFileArchive.getName()); 
+                                       logger.debug("The file name in ARCHIVE/dataSnapshot: " +listFileArchive.getName());
                                        Date fileCreateDate = fileCreationMonthDate(listFileArchive);
                                        logger.debug("The fileCreateDate in ARCHIVE/dataSnapshot is " + fileCreateDate);
                                        if(fileCreateDate.compareTo(newAgeDelete) < 0) {
                                                delete(listFileArchive);
                                        }
-                               }       
+                               }
                        }
                }
                dmaapEventsDataCleanup(newAgeDelete);
@@ -338,19 +354,19 @@ public class DataCleanupTasks {
                }
                logger.debug("Ended cron dmaapEventsDataCleanup @ " + simpleDateFormat.format(new Date()));
        }
-       
+
     public void dataMigrationCleanup() throws AAIException {
                Integer ageDeleteSnapshot = AAIConfig.getInt("aai.datamigration.agedelete");
-               
+
                Date deleteAge = getZipDate(ageDeleteSnapshot);
-       
+
                logger.debug("Started dataMigrationCleanup @ " + simpleDateFormat.format(new Date()));
-       
+
        try {
                String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
                String dataMigrationCleanupDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "migration-input-files";
                File path = new File(dataMigrationCleanupDir);
-               
+
                logger.debug("The logDir is " + logDir);
                        logger.debug("The migrationInputFilesDir is " + dataMigrationCleanupDir);
 
diff --git a/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java b/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java
index 4e162ca..c19fb55 100644 (file)
--- a/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java
+++ b/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java
@@ -40,12 +40,14 @@ import org.onap.logging.filter.base.ONAPComponents;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Component;
 
 @Component
 @PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
+@ConditionalOnProperty(name="datagroomingtasks.enabled", havingValue = "true", matchIfMissing = true)
 public class DataGroomingTasks {
 
        private AaiScheduledTaskAuditLog auditLog;
diff --git a/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java b/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java
index d8cb65a..36aa560 100644 (file)
--- a/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java
+++ b/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java
@@ -26,24 +26,21 @@ import java.text.SimpleDateFormat;
 import java.util.*;
 
 import org.onap.aai.aailog.logs.AaiScheduledTaskAuditLog;
-import org.onap.aai.datagrooming.DataGrooming;
-import org.onap.aai.datagrooming.DataGroomingTasks;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.logging.ErrorLogHelper;
 import org.onap.aai.logging.LogFormatTools;
 import org.onap.aai.util.AAIConfig;
 import org.onap.logging.filter.base.ONAPComponents;
-import org.onap.logging.ref.slf4j.ONAPLogConstants;
-import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Component;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.slf4j.MDC;
 
 @Component
 @PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
+@ConditionalOnProperty(name="datasnapshottasks.enabled", havingValue = "true", matchIfMissing = true)
 public class DataSnapshotTasks {
 
        private AaiScheduledTaskAuditLog auditLog;
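
DataGroomingTasks and DataSnapshotTasks take the second approach shown above: @ConditionalOnProperty with matchIfMissing = true, so the component is registered only when the property is absent or set to "true"; setting it to false keeps the bean and all of its @Scheduled methods out of the context entirely. A minimal sketch (names are illustrative):

import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Component
@ConditionalOnProperty(name = "sampletasks.enabled", havingValue = "true", matchIfMissing = true)
public class SampleTasks {

    // Never invoked when sampletasks.enabled=false, because the bean is not created at all.
    @Scheduled(cron = "${sampletasks.cron:0 45 * * * ?}")
    public void run() {
        // scheduled work
    }
}

The difference to the @Value guard in DataCleanupTasks is where the decision is made: @ConditionalOnProperty is evaluated once when the context starts and removes the component, while the @Value guard keeps the bean registered and returns early on every trigger.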
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index ba35da5..0fda84d 100644 (file)
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -99,6 +99,7 @@ management.endpoints.enabled-by-default=true
 #To Enable Actuator Endpoint, you can override this in OOM Charts
 management.endpoints.web.exposure.include=info, health, loggers, prometheus
 management.metrics.web.server.auto-time-requests=false
+management.info.env.enabled=true
 
 # If true, the actuator health check will be overriden
 # to use the AaiGraphChecker check instead
diff --git a/src/main/resources/etc/appprops/datatoolscrons.properties b/src/main/resources/etc/appprops/datatoolscrons.properties
index e37931f..bbad8d5 100644 (file)
--- a/src/main/resources/etc/appprops/datatoolscrons.properties
+++ b/src/main/resources/etc/appprops/datatoolscrons.properties
@@ -5,7 +5,12 @@
 #for more information refer to http://www.quartz-scheduler.org/documentation/quartz-2.x/tutorials/crontrigger.html
 #this site can generate new expressions for you: http://www.cronmaker.com/
 #BUT you must omit the last (seventh) column when you copy its output (spring expects exactly 6 fields and doesn't allow the seventh optional one)
+datagroomingcleanup.enabled=${DATA_GROOMING_CLEANUP_ENABLED:true}
 datagroomingcleanup.cron=0 06 0 * * ?
+datagroomingtasks.enabled=${DATA_GROOMING_TASKS_ENABLED:true}
+datagroomingtasks.cron=0 10 1,5,9,13,17,21 * * ?
+
+datasnapshotcleanup.enabled=${DATA_SNAPSHOT_CLEANUP_ENABLED:true}
 datasnapshotcleanup.cron=0 17 0 * * ?
+datasnapshottasks.enabled=${DATA_SNAPSHOT_TASKS_ENABLED:true}
 datasnapshottasks.cron=0 45 * * * ?
-datagroomingtasks.cron=0 10 1,5,9,13,17,21 * * ?
\ No newline at end of file
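
Each new *.enabled key in datatoolscrons.properties defaults through an environment-variable placeholder (for example ${DATA_GROOMING_CLEANUP_ENABLED:true}), so the toggles can be flipped from the container environment (e.g. via OOM chart values) without editing the file. A minimal sketch of how such a placeholder resolves in plain Spring (no Boot context assumed; class name is illustrative):

import org.springframework.core.env.StandardEnvironment;

public class ToggleResolutionDemo {
    public static void main(String[] args) {
        // StandardEnvironment checks JVM system properties and OS environment variables,
        // then falls back to the default after the ':' when neither defines the key.
        StandardEnvironment environment = new StandardEnvironment();
        String value = environment.resolvePlaceholders("${DATA_GROOMING_CLEANUP_ENABLED:true}");
        // Prints "true" unless DATA_GROOMING_CLEANUP_ENABLED (or -DDATA_GROOMING_CLEANUP_ENABLED=...) overrides it.
        System.out.println("datagroomingcleanup.enabled -> " + value);
    }
}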
diff --git a/version.properties b/version.properties
index bcf6bda..df31475 100644 (file)
--- a/version.properties
+++ b/version.properties
@@ -5,7 +5,7 @@
 
 major_version=1
 minor_version=15
-patch_version=1
+patch_version=2
 
 base_version=${major_version}.${minor_version}.${patch_version}