update junit to recheck for snapshot file 95/108595/5
author LaMont, William (wl2432) <wl2432@att.com>
Fri, 29 May 2020 21:44:43 +0000 (17:44 -0400)
committer LaMont, William (wl2432) <wl2432@att.com>
Mon, 6 Jul 2020 18:41:19 +0000 (14:41 -0400)
Issue-ID: AAI-2922
Change-Id: I3ca6189fa6f1eec245f5299c79f052f0fac42be7
Signed-off-by: LaMont, William (wl2432) <wl2432@att.com>
57 files changed:
src/main/assembly/descriptor.xml
src/main/java/org/onap/aai/GraphAdminApp.java
src/main/java/org/onap/aai/audit/AuditGraphson2Sql.java [new file with mode: 0644]
src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
src/main/java/org/onap/aai/dataexport/DataExportTasks.java
src/main/java/org/onap/aai/datagrooming/DataGrooming.java
src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java
src/main/java/org/onap/aai/datasnapshot/DataSnapshot4HistInit.java
src/main/java/org/onap/aai/db/schema/ScriptDriver.java
src/main/java/org/onap/aai/dbgen/DupeTool.java
src/main/java/org/onap/aai/dbgen/DynamicPayloadGenerator.java
src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java
src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java
src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod4Hist.java
src/main/java/org/onap/aai/historytruncate/HistoryTruncate.java
src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java
src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java
src/main/java/org/onap/aai/migration/MigrationController.java
src/main/java/org/onap/aai/migration/MigrationControllerInternal.java
src/main/java/org/onap/aai/migration/Migrator.java
src/main/java/org/onap/aai/migration/NotificationHelper.java
src/main/java/org/onap/aai/migration/ValueMigrator.java
src/main/java/org/onap/aai/migration/VertexMerge.java
src/main/java/org/onap/aai/migration/v20/MigrateL2DefaultToFalse.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v20/MigrateVlanTag.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/AuditSqlDbConsumer.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/client/ApertureConfiguration.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/client/ApertureService.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/client/ApertureServiceNoAuthClient.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/client/ApertureServiceOneWayClient.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/client/ApertureServiceRestClient.java [new file with mode: 0644]
src/main/java/org/onap/aai/schema/GenTester.java
src/main/java/org/onap/aai/schema/GenTester4Hist.java
src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
src/main/java/org/onap/aai/util/SendMigrationNotifications.java
src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java
src/main/java/org/onap/aai/web/JerseyConfiguration.java
src/main/resources/application.properties
src/main/resources/etc/appprops/aaiconfig.properties
src/main/resources/etc/appprops/datatoolscrons.properties
src/main/resources/logback.xml
src/main/resources/uniquePropertyCheck-logback.xml
src/main/scripts/common_functions.sh
src/main/scripts/dupeTool.sh
src/main/scripts/dynamicPayloadArchive.sh
src/main/scripts/extract-events.sh [new file with mode: 0644]
src/main/scripts/getDslResult.sh [new file with mode: 0644]
src/main/scripts/resend-dmaap-events.sh
src/main/scripts/run_SendDeleteMigrationNotification.sh
src/main/scripts/run_SendMigrationNotification.sh
src/test/java/org/onap/aai/AAIGremlinQueryTest.java
src/test/java/org/onap/aai/audit/AuditGraphson2SqlTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/datasnapshot/DataSnapshotTest.java
src/test/java/org/onap/aai/datasnapshot/DataSnapshotTest4HistInit.java [new file with mode: 0644]
src/test/java/org/onap/aai/dbgen/ForceDeleteToolTest.java
src/test/java/org/onap/aai/migration/v20/MigrateL2DefaultToFalseTest.java [new file with mode: 0644]
src/test/resources/logback.xml

diff --git a/src/main/assembly/descriptor.xml b/src/main/assembly/descriptor.xml
index 91e8e18..af77dee 100644
@@ -1,32 +1,35 @@
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"\r
-          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\r
-          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">\r
-    <id>build</id>\r
-    <includeBaseDirectory>false</includeBaseDirectory>\r
-    <formats>\r
-        <format>dir</format>\r
-    </formats>\r
-    <fileSets>\r
-        <fileSet>\r
-            <directory>${project.basedir}/src/main/resources</directory>\r
-            <outputDirectory>/resources</outputDirectory>\r
-            <includes>\r
-                <include>**/*</include>\r
-            </includes>\r
-        </fileSet>\r
-        <fileSet>\r
-            <directory>${project.basedir}/src/main/scripts</directory>\r
-            <outputDirectory>/bin</outputDirectory>\r
-            <includes>\r
-                <include>**/*</include>\r
-            </includes>\r
-        </fileSet>\r
-        <fileSet>\r
-            <directory>${project.build.directory}</directory>\r
-            <outputDirectory>/lib</outputDirectory>\r
-            <includes>\r
-                <include>${project.artifactId}-${project.version}.jar</include>\r
-            </includes>\r
-        </fileSet>\r
-    </fileSets>\r
-</assembly>\r
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
+    <id>build</id>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <formats>
+        <format>dir</format>
+    </formats>
+    <fileSets>
+        <fileSet>
+            <directory>${project.basedir}/src/main/resources</directory>
+            <outputDirectory>/resources</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+            <fileMode>755</fileMode>
+        </fileSet>
+        <fileSet>
+            <directory>${project.basedir}/src/main/scripts</directory>
+            <outputDirectory>/bin</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+            <fileMode>777</fileMode>
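+            <!-- 777 (rwxrwxrwx) presumably keeps the shipped scripts under /bin
+                 executable for any user; resources and the jar use 755 -->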
+        </fileSet>
+        <fileSet>
+            <directory>${project.build.directory}</directory>
+            <outputDirectory>/lib</outputDirectory>
+            <includes>
+                <include>${project.artifactId}-${project.version}.jar</include>
+            </includes>
+            <fileMode>755</fileMode>
+        </fileSet>
+    </fileSets>
+</assembly>
diff --git a/src/main/java/org/onap/aai/GraphAdminApp.java b/src/main/java/org/onap/aai/GraphAdminApp.java
index 3f7abf3..1339604 100644
  */
 package org.onap.aai;
 
+import com.att.eelf.configuration.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.onap.aai.aailog.logs.AaiDebugLog;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
+import java.util.Properties;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LogFormatTools;
 import org.onap.aai.nodes.NodeIngestor;
 import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
 import org.onap.aai.util.ExceptionTranslator;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
@@ -36,6 +41,7 @@ import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
 import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
 import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.EnableAspectJAutoProxy;
 import org.springframework.core.env.Environment;
 import org.springframework.scheduling.annotation.EnableAsync;
 import org.springframework.scheduling.annotation.EnableScheduling;
@@ -61,10 +67,13 @@ import java.util.UUID;
     "org.onap.aai.datagrooming",
     "org.onap.aai.dataexport",
     "org.onap.aai.datacleanup",
-    "org.onap.aai.aailog"
+    "org.onap.aai.aailog",
+    "org.onap.aai.failover",
+    "org.onap.aai.audit"
 })
 @EnableAsync
 @EnableScheduling
+@EnableAspectJAutoProxy
 @EnableAutoConfiguration(exclude = {DataSourceAutoConfiguration.class, HibernateJpaAutoConfiguration.class})
 public class GraphAdminApp {
 
diff --git a/src/main/java/org/onap/aai/audit/AuditGraphson2Sql.java b/src/main/java/org/onap/aai/audit/AuditGraphson2Sql.java
new file mode 100644
index 0000000..ff29157
--- /dev/null
@@ -0,0 +1,605 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.aai.audit;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.javatuples.Triplet;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.EdgeRule;
+import org.onap.aai.edges.exceptions.EdgeRuleNotFoundException;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.rest.client.ApertureService;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.ExceptionTranslator;
+import org.onap.aai.util.FormatDate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+import java.io.*;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.stream.Collectors;
+
+import com.att.eelf.configuration.Configuration;
+import org.springframework.stereotype.Component;
+
+@Component
+public class AuditGraphson2Sql {
+       
+       private EdgeIngestor ei;
+       private Loader loader;
+       private static final Logger LOGGER = LoggerFactory.getLogger(AuditGraphson2Sql.class.getSimpleName());
+       private SchemaVersions schemaVersions;
+       private ApertureService apertureService;
+
+    public static final String DEFAULT_SRC_DIR = "logs/data/dataSnapshots/"; 
+    public static final int DEFAULT_THRESHOLD_PERCENT = 10;
+    public static final String DEFAULT_OUTPUT_DIR = "logs/data/audit";
+
+    //DEBUG -- should be getting default-src-dir, default-output-dir and rdbms-db-name from param file
+       @Autowired
+       public AuditGraphson2Sql( EdgeIngestor ei, SchemaVersions schemaVersions, LoaderFactory loaderFactory, ApertureService apertureService) {
+               this.schemaVersions = schemaVersions;
+               this.loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion());
+               this.ei = ei;
+           this.apertureService = apertureService;
+       }
+
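+       /**
+        * Counts nodes and edges in a GraphSON snapshot file, pulls the matching
+        * table counts for the relational copy via the Aperture service, and
+        * writes a comparison report under DEFAULT_OUTPUT_DIR.
+        * Example call (db and file names are hypothetical):
+        *   runAudit("aai", "dataSnapshot.graphSON.202005292144", null)
+        */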
+       public String runAudit(String dbName, String sourceFName, String sourceDir )
+                               throws Exception {
+
+               if( sourceDir == null || sourceDir.isEmpty() ){
+                       sourceDir = DEFAULT_SRC_DIR;
+               }
+
+        HashMap <String,Integer> gsonTableCounts = new HashMap <> ();
+        try {
+               gsonTableCounts = getCountsFromGraphsonSnapshot(dbName, sourceFName, sourceDir);
+        } catch( Exception e ) {
+               LOGGER.error(" ERROR when calling getCountsFromGraphsonSnapshot(" 
+                               + dbName + "," 
+                               + sourceFName  + ").  Exception = " 
+                               + e.getMessage() );
+               throw e;
+        }
+        
+        long timestamp = 0L;
+        try {
+               timestamp = getTimestampFromFileName(sourceFName);
+        } catch( Exception e ) {
+               LOGGER.error(" ERROR getting timestamp from filename: " 
+                               + e.getMessage() );
+                       throw e;
+        }
+
+        JsonObject rdbmsJsonResults = new JsonObject ();
+               try {
+                       rdbmsJsonResults = getRdbmsTableCounts(timestamp, dbName);
+        } catch( Exception e ) {
+               LOGGER.error(" ERROR getting table count info from mySql. timestamp = ["
+                               + timestamp + "], dbName = [" + dbName +
+                                       "].  Exception = " + e.getMessage() );
+                       throw e;
+        }
+               HashMap <String,Integer> sqlNodeCounts = getNodeCountsFromSQLRes(rdbmsJsonResults);
+        HashMap <String,Integer> sqlEdgeCounts = getEdgeCountsFromSQLRes(rdbmsJsonResults);
+
+        String resultString = "Audit ran and logged to file: ";
+        try {
+               String titleInfo = "Comparing data from GraphSon file: " +
+                       sourceFName + ", and Aperture data using timeStamp = [" + 
+                       timestamp + "] ";
+               String outFileName = compareResults(gsonTableCounts, sqlNodeCounts,
+                                       sqlEdgeCounts, DEFAULT_OUTPUT_DIR, titleInfo);
+               resultString = resultString + outFileName;
+        }
+        catch( IOException ie) {
+               LOGGER.error(" ERROR writing to output file. [" + ie.getMessage() + "]" );
+                       throw new Exception( ie.getMessage() );
+        }
+        
+        return resultString;
+        
+       }
+       
+       
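+    /**
+     * Writes one line per table comparing GraphSON vs SQL counts, flags tables
+     * that are missing or empty on either side, and returns the name of the
+     * report file (outputDir/gson-sql-audit<yyyyMMddHHmm>).
+     */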
+    public String compareResults(HashMap <String,Integer> gsonTableCounts,
+               HashMap <String,Integer> sqlNodeCounts, 
+               HashMap <String,Integer> sqlEdgeCounts,
+               String outputDir, String titleInfo) 
+       throws IOException {
+       
+       FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
+               String dteStr = fd.getDateTime();
+               String fName = outputDir + "/"  + "gson-sql-audit" + dteStr;
+       File auditOutFile = new File(fName);
+        auditOutFile.getParentFile().mkdirs();
+        BufferedWriter outWriter = new BufferedWriter(new FileWriter(auditOutFile));
+        outWriter.write(titleInfo);
+        outWriter.newLine();
+       outWriter.write("Totals from Graphson: " + getGsonTotals(gsonTableCounts));
+       outWriter.newLine();
+       outWriter.write("Totals from mySql: " + getMSqlTotals(sqlNodeCounts,sqlEdgeCounts));
+       outWriter.newLine();
+       outWriter.newLine();
+       
+       for (Map.Entry<String, Integer> entry : gsonTableCounts.entrySet()) {
+           String tblKey = entry.getKey();
+           int gVal = entry.getValue();
+           if(!sqlNodeCounts.containsKey(tblKey) && !sqlEdgeCounts.containsKey(tblKey)) {
+               String msg = "> Entry for Table: [" + tblKey 
+                               + "] not found in the SQL db but had "
+                               + gVal + " entries in the GraphSon db";
+               LOGGER.error(msg);
+               System.out.println(msg);
+               outWriter.write(msg);
+               outWriter.newLine();
+               continue;
+           }
+           int sVal = 0;
+           if( sqlNodeCounts.containsKey(tblKey) ) {
+               sVal = sqlNodeCounts.get(tblKey);
+           }else {
+               sVal = sqlEdgeCounts.get(tblKey);
+           }
+           if ((gVal > 0) && (sVal == 0)){
+               String msg = "For Table [" + tblKey + "], GSon count = " 
+                               + gVal + ", zero found in SQL db.";
+               LOGGER.error(msg);
+               System.out.println(msg);
+               outWriter.write(msg);
+               outWriter.newLine();
+           }
+           String msg = tblKey + ": gson/sql = " + gVal + "/" + sVal;
+           LOGGER.debug(msg);
+               System.out.println(msg); 
+               outWriter.write(msg);
+               outWriter.newLine();
+       }
+       for (Map.Entry<String, Integer> entry : sqlNodeCounts.entrySet()) {
+               // check for Node types that are no longer on the Graphson side, but are 
+               // still in the SQL DB
+           String tblKey = entry.getKey();
+           int sVal = entry.getValue();
+           if(!gsonTableCounts.containsKey(tblKey)) {
+               String msg = " Entry for Table [" + tblKey 
+                               + "] not found in the Graphson db, but had " 
+                               + sVal + " entries in the SQL db.";
+               LOGGER.error(msg);
+               System.out.println(msg);
+               outWriter.write(msg);
+               outWriter.newLine();
+               continue;
+           }
+       }
+       for (Map.Entry<String, Integer> entry : sqlEdgeCounts.entrySet()) {
+               // check for Edge+Label combos that are no longer on the Graphson side, but are 
+               // still in the SQL DB
+           String tblKey = entry.getKey();
+           int sVal = entry.getValue();
+           if(!gsonTableCounts.containsKey(tblKey)) {
+               String msg = " Entry for edge+Label combo [" + tblKey 
+                               + "] not found in the Graphson db, but had " 
+                               + sVal + " entries in the SQL db.";
+               LOGGER.error(msg);
+               System.out.println(msg);
+               outWriter.write(msg);
+               outWriter.newLine();
+               continue;
+           }
+       }
+       outWriter.close();
+       String msg = "Audit Results written to: " + fName;
+       LOGGER.debug(msg);
+       System.out.println(msg);
+       return fName;
+
+    }
+
+       
+    public HashMap <String,Integer> getNodeCountsFromSQLRes(JsonObject sqlJsonData) {
+       
+       HashMap<String,Integer> nodeCtHash = new HashMap<>();
+       if( sqlJsonData != null ) {
+                       for (Map.Entry<String, JsonElement> entry : sqlJsonData.entrySet()) {
+                               String tableName = entry.getKey();
+                               if (!tableName.startsWith("edge__")) {
+                                       nodeCtHash.put(tableName, entry.getValue().getAsInt());
+                               }
+                       }
+               }
+       return nodeCtHash;
+    }
+
+
+       public HashMap <String,Integer> getEdgeCountsFromSQLRes(JsonObject sqlJsonData){
+
+               HashMap<String,Integer> edgeCtHash = new HashMap<>();
+               if( sqlJsonData != null ) {
+                       for (Map.Entry<String, JsonElement> entry : sqlJsonData.entrySet()) {
+                               String tableName = entry.getKey();
+                               if (tableName.startsWith("edge__")) {
+                                       edgeCtHash.put(tableName, entry.getValue().getAsInt());
+                               }
+                       }
+               }
+               return edgeCtHash;
+       }
+
+
+       public JsonObject getRdbmsTableCounts(long timestamp, String dbName)
+                       throws Exception {
+
+               return apertureService.runAudit(timestamp, dbName);
+       }
+  
+    
+    public String getGsonTotals(HashMap <String,Integer> tableCounts){
+       int nodeCount = 0;
+       int edgeCount = 0;
+           for (Map.Entry<String, Integer> entry : tableCounts.entrySet()) {
+               String tblKey = entry.getKey();
+               if( tblKey != null && tblKey.startsWith("edge__")){
+                       edgeCount = edgeCount + entry.getValue();
+               } else {
+                       nodeCount = nodeCount + entry.getValue();
+               }
+           }
+           String countStr = " nodes = " + nodeCount + ", edges = " + edgeCount;
+           return countStr;
+       }
+        
+    public String getMSqlTotals(HashMap <String,Integer> nodeCounts,
+               HashMap <String,Integer> edgeCounts){           
+       int nodeCount = 0;
+       int edgeCount = 0;
+           for (Map.Entry<String, Integer> entry : nodeCounts.entrySet()) {
+               nodeCount = nodeCount + entry.getValue();
+           }
+           for (Map.Entry<String, Integer> entry : edgeCounts.entrySet()) {
+               edgeCount = edgeCount + entry.getValue();
+           }
+           String countStr = " nodes = " + nodeCount + ", edges = " + edgeCount;
+           return countStr;
+       }
+     
+    public Long getTimestampFromFileName(String fName) throws Exception {      
+       // Note -- we are expecting filenames that look like our 
+       //  current snapshot filenames (without the ".PXX" suffix)
+       //  e.g. dataSnapshot.graphSON.201908151845
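+       //  For example (illustrative), "dataSnapshot.graphSON.201908151845" is
+       //  parsed as 2019-08-15 18:45 in the JVM's default time zone and
+       //  returned as epoch milliseconds.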
+       
+       String datePiece = getDateTimePieceOfFileName(fName);
+       // "yyyy" (calendar year) is required here; "YYYY" would be the week-based year
+       final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmm");
+       Date date = null;
+       try {
+               date = dateFormat.parse(datePiece);
+       }
+       catch (ParseException pe) {
+               throw new Exception ("Error trying to parse this to a Date-Timestamp ["
+                               + datePiece + "]: " + pe.getMessage() );
+       }
+       final long timestamp = date.getTime();
+       return timestamp;
+       
+    }
+
+       
+    public String getDateTimePieceOfFileName(String fName) throws Exception {          
+       // Note -- we are expecting filenames that look like our 
+       //  current snapshot filenames (without the ".PXX" suffix)
+       //  e.g. dataSnapshot.graphSON.201908151845
+       
+       if( fName == null || fName.isEmpty() || fName.length() < 12) {
+               throw new Exception ("File name must end with .yyyymmddhhmm ");
+       }
+       int index = fName.lastIndexOf('.');
+       
+       if ( index == -1 ) {
+               throw new Exception ("File name must end with .yyyymmddhhmm ");
+       }
+       index++;
+       if(fName.length() <= index) {
+               throw new Exception ("File name must end with .yyyymmddhhmm ");
+       }
+       String datePiece = fName.substring(index);
+       if( datePiece.length() != 12 ) {
+               throw new Exception ("File name must end date in the format .yyyymmddhhmm ");
+       }
+       
+       return datePiece;
+    }
+    
+    
+    public Map<String, String> getEdgeMapKeys() throws EdgeRuleNotFoundException {
+       
+       int edKeyCount = 0;
+        Map<String, String> edgeKeys = new HashMap<>();
+        // EdgeKey will look like:  nodeTypeA__nodeTypeB
+        // The value will be the key with "edge__" pre-pended
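+        // Illustrative (hypothetical node types): types "foo" and "bar" produce
+        // one key such as "bar__foo" (a single consistent ordering of the pair)
+        // whose value is "edge__bar__foo".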
+        
+        for (Map.Entry<String, Collection<EdgeRule>> rulePairings : ei.getAllCurrentRules().asMap().entrySet()) {
+            for (EdgeRule er : rulePairings.getValue()) {
+               String a = er.getTo();
+               String b = er.getFrom();
+               if (a.compareTo(b) >= 0) {
+                       er.flipDirection();
+                }
+                if (!loader.getAllObjects().containsKey(er.getTo()) || !loader.getAllObjects().containsKey(er.getFrom())) {
+                       // we don't have one of these nodeTypes, so skip this entry
+                       continue;
+                }
+                
+                final String to = er.getTo().replace("-", "_");
+                final String from = er.getFrom().replace("-", "_");
+                final String edKey = from + "__" + to;
+                
+                if (edgeKeys.containsKey(edKey)) {
+                       continue;
+                }
+                edKeyCount++;
+                edgeKeys.put(edKey, "edge__"+ edKey);
+               
+            }
+        }
+        System.out.println("DEBUG --> There are " + edKeyCount + " edge table keys defined. " );
+        LOGGER.debug(" -- There are " + edKeyCount + " edge table keys defined. " );
+        return edgeKeys;
+    }
+    
+       
+    public HashMap <String,Integer> getCountsFromGraphsonSnapshot(String databaseName, 
+               String fileNamePart, String srcDir) throws Exception {
+        
+        final JsonParser jsonParser = new JsonParser();
+        final Set<String> uris = new HashSet<>();
+        final Set<String> uuids = new HashSet<>();
+        final Map<Long, String> idToUri = new HashMap<>();
+        final Map<Long, String> idToType = new HashMap<>();
+        final Map<Long, String> idToUuid = new HashMap<>();
+        final Set<Triplet<Long, Long, String>> idIdLabelOfEdges = new HashSet<>();
+        
+        final HashMap<String,Integer> tableCountHash = new HashMap<>();
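+        // tableCountHash keys are node types with "-" replaced by "_", plus the
+        // "edge__"-prefixed keys produced by getEdgeMapKeys() for edge tables.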
+      
+              
+        String sourceDir = DEFAULT_SRC_DIR;  // Default for now
+        if( srcDir != null && srcDir.length() > 1 ) {
+               // If they passed one in, then we'll use it.
+               sourceDir = srcDir;
+        }
+        String graphsonDir = sourceDir + "/";
+     
+        if( fileNamePart == null || fileNamePart.trim().isEmpty() ){
+               String msg = "ERROR -- fileName is required to be passed in. ";
+                   LOGGER.error(msg);
+               System.out.println(msg);
+               throw new Exception(msg);
+        }
+        
+        final List<File> graphsons = Files.walk(Paths.get(graphsonDir))
+                .filter(Files::isRegularFile)
+                .map(Path::toFile)
+                .sorted()
+                .collect(Collectors.toList());
+
+        int skippedNodeCount = 0;  
+        int nodeCounter = 0;
+        int edgeCounter = 0;
+        for (File graphson : graphsons) {
+            if( !graphson.getName().contains(fileNamePart) ) {
+               continue;
+            }
+            
+            try (BufferedReader reader = new BufferedReader(new FileReader(graphson))) {
+                String msg = "Processing snapshot file " + graphson.getName();
+                LOGGER.debug(msg);
+                System.out.println(msg);
+                String line;
+                    
+                while ((line = reader.readLine()) != null) {
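+                    // Each line holds one GraphSON vertex, roughly of this shape
+                    // (illustrative, heavily abbreviated):
+                    // {"id":123,"inE":{"someLabel":[{"id":"...","outV":456}]},
+                    //  "properties":{"aai-node-type":[{"value":"pserver",...}],...}}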
+                    JsonObject vertex = jsonParser.parse(line).getAsJsonObject();
+                    long id = vertex.get("id").getAsLong();
+
+                    if ((vertex.get("properties") == null) ||
+                               !vertex.get("properties").getAsJsonObject().has("aai-uri") ||
+                            !vertex.get("properties").getAsJsonObject().has("aai-node-type") ||
+                            !vertex.get("properties").getAsJsonObject().has("aai-uuid")) {
+                               
+                        msg = "DEBUG  --  Could not find keys for this line: [" +
+                                               line + "] ------";
+                        LOGGER.debug(msg);
+                        System.out.println(msg);
+                        skippedNodeCount++;
+                        continue;
+                    }
+
+                    String uri = vertex.get("properties").getAsJsonObject().get("aai-uri").getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString();
+                    String nodeType = vertex.get("properties").getAsJsonObject().get("aai-node-type").getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString();
+                    String nodeTypeKey = nodeType.replace("-", "_");
+                    String uuid = vertex.get("properties").getAsJsonObject().get("aai-uuid").getAsJsonArray().get(0).getAsJsonObject().get("value").getAsString();
+
+                    try {
+                        loader.introspectorFromName(nodeType);
+                    } catch (Exception e) {
+                        msg = "DEBUG -- loader introspector for nodeType error: [" +
+                                       e.getMessage() + "], [" + e.getLocalizedMessage() + "]------";
+                        LOGGER.debug(msg);
+                        System.out.println(msg);
+                        skippedNodeCount++;
+                        continue;
+                    }
+                                               
+                    if (uris.contains(uri)) {
+                        msg = "DEBUG -- SKIP Uri because it has been seen before: [" +
+                                       uri + "] ------";
+                        LOGGER.debug(msg);
+                       System.out.println(msg);
+                        skippedNodeCount++;
+                        continue;
+                    }
+                    else if (uuids.contains(uuid)) {
+                        msg = "DEBUG -- SKIP UUID because it has been seen before: [" +
+                                       uuid + "] ------";
+                        LOGGER.debug(msg);
+                       System.out.println(msg);
+                        skippedNodeCount++;
+                        continue;
+                    }
+                        
+                    uris.add(uri);
+                    uuids.add(uuid);
+                    idToUri.put(id, uri);
+                    idToType.put(id, nodeType);
+                    idToUuid.put(id, uuid);
+                      
+                    // Collect Edge Info for this node
+                    if (vertex.has("inE")) {
+                        vertex.get("inE").getAsJsonObject().entrySet().forEach(es -> {
+                            String label = es.getKey();
+                            es.getValue().getAsJsonArray().forEach(e -> {
+                                long otherId = e.getAsJsonObject().get("outV").getAsLong();
+                                idIdLabelOfEdges.add(new Triplet<>(id, otherId, label));
+                            });
+                        });
+                    }
+
+                    // increment the per-node-type count (starts at 1 on first sighting)
+                    tableCountHash.merge(nodeTypeKey, 1, Integer::sum);
+                    nodeCounter++;
+                }// end of looping over this file
+            } catch (IOException e) {
+                   String msg = "DEBUG --  Error while processing nodes ------";
+                LOGGER.debug(msg);
+                   System.out.println(msg);
+                e.printStackTrace();
+            }
+        }// End of looping over all files
+        
+        String msg = "DEBUG -- Found this many Kinds of nodes: " + tableCountHash.size();
+        LOGGER.debug(msg);
+               System.out.println(msg);
+        
+        msg = "DEBUG -- Found this many total nodes: " + nodeCounter;
+        LOGGER.debug(msg);
+               System.out.println(msg);
+               
+        msg = "  >> Skipped a total of " + skippedNodeCount + " Node Records ------";
+        LOGGER.debug(msg);
+               System.out.println(msg);
+               
+        
+        msg = "DEBUG -- Begin Processing Edges ------";
+        LOGGER.debug(msg);
+               System.out.println(msg);
+        
+               int edgeTableCounter = 0;
+        int edgeSkipCounter = 0;
+        
+        Map<String, String> edgeKeys = this.getEdgeMapKeys();
+        for (Triplet<Long, Long, String> edge : idIdLabelOfEdges) {
+            if (!idToType.containsKey(edge.getValue0())) {
+               LOGGER.info(" Edge Skipped because ID not found: [" + edge.getValue0() + "]");
+                System.out.println(" Edge Skipped because ID not found: [" + edge.getValue0() + "]");
+                edgeSkipCounter++;
+                continue;
+            }
+            else if (!idToType.containsKey(edge.getValue1())) {
+                System.out.println(" Edge Skipped because ID not found: [" + edge.getValue1() + "]");
+                LOGGER.info(" Edge Skipped because ID not found: [" + edge.getValue1() + "]");
+                edgeSkipCounter++;
+                continue;
+            }
+            else {
+                String colA = idToType.get(edge.getValue1()).replace("-","_");
+                String colB = idToType.get(edge.getValue0()).replace("-","_");
+                
+                String edLabel = edge.getValue2(); // unused for now; kept in case counts are later split by label
+                String edKey = colA + "__" + colB;
+                // Note the edgeKeys table has nodeTypeX__nodeTypeY as the key
+                //  The value stored for that key just has "edge__" pre-pended which 
+                //  is the key used by the tableCount thing
+                // Look the key up in either direction; note this can still yield
+                // null if neither ordering is a known edge table.
+                String tcEdKey = edgeKeys.containsKey(edKey)
+                        ? edgeKeys.get(edKey)
+                        : edgeKeys.get(colB + "__" + colA);
+              
+                if (!tableCountHash.containsKey(tcEdKey)) {
+                    edgeTableCounter++;   // first time this edge table has been seen
+                }
+                tableCountHash.merge(tcEdKey, 1, Integer::sum);
+                edgeCounter++;
+            }
+        }
+            
+        msg = " Processed a total of " + edgeCounter + " Edge Records ------";
+        LOGGER.debug(msg);
+        System.out.println(msg);
+        
+        msg = " Found data for this many edgeTables: " + edgeTableCounter;
+        LOGGER.debug(msg);
+        System.out.println(msg);
+               
+        msg = "  >> Skipped a total of " + edgeSkipCounter + " Edge Records ------";
+        LOGGER.debug(msg);
+       System.out.println(msg);
+           
+        return tableCountHash;
+        
+    }
+
+
+
+}
diff --git a/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java b/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
index 8fc6295..99b1619 100644
  * ============LICENSE_END=========================================================
  */
 package org.onap.aai.datacleanup;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.nio.file.Files;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.nio.file.attribute.FileTime;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
 
 import org.onap.aai.aailog.logs.AaiScheduledTaskAuditLog;
 import org.onap.aai.logging.ErrorLogHelper;
 import org.onap.aai.logging.LogFormatTools;
+import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.util.AAIConfig;
 import org.onap.aai.util.AAIConstants;
 import org.onap.logging.filter.base.ONAPComponents;
-import org.onap.logging.ref.slf4j.ONAPLogConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -33,18 +44,6 @@ import org.springframework.context.annotation.PropertySource;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Component;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.nio.file.Files;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.nio.file.attribute.FileTime;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
-
 @Component
 @PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
 public class DataCleanupTasks {
@@ -73,6 +72,7 @@ public class DataCleanupTasks {
                        String archiveDir = dataGroomingDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
                        String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming";             
                        File path = new File(dataGroomingDir);
+                       File archivepath = new File(archiveDir);
                        File dataGroomingPath = new File(dataGroomingArcDir);
                
                        logger.debug("The logDir is " + logDir);
@@ -138,7 +138,9 @@ public class DataCleanupTasks {
      * 
      */
     public boolean directoryExists(String dir) {
-               return new File(dir).exists();
+       File path = new File(dir);
+               boolean exists = path.exists();
+               return exists;  
     }
     
     public Date getZipDate(Integer days) {
@@ -177,7 +179,7 @@ public class DataCleanupTasks {
        logger.debug("Inside the archive folder");  
        String filename = file.getName();
        logger.debug("file name is " +filename);
-
+               
                String zipFile = afterArchiveDir + AAIConstants.AAI_FILESEP + filename;
                
                File dataGroomingPath = new File(afterArchiveDir);
@@ -230,8 +232,10 @@ public class DataCleanupTasks {
 */
     @Scheduled(cron = "${datasnapshotcleanup.cron}" )
     public void dataSnapshotCleanup() {
-       
-       logger.info(ONAPLogConstants.Markers.ENTRY, "Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
+               
+               auditLog.logBefore("dataSnapshotCleanup", ONAPComponents.AAI.toString() );
+       
+               logger.debug("Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
        
        try {
                String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
@@ -289,11 +293,88 @@ public class DataCleanupTasks {
                                }       
                        }
                }
+               dmaapEventsDataCleanup(newAgeDelete);
+               dataMigrationCleanup();
        }
        catch (Exception e) {
                ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+LogFormatTools.getStackTop(e));
                logger.debug("AAI_4000", "Exception running cron job for DataCleanup"+LogFormatTools.getStackTop(e));
        }
-    logger.info(ONAPLogConstants.Markers.EXIT, "Ended cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
-  }   
+    logger.debug("Ended cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
+       auditLog.logAfter();
+  }
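+       /**
+        * Deletes files under logs/data/dmaapEvents whose creation date (as
+        * returned by fileCreationMonthDate) falls before the supplied cutoff.
+        * @param deleteAge files created before this date are removed
+        */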
+       public void dmaapEventsDataCleanup(Date deleteAge) {
+
+               logger.debug("Started dmaapEventsDataCleanup @ " + simpleDateFormat.format(new Date()));
+
+               try {
+                       String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
+                       String dmaapEventsDataDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dmaapEvents";
+                       File path = new File(dmaapEventsDataDir);
+
+                       logger.debug("The logDir is " + logDir);
+                       logger.debug("The dmaapEventsDataDir is " + dmaapEventsDataDir);
+
+                       //Iterate through the files
+                       File[] listFiles = path.listFiles();
+                       if(listFiles != null) {
+                               for(File listFile : listFiles) {
+                                       if(listFile.isFile()){
+                                               logger.debug("The file name in dmaapEvents is: " +listFile.getName());
+                                               Date fileCreateDate = fileCreationMonthDate(listFile);
+                                               logger.debug("The fileCreateDate in dmaapEvents is " + fileCreateDate);
+                                               if( fileCreateDate.compareTo(deleteAge) < 0) {
+                                                       delete(listFile);
+                                                       logger.debug("Deleted " + listFile.getName());
+                                               }
+                                       }
+                               }
+                       }
+
+               }
+               catch (Exception e) {
+                       ErrorLogHelper.logError("AAI_4000", "Exception in dmaapEventsDataCleanup");
+                       logger.debug("AAI_4000", "Exception in dmaapEventsDataCleanup "+LogFormatTools.getStackTop(e));
+               }
+               logger.debug("Ended cron dmaapEventsDataCleanup @ " + simpleDateFormat.format(new Date()));
+       }
+       
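+    /**
+     * Like dmaapEventsDataCleanup, but for logs/data/migration-input-files,
+     * using the aai.datamigration.agedelete property (in days) as the cutoff.
+     * @throws AAIException if the agedelete property cannot be read
+     */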
+    public void dataMigrationCleanup() throws AAIException {
+               Integer ageDeleteSnapshot = AAIConfig.getInt("aai.datamigration.agedelete");
+               
+               Date deleteAge = getZipDate(ageDeleteSnapshot);
+       
+               logger.debug("Started dataMigrationCleanup @ " + simpleDateFormat.format(new Date()));
+       
+       try {
+               String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
+               String dataMigrationCleanupDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "migration-input-files";
+               File path = new File(dataMigrationCleanupDir);
+               
+               logger.debug("The logDir is " + logDir);
+                       logger.debug("The migrationInputFilesDir is " + dataMigrationCleanupDir);
+
+                       //Iterate through the files
+                       File[] listFiles = path.listFiles();
+                       if(listFiles != null) {
+                               for(File listFile : listFiles) {
+                                       if(listFile.isFile()){
+                                               logger.debug("The file name in migration-input-files is: " +listFile.getName());
+                                               Date fileCreateDate = fileCreationMonthDate(listFile);
+                                               logger.debug("The fileCreateDate in migration-input-files is " + fileCreateDate);
+                                               if( fileCreateDate.compareTo(deleteAge) < 0) {
+                                                       delete(listFile);
+                                                       logger.debug("Deleted " + listFile.getName());
+                                               }
+                                       }
+                               }
+                       }
+
+               }
+               catch (Exception e) {
+                       ErrorLogHelper.logError("AAI_4000", "Exception in dataMigrationCleanup");
+                       logger.debug("AAI_4000", "Exception in dataMigrationCleanup "+LogFormatTools.getStackTop(e));
+               }
+               logger.debug("Ended cron dataMigrationCleanup @ " + simpleDateFormat.format(new Date()));
+       }
 }
diff --git a/src/main/java/org/onap/aai/dataexport/DataExportTasks.java b/src/main/java/org/onap/aai/dataexport/DataExportTasks.java
index 2d0625c..028c729 100644
  */
 package org.onap.aai.dataexport;
 
-import com.att.eelf.configuration.Configuration;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.comparator.LastModifiedFileComparator;
-import org.apache.commons.io.filefilter.DirectoryFileFilter;
-import org.apache.commons.io.filefilter.FileFileFilter;
-import org.apache.commons.io.filefilter.RegexFileFilter;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.Properties;
+import java.util.TreeMap;
+import java.util.UUID;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
 import org.onap.aai.aailog.logs.AaiScheduledTaskAuditLog;
 import org.onap.aai.dbgen.DynamicPayloadGenerator;
 import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.LoaderFactory;
 import org.onap.aai.logging.ErrorLogHelper;
 import org.onap.aai.logging.LogFormatTools;
@@ -35,18 +49,22 @@ import org.onap.aai.setup.SchemaVersions;
 import org.onap.aai.util.AAIConfig;
 import org.onap.aai.util.AAIConstants;
 import org.onap.logging.filter.base.ONAPComponents;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import org.onap.logging.ref.slf4j.ONAPLogConstants;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
 import org.springframework.context.annotation.PropertySource;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Component;
 
-import java.io.*;
-import java.text.SimpleDateFormat;
-import java.util.*;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
+import com.att.eelf.configuration.Configuration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
+import org.apache.commons.io.comparator.LastModifiedFileComparator;
+import org.apache.commons.io.filefilter.DirectoryFileFilter;
+import org.apache.commons.io.filefilter.FileFileFilter;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.filefilter.RegexFileFilter;
 
 /**
  * DataExportTasks obtains a graph snapshot and invokes DynamicPayloadGenerator
@@ -54,11 +72,12 @@ import java.util.regex.Pattern;
  */
 @Component
 @PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
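+// Register this scheduled task only when a dataexporttask.cron property is
+// present in the loaded configuration (@ConditionalOnProperty, matchIfMissing
+// defaults to false).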
+@ConditionalOnProperty(prefix = "dataexporttask", name = "cron")
 public class DataExportTasks {
 
        private static final Logger LOGGER;
        private static final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
-
+       
        static {
                System.setProperty("aai.service.name", DataExportTasks.class.getSimpleName());
                Properties props = System.getProperties();
@@ -94,11 +113,19 @@ public class DataExportTasks {
        /**
         * The exportTask method.
         *
+        * @throws AAIException if the aai.dataexport.enable property is missing
+        * @throws Exception on other failures
         */
-       public void exportTask() throws Exception   {
-               AaiScheduledTaskAuditLog auditLog = new AaiScheduledTaskAuditLog();
+       public void exportTask() throws AAIException, Exception   {
+           AaiScheduledTaskAuditLog auditLog = new AaiScheduledTaskAuditLog();
                auditLog.logBefore("dataExportTask", ONAPComponents.AAI.toString());
                LOGGER.info("Started exportTask: " + dateFormat.format(new Date()));
+               try {
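+                       // probe the property first so a missing key surfaces as an AAIException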
+                       String isDataExportEnabled = AAIConfig.get("aai.dataexport.enable");
+               } catch (AAIException ex){
+                       LOGGER.info("Ended exportTask: " + dateFormat.format(new Date()) + " " + ex.getMessage());
+                       auditLog.logAfter();
+                       throw ex;
+               }
                if (AAIConfig.get("aai.dataexport.enable").equalsIgnoreCase("false")) {
                        LOGGER.debug("Data Export is not enabled");
                        return;
@@ -208,7 +235,7 @@ public class DataExportTasks {
                        LOGGER.debug("Exception while running the check to see if dataExport is running "+ LogFormatTools.getStackTop(e));
                }
 
-               return count > 0;
+        return count > 0;
        }
 
        /**
@@ -275,7 +302,7 @@ public class DataExportTasks {
                                        String g1 = m.group(1);
                                        LOGGER.debug ("Found group " + g1);
                                        if ( !fileMap.containsKey(g1) ) {
-                                               ArrayList<File> l = new ArrayList<>();
+                                               ArrayList<File> l = new ArrayList<File>();
                                                l.add(f);
                                                fileMap.put(g1, l);
                                        }
@@ -317,6 +344,7 @@ public class DataExportTasks {
        /**
         * The deletePayload method deletes all the payload files that it finds at targetDirectory
         * @param targetDirFile the directory that contains payload files
+        * @throws AAIException
         */
        private static void deletePayload(File targetDirFile) {
                
diff --git a/src/main/java/org/onap/aai/datagrooming/DataGrooming.java b/src/main/java/org/onap/aai/datagrooming/DataGrooming.java
index 88bfebb..3884a01 100644
@@ -47,7 +47,7 @@ import org.apache.tinkerpop.gremlin.structure.Property;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.apache.tinkerpop.gremlin.structure.VertexProperty;
 import org.onap.aai.GraphAdminApp;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.util.GraphAdminConstants;
 import org.onap.logging.ref.slf4j.ONAPLogConstants;
 import org.onap.aai.dbmap.AAIGraph;
diff --git a/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java b/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java
index 217d6c0..946489b 100644
@@ -317,7 +317,7 @@ public class DataSnapshot {
                }
                else if( command.equals("MULTITHREAD_RELOAD") ){
                        // Note - this will use as many threads as the snapshot file is
-                       //   broken up into.  (up to a limit)
+                       //   broken up into.  (up to a limit - whatever the 'threadCount' variable is set to)
                        if (args.length >= 2) {
                                // Since they are re-loading, they need to pass the snapshot file name to use.
                                // We expected the file to be found in our snapshot directory.  Note - if
@@ -357,8 +357,14 @@ public class DataSnapshot {
                        }
                }
 
-               
-               threadCount4Create = cArgs.threadCount; 
+               try {
+                       threadCount4Create = cArgs.threadCount;
+               }
+               catch ( NumberFormatException nfe ){
+                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount found for DataSnapshot [" + cArgs.threadCount + "]");
+                       LOGGER.debug("Bad (non-integer) threadCount found by DataSnapshot [" + cArgs.threadCount + "]");
+                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+               }
                maxNodesPerFile4Create = cArgs.maxNodesPerFile;
                //Print Defaults
                LOGGER.debug("DataSnapshot command is [" + cArgs.command + "]");
@@ -375,8 +381,7 @@ public class DataSnapshot {
                LOGGER.debug("VertToEdgeProcDelay is [" + cArgs.vertToEdgeProcDelay + "]");
                LOGGER.debug("StaggerThreadDelay is [" + cArgs.staggerThreadDelay + "]");
                LOGGER.debug("Caller process is ["+ cArgs.caller + "]");
-               
-                               
+
                //Print non-default values
                if (!AAIConfig.isEmpty(cArgs.fileName)){
                        LOGGER.debug("Snapshot file name (if not default) to use  is [" + cArgs.fileName + "]");
@@ -553,6 +558,9 @@ public class DataSnapshot {
                                // NOTE - it will only use as many threads as the number of files the
                                //    snapshot is  written to.  Ie. if you have a single-file snapshot,
                                //    then this will be single-threaded.
+                               // If the number of files is greater than the 'threadCount' parameter,
+                               //    then we will use more than one pass to keep the number of simultaneous 
+                               //    threads below the threadCount param.
                                //
                                LOGGER.debug(" Command = " + command );
                                
@@ -561,114 +569,129 @@ public class DataSnapshot {
                                }
                                ArrayList <File> snapFilesArr = getFilesToProcess(targetDir, oldSnapshotFileName, false);
                                int fCount = snapFilesArr.size();
+                               int threadPassesNeeded = (int) Math.ceil((double)fCount / (double)threadCount4Create);
+                               int filesPerPass = (int) Math.ceil((double)fCount / (double)threadPassesNeeded);
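+                               // e.g. (illustrative) 10 snapshot files with threadCount4Create = 4
+                               //   -> threadPassesNeeded = ceil(10/4) = 3, filesPerPass = ceil(10/3) = 4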
+
                                JanusGraph graph1 = AAIGraph.getInstance().getGraph();
                                long timeStart = System.nanoTime();
 
                                HashMap <String,String> old2NewVertIdMap = new HashMap <String,String> ();
 
-                                       // We're going to try loading in the vertices - without edges or properties
-                                       //    using Separate threads
-
-                                       ExecutorService executor = Executors.newFixedThreadPool(fCount);
-                                       List<Future<HashMap<String,String>>> list = new ArrayList<Future<HashMap<String,String>>>();
-
-                                       for( int i=0; i < fCount; i++ ){
-                                               File f = snapFilesArr.get(i);
+                               ExecutorService executor = Executors.newFixedThreadPool(fCount);
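+                               // Note: the pool is sized for all files, but each pass below submits
+                               // at most filesPerPass loaders and drains their futures before the
+                               // next pass starts.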
+                               int threadFailCount = 0;
+                               
+                               LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs
+                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs
+                                               + ", maxErrorsPerThread = " + maxErrorsPerThread );
+                                       
+                               // --------------------------------------
+                               // Step 1 -- Load empty vertices
+                               // --------------------------------------
+                               int fileNo = 0;
+                               for( int passNo = 1; passNo <= threadPassesNeeded; passNo++ ){
+                                       List<Future<HashMap<String,String>>> listFutV = new ArrayList<Future<HashMap<String,String>>>();
+
+                                       int thisPassCount = 0;
+                                       while( (thisPassCount < filesPerPass) && (fileNo < fCount) ){
+                                               File f = snapFilesArr.get(fileNo);
                                                String fname = f.getName();
                                                String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname;
                                                Thread.sleep(cArgs.staggerThreadDelay);  // Stagger the threads a bit
                                                LOGGER.debug(" -- Read file: [" + fullSnapName + "]");
-                                               LOGGER.debug(" -- Call the PartialVertexLoader to just load vertices  ----");
-                                               LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs
-                                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs
-                                                               + ", maxErrorsPerThread = " + maxErrorsPerThread );
                                                Callable <HashMap<String,String>> vLoader = new PartialVertexLoader(graph1, fullSnapName,
                                                                vertAddDelayMs, failureDelayMs, retryDelayMs, maxErrorsPerThread, LOGGER);
                                                Future <HashMap<String,String>> future = (Future<HashMap<String, String>>) executor.submit(vLoader);
 
-                                               // add Future to the list, we can get return value using Future
-                                               list.add(future);
-                                               LOGGER.debug(" --  Starting PartialDbLoad VERT_ONLY thread # "+ i );
+                                               // add future to the list, we can get return value later
+                                               listFutV.add(future);
+                                               LOGGER.debug(" --  Starting PartialDbLoad VERT_ONLY file # "+ fileNo
+                                                               + " (passNo = " + passNo + ", passIndex = " + thisPassCount + ")");
+                                               
+                                               thisPassCount++;
+                                               fileNo++;
                                        }
-
+                                       
                                        int threadCount4Reload = 0;
-                                       int threadFailCount = 0;
-                                       for(Future<HashMap<String,String>> fut : list){
-                               threadCount4Reload++;
-                               try {
-                                       old2NewVertIdMap.putAll(fut.get());
-                                       LOGGER.debug(" -- back from PartialVertexLoader.  returned thread # " + threadCount4Reload +
-                                                       ", current size of old2NewVertMap is: " + old2NewVertIdMap.size() );
-                               }
-                               catch (InterruptedException e) {
-                                       threadFailCount++;
-                                       AAIException ae = new AAIException("AAI_6128", e , "InterruptedException");
+                                       for(Future<HashMap<String,String>> fut : listFutV){
+                                               threadCount4Reload++;
+                                               try {
+                                                       old2NewVertIdMap.putAll(fut.get());
+                                                       LOGGER.debug(" -- back from PartialVertexLoader.  returned pass # " 
+                                                                       + passNo + ", thread # " 
+                                                                       + threadCount4Reload +
+                                                                       ", current size of old2NewVertMap is: " + old2NewVertIdMap.size() );
+                                               }
+                                               catch (InterruptedException e) {
+                                                       threadFailCount++;
+                                                       AAIException ae = new AAIException("AAI_6128", e , "InterruptedException");
                                                        ErrorLogHelper.logException(ae);
-                               }
-                               catch (ExecutionException e) {
-                                       threadFailCount++;
-                                       AAIException ae = new AAIException("AAI_6128", e , "ExecutionException");
+                                               }
+                                               catch (ExecutionException e) {
+                                                       threadFailCount++;
+                                                       AAIException ae = new AAIException("AAI_6128", e , "ExecutionException");
                                                        ErrorLogHelper.logException(ae);
-                               }
-                           }
-
-                                       executor.shutdown();
-
-                                       if( threadFailCount > 0 ) {
-                                               String emsg = " FAILURE >> " + threadFailCount + " Vertex-loader thread(s) failed to complete successfully.  ";
-                                               LOGGER.debug(emsg);
-                                               throw new Exception( emsg );
+                                               }
                                        }
+                               } // end of passes for loading empty vertices
+                                       
+                               executor.shutdown();
+                               if( threadFailCount > 0 ) {
+                                       String emsg = " FAILURE >> " + threadFailCount + " Vertex-loader thread(s) failed to complete successfully.  ";
+                                       LOGGER.debug(emsg);
+                                       throw new Exception( emsg );
+                               }
 
-                                       long timeX = System.nanoTime();
-                                       long diffTime =  timeX - timeStart;
-                                       long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
-                                       long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
-                                       LOGGER.debug("   -- To reload just the vertex ids from the snapshot files, it took: " +
-                                                       minCount + " minutes, " + secCount + " seconds " );
-
-                                       // Give the DB a little time to chew on all those vertices
-                                       Thread.sleep(vertToEdgeProcDelay);
-
-                                       // ----------------------------------------------------------------------------------------
-                                       LOGGER.debug("\n\n\n  -- Now do the edges/props ----------------------");
-                                       // ----------------------------------------------------------------------------------------
+                               long timeX = System.nanoTime();
+                               long diffTime =  timeX - timeStart;
+                               long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
+                               long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
+                               LOGGER.debug("   -- To reload just the vertex ids from the snapshot files, it took: " +
+                                               minCount + " minutes, " + secCount + " seconds " );
 
+                               // Give the DB a little time to chew on all those new vertices
+                               Thread.sleep(vertToEdgeProcDelay);
 
-                                       // We're going to try loading in the edges and missing properties
-                                       // Note - we're passing the whole oldVid2newVid mapping to the PartialPropAndEdgeLoader
-                                       //     so that the String-updates to the GraphSON will happen in the threads instead of
-                                       //     here in the un-threaded calling method.
-                                       executor = Executors.newFixedThreadPool(fCount);
-                                       ArrayList<Future<ArrayList<String>>> listEdg = new ArrayList<Future<ArrayList<String>>>();
-                                       for( int i=0; i < fCount; i++ ){
-                                               File f = snapFilesArr.get(i);
+                                       
+                               // -------------------------------------------------------------
+                               // Step 2 -- Load Edges and properties onto the empty vertices
+                               // -------------------------------------------------------------
+                               LOGGER.debug("\n\n\n  -- Now load the edges/properties ----------------------");
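+                               // Note - we pass the whole old2NewVertIdMap to each PartialPropAndEdgeLoader
+                               //    so that the String-updates to the GraphSON happen in the threads
+                               //    instead of here in the un-threaded calling method.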
+                               executor = Executors.newFixedThreadPool(fCount);
+
+                               fileNo = 0;
+                               for( int passNo = 1; passNo <= threadPassesNeeded; passNo++ ){
+                                       ArrayList<Future<ArrayList<String>>> listFutEdg = new ArrayList<Future<ArrayList<String>>>();
+
+                                       int thisPassCount = 0;
+                                       while( (thisPassCount < filesPerPass) && (fileNo < fCount) ){
+                                               File f = snapFilesArr.get(fileNo);
                                                String fname = f.getName();
                                                String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname;
                                                Thread.sleep(cArgs.staggerThreadDelay);  // Stagger the threads a bit
                                                LOGGER.debug(" -- Read file: [" + fullSnapName + "]");
-                                               LOGGER.debug(" -- Call the PartialPropAndEdgeLoader for Properties and EDGEs  ----");
-                                               LOGGER.debug(" -- edgeAddDelayMs = " + vertAddDelayMs
-                                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs
-                                                               + ", maxErrorsPerThread = " + maxErrorsPerThread );
-
-                                               Callable  eLoader = new PartialPropAndEdgeLoader(graph1, fullSnapName,
+                                               Callable <ArrayList<String>> eLoader = new PartialPropAndEdgeLoader(graph1, fullSnapName,
                                                                edgeAddDelayMs, failureDelayMs, retryDelayMs,
                                                                old2NewVertIdMap, maxErrorsPerThread, LOGGER);
                                                Future <ArrayList<String>> future = (Future<ArrayList<String>>) executor.submit(eLoader);
 
-                                               //add Future to the list, we can get return value using Future
-                                               listEdg.add(future);
-                                               LOGGER.debug(" --  Starting PartialPropAndEdge thread # "+ i );
+                                               // add future to the list, we can wait for it below
+                                               listFutEdg.add(future);
+                                               LOGGER.debug(" --  Starting PartialPropAndEdgeLoader file # "
+                                                               + fileNo + " (pass # " + passNo + ", passIndex " 
+                                                               + thisPassCount + ")" );
+                                               
+                                               thisPassCount++;
+                                               fileNo++;
                                        }
 
-                                       threadCount4Reload = 0;
-                                       for(Future<ArrayList<String>> fut : listEdg){
-                                   threadCount4Reload++;
-                                   try{
-                                       fut.get();  // DEBUG -- should be doing something with the return value if it's not empty - ie. errors
-                                       LOGGER.debug(" -- back from PartialPropAndEdgeLoader.  thread # " + threadCount4Reload  );
-                                   }
+                                       int threadCount4Reload = 0;
+                                       for( Future<ArrayList<String>> fut : listFutEdg ){
+                                               threadCount4Reload++;
+                                               try{
+                                                       fut.get();  // wait for this thread; its returned error list is not examined here
+                                                       LOGGER.debug(" -- back from PartialPropAndEdgeLoader.  pass # "
+                                                                       + passNo + ", thread # " + threadCount4Reload  );
+                                               }
                                                catch (InterruptedException e) {
                                                        threadFailCount++;
                                                        AAIException ae = new AAIException("AAI_6128", e , "InterruptedException");
@@ -680,30 +703,32 @@ public class DataSnapshot {
                                                        ErrorLogHelper.logException(ae);
                                                }
                                        }
+                                       
+                               } // end of passes for reloading edges and properties 
 
-                                       executor.shutdown();
+                               executor.shutdown();
 
-                                       if( threadFailCount > 0 ) {
-                                               String emsg = " FAILURE >> " + threadFailCount + " Property/Edge-loader thread(s) failed to complete successfully.  ";
-                                               LOGGER.debug(emsg);
-                                               throw new Exception( emsg );
-                                       }
+                               if( threadFailCount > 0 ) {
+                                       String emsg = " FAILURE >> " + threadFailCount + " Property/Edge-loader thread(s) failed to complete successfully.  ";
+                                       LOGGER.debug(emsg);
+                                       throw new Exception( emsg );
+                               }
 
-                                       // This is needed so we can see the data committed by the called threads
-                                       graph1.tx().commit();
+                               // This is needed so we can see the data committed by the called threads
+                               graph1.tx().commit();
 
-                                       long timeEnd = System.nanoTime();
-                                       diffTime =  timeEnd - timeX;
-                                       minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
-                                       secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
-                                       LOGGER.debug("   -- To reload the edges and properties from snapshot files, it took: " +
-                                                       minCount + " minutes, " + secCount + " seconds " );
+                               long timeEnd = System.nanoTime();
+                               diffTime =  timeEnd - timeX;
+                               minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
+                               secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
+                               LOGGER.debug("   -- To reload the edges and properties from snapshot files, it took: " +
+                                               minCount + " minutes, " + secCount + " seconds " );
 
-                                       long totalDiffTime =  timeEnd - timeStart;
-                                       long totalMinCount = TimeUnit.NANOSECONDS.toMinutes(totalDiffTime);
-                                       long totalSecCount = TimeUnit.NANOSECONDS.toSeconds(totalDiffTime) - (60 * totalMinCount);
-                                       LOGGER.debug("   -- TOTAL multi-threaded reload time: " +
-                                                       totalMinCount + " minutes, " + totalSecCount + " seconds " );
+                               long totalDiffTime =  timeEnd - timeStart;
+                               long totalMinCount = TimeUnit.NANOSECONDS.toMinutes(totalDiffTime);
+                               long totalSecCount = TimeUnit.NANOSECONDS.toSeconds(totalDiffTime) - (60 * totalMinCount);
+                               LOGGER.debug("   -- TOTAL multi-threaded reload time: " +
+                                               totalMinCount + " minutes, " + totalSecCount + " seconds " );
 
                        } else if (command.equals("CLEAR_ENTIRE_DATABASE")) {
                                // ------------------------------------------------------------------
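For reference, the pass-gated pattern this RELOAD path (and the DataSnapshot4HistInit one below) now follows, reduced to a minimal self-contained sketch. The loadFile helper and the threadCount-sized pool are illustrative only -- the patch itself sizes the pool to fCount, which is also safe because the per-pass draining is what bounds concurrency:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    public class PassGatedReloadSketch {

        // Illustrative stand-in for PartialVertexLoader / PartialPropAndEdgeLoader.
        private static String loadFile(int fileNo) {
            return "loaded file-" + fileNo;
        }

        public static void main(String[] args) throws Exception {
            int fCount = 10;      // number of snapshot files
            int threadCount = 4;  // the threadCount4Create parameter
            int passesNeeded = (int) Math.ceil((double) fCount / (double) threadCount);  // 3
            int filesPerPass = (int) Math.ceil((double) fCount / (double) passesNeeded); // 4

            ExecutorService executor = Executors.newFixedThreadPool(threadCount);
            int fileNo = 0;
            for (int passNo = 1; passNo <= passesNeeded; passNo++) {
                List<Future<String>> futures = new ArrayList<>();
                int thisPassCount = 0;
                while (thisPassCount < filesPerPass && fileNo < fCount) {
                    final int fn = fileNo;
                    futures.add(executor.submit(() -> loadFile(fn)));
                    thisPassCount++;
                    fileNo++;
                }
                // Drain the pass: nothing from pass N+1 is submitted until every
                // future from pass N has returned, capping live threads at
                // filesPerPass (which is always <= threadCount).
                for (Future<String> fut : futures) {
                    System.out.println("pass " + passNo + ": " + fut.get());
                }
            }
            executor.shutdown();
        }
    }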
index 8d250d7..9aba8cf 100644 (file)
@@ -40,7 +40,7 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.tinkerpop.gremlin.structure.io.IoCore;
@@ -75,8 +75,8 @@ import com.beust.jcommander.ParameterException;
 
 public class DataSnapshot4HistInit {
 
-       private static Logger LOGGER;
-       
+       private static Logger LOGGER = LoggerFactory.getLogger(DataSnapshot4HistInit.class);
+
        /* Using realtime db */
        private static final String REALTIME_DB = "realtime";
 
@@ -109,6 +109,13 @@ public class DataSnapshot4HistInit {
         */
        public static void main(String[] args) {
                
+               // Set the logging file properties to be used by EELFManager
+               System.setProperty("aai.service.name", DataSnapshot4HistInit.class.getSimpleName());
+               Properties props = System.getProperties();
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS);
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
+
+               
                AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
                PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
                initializer.initialize(ctx);
@@ -149,13 +156,6 @@ public class DataSnapshot4HistInit {
 
        public boolean executeCommand(String[] args) {
                
-               // Set the logging file properties to be used by EELFManager
-               System.setProperty("aai.service.name", DataSnapshot4HistInit.class.getSimpleName());
-               Properties props = System.getProperties();
-               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS);
-               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
-               LOGGER = LoggerFactory.getLogger(DataSnapshot4HistInit.class);
-               
                Boolean dbClearFlag = false;
                JanusGraph graph = null;
                String command = "UNKNOWN"; 
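A JVM ordering note on the logging hunks above: a static field such as LOGGER is initialized when the class is loaded, before the first statement of main() executes, so any system properties that logger creation reads must already be set at that point (or the logging framework must tolerate late configuration). A minimal demonstration, with made-up names:

    public class StaticInitOrderSketch {

        // Built during class loading -- before main() runs.
        private static final String LOGGER = buildLogger();

        private static String buildLogger() {
            // Reads a property the way a logging framework might on first use.
            return "logger(configFile=" + System.getProperty("demo.logging.file") + ")";
        }

        public static void main(String[] args) {
            // Too late for the static field above: it was built while the
            // property was still unset.
            System.setProperty("demo.logging.file", "logback.xml");
            System.out.println(LOGGER);  // prints: logger(configFile=null)
        }
    }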
@@ -576,19 +576,25 @@ public class DataSnapshot4HistInit {
                                // NOTE - it will only use as many threads as the number of files the
                                //    snapshot is  written to.  Ie. if you have a single-file snapshot,
                                //    then this will be single-threaded.
+                               // If the number of files is greater than the 'threadCount' parameter,
+                               //    then we will use more than one pass to keep the number of simultaneous
+                               //    threads at or below the threadCount param.
                                //
                                LOGGER.debug(" Command = " + command );
+                               
                                if (cArgs.oldFileDir != null && cArgs.oldFileDir != ""){
                                        targetDir = cArgs.oldFileDir;
                                }
                                ArrayList <File> snapFilesArr = getFilesToProcess(targetDir, oldSnapshotFileName, false);
                                int fCount = snapFilesArr.size();
+                               int threadPassesNeeded = (int) Math.ceil((double)fCount / (double)threadCount4Create);
+                               int filesPerPass = (int) Math.ceil((double)fCount / (double)threadPassesNeeded);
+                               
                                JanusGraph graph1 = AAIGraph.getInstance().getGraph();
-                               GraphAdminDBUtils.logConfigs(graph1.configuration());
                                long timeStart = System.nanoTime();
-                               HashMap <String,String> old2NewVertIdMap = new <String,String> HashMap ();
-                               HashMap <String,ArrayList<String>> nodeKeyNames = new <String,ArrayList<String>> HashMap ();
-                       
+                               GraphAdminDBUtils.logConfigs(graph1.configuration());
+                               HashMap <String,String> old2NewVertIdMap = new HashMap <String,String> ();
+                               HashMap <String,ArrayList<String>> nodeKeyNames = new HashMap <String,ArrayList<String>> ();
                                try {
                                        LOGGER.debug("call getNodeKeyNames ()" );
                                        nodeKeyNames = getNodeKeyNames();
@@ -597,142 +603,162 @@ public class DataSnapshot4HistInit {
                                        ErrorLogHelper.logException(ae);
                                        AAISystemExitUtil.systemExitCloseAAIGraph(1);
                                }
+
+                               ExecutorService executor = Executors.newFixedThreadPool(fCount);
+                               int threadFailCount = 0;
                                
-                                       // We're going to try loading in the vertices - without edges or properties
-                                       //    using Separate threads
+                               LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs
+                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs
+                                               + ", maxErrorsPerThread = " + maxErrorsPerThread );
+                                       
+                               // --------------------------------------
+                               // Step 1 -- Load empty vertices
+                               // --------------------------------------
+                               int fileNo = 0;
+                               for( int passNo = 1; passNo <= threadPassesNeeded; passNo++ ){
+                                       List<Future<HashMap<String,String>>> listFutV = new ArrayList<Future<HashMap<String,String>>>();
 
-                                       ExecutorService executor = Executors.newFixedThreadPool(fCount);
-                                       List<Future<HashMap<String,String>>> list = new ArrayList<Future<HashMap<String,String>>>();
-                                       for( int i=0; i < fCount; i++ ){
-                                               File f = snapFilesArr.get(i);
+                                       int thisPassCount = 0;
+                                       while( (thisPassCount < filesPerPass) && (fileNo < fCount) ){
+                                               File f = snapFilesArr.get(fileNo);
                                                String fname = f.getName();
                                                String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname;
                                                Thread.sleep(cArgs.staggerThreadDelay);  // Stagger the threads a bit
                                                LOGGER.debug(" -- Read file: [" + fullSnapName + "]");
-                                               LOGGER.debug(" -- Call the PartialVertexLoader to just load vertices  ----");
-                                               LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs
-                                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs
-                                                               + ", maxErrorsPerThread = " + maxErrorsPerThread );
                                                Callable <HashMap<String,String>> vLoader = new PartialVertexLoader(graph1, fullSnapName,
                                                                vertAddDelayMs, failureDelayMs, retryDelayMs, maxErrorsPerThread, LOGGER);
                                                Future <HashMap<String,String>> future = (Future<HashMap<String, String>>) executor.submit(vLoader);
 
-                                               // add Future to the list, we can get return value using Future
-                                               list.add(future);
-                                               LOGGER.debug(" --  Starting PartialDbLoad VERT_ONLY thread # "+ i );
+                                               // add future to the list, we can get return value later
+                                               listFutV.add(future);
+                                               LOGGER.debug(" --  Starting PartialDbLoad VERT_ONLY file # "+ fileNo
+                                                               + " (passNo = " + passNo + ", passIndex = " + thisPassCount + ")");
+                                               
+                                               thisPassCount++;
+                                               fileNo++;
                                        }
-
+                                       
                                        int threadCount4Reload = 0;
-                                       int threadFailCount = 0;
-                                       for(Future<HashMap<String,String>> fut : list){
-                               threadCount4Reload++;
-                               try {
-                                       old2NewVertIdMap.putAll(fut.get());
-                                       LOGGER.debug(" -- back from PartialVertexLoader.  returned thread # " + threadCount4Reload +
-                                                       ", current size of old2NewVertMap is: " + old2NewVertIdMap.size() );
-                               }
-                               catch (InterruptedException e) {
-                                       threadFailCount++;
-                                       AAIException ae = new AAIException("AAI_6128", e , "InterruptedException");
-                                               ErrorLogHelper.logException(ae);
-                                       }
-                               catch (ExecutionException e) {
-                                       threadFailCount++;
-                                       AAIException ae = new AAIException("AAI_6128", e , "ExecutionException");
-                                               ErrorLogHelper.logException(ae);
-                               }
-                           }
-                                       executor.shutdown();
-
-                                       if( threadFailCount > 0 ) {
-                                               String emsg = " FAILURE >> " + threadFailCount + " Vertex-loader thread(s) failed to complete successfully.  ";
-                                               LOGGER.debug(emsg);
-                                               throw new Exception( emsg );
+                                       for(Future<HashMap<String,String>> fut : listFutV){
+                                               threadCount4Reload++;
+                                               try {
+                                                       old2NewVertIdMap.putAll(fut.get());
+                                                       LOGGER.debug(" -- back from PartialVertexLoader.  returned pass # " 
+                                                                       + passNo + ", thread # " 
+                                                                       + threadCount4Reload +
+                                                                       ", current size of old2NewVertMap is: " + old2NewVertIdMap.size() );
+                                               }
+                                               catch (InterruptedException e) {
+                                                       threadFailCount++;
+                                                       AAIException ae = new AAIException("AAI_6128", e , "InterruptedException");
+                                                       ErrorLogHelper.logException(ae);
+                                               }
+                                               catch (ExecutionException e) {
+                                                       threadFailCount++;
+                                                       AAIException ae = new AAIException("AAI_6128", e , "ExecutionException");
+                                                       ErrorLogHelper.logException(ae);
+                                               }
                                        }
+                               } // end of passes for loading empty vertices
+                                       
+                               executor.shutdown();
+                               if( threadFailCount > 0 ) {
+                                       String emsg = " FAILURE >> " + threadFailCount + " Vertex-loader thread(s) failed to complete successfully.  ";
+                                       LOGGER.debug(emsg);
+                                       throw new Exception( emsg );
+                               }
 
-                                       long timeX = System.nanoTime();
-                                       long diffTime =  timeX - timeStart;
-                                       long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
-                                       long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
-                                       LOGGER.debug("   -- To reload just the vertex ids from the snapshot files, it took: " +
-                                                       minCount + " minutes, " + secCount + " seconds " );
+                               long timeX = System.nanoTime();
+                               long diffTime =  timeX - timeStart;
+                               long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
+                               long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
+                               LOGGER.debug("   -- To reload just the vertex ids from the snapshot files, it took: " +
+                                               minCount + " minutes, " + secCount + " seconds " );
 
-                                       // Give the DB a little time to chew on all those vertices
-                                       Thread.sleep(vertToEdgeProcDelay);
+                               // Give the DB a little time to chew on all those new vertices
+                               Thread.sleep(vertToEdgeProcDelay);
 
-                                       // ----------------------------------------------------------------------------------------
-                                       LOGGER.debug("\n\n\n  -- Now do the edges/props ----------------------");
-                                       // ----------------------------------------------------------------------------------------
                                        
-                                       // We're going to try loading in the edges and missing properties
-                                       // Note - we're passing the whole oldVid2newVid mapping to the PartialPropAndEdgeLoader
-                                       //     so that the String-updates to the GraphSON will happen in the threads instead of
-                                       //     here in the un-threaded calling method.
-                                       executor = Executors.newFixedThreadPool(fCount);
-                                       ArrayList<Future<ArrayList<String>>> listEdg = new ArrayList<Future<ArrayList<String>>>();
-                                       for( int i=0; i < fCount; i++ ){
-                                               File f = snapFilesArr.get(i);
+                               // -------------------------------------------------------------
+                               // Step 2 -- Load Edges and properties onto the empty vertices
+                               // -------------------------------------------------------------
+                               LOGGER.debug("\n\n\n  -- Now load the edges/properties ----------------------");
+                               executor = Executors.newFixedThreadPool(fCount);
+
+                               fileNo = 0;
+                               for( int passNo = 1; passNo <= threadPassesNeeded; passNo++ ){
+                                       ArrayList<Future<ArrayList<String>>> listFutEdg = new ArrayList<Future<ArrayList<String>>>();
+
+                                       int thisPassCount = 0;
+                                       while( (thisPassCount < filesPerPass) && (fileNo < fCount) ){
+                                               File f = snapFilesArr.get(fileNo);                                                              
                                                String fname = f.getName();
                                                String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname;
                                                Thread.sleep(cArgs.staggerThreadDelay);  // Stagger the threads a bit
                                                LOGGER.debug(" -- Read file: [" + fullSnapName + "]");
-                                               LOGGER.debug(" -- Call the PartialPropAndEdgeLoader4HistInit for Properties and EDGEs  ----");
-                                               LOGGER.debug(" -- edgeAddDelayMs = " + vertAddDelayMs
-                                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs
-                                                               + ", maxErrorsPerThread = " + maxErrorsPerThread );
-
-                                               
-                                               Callable  eLoader = new PartialPropAndEdgeLoader4HistInit(graph1, fullSnapName,
+                                               Callable <ArrayList<String>> eLoader = new PartialPropAndEdgeLoader4HistInit(graph1, fullSnapName,
                                                                edgeAddDelayMs, failureDelayMs, retryDelayMs,
                                                                old2NewVertIdMap, maxErrorsPerThread, LOGGER,
                                                                scriptStartTime, nodeKeyNames);
+                                               
                                                Future <ArrayList<String>> future = (Future<ArrayList<String>>) executor.submit(eLoader);
 
-                                               //add Future to the list, we can get return value using Future
-                                               listEdg.add(future);
-                                               LOGGER.debug(" --  Starting PartialPropAndEdge thread # "+ i );
+                                               // add future to the list, we can wait for it below
+                                               listFutEdg.add(future);
+                                               LOGGER.debug(" --  Starting PartialPropAndEdgeLoader4HistInit file # "
+                                                               + fileNo + " (pass # " + passNo + ", passIndex " 
+                                                               + thisPassCount + ")" );
+                                               
+                                               thisPassCount++;
+                                               fileNo++;
                                        }
-                                       threadCount4Reload = 0;
-                                       for(Future<ArrayList<String>> fut : listEdg){
-                                   threadCount4Reload++;
-                                   try{
-                                       fut.get();  // DEBUG -- should be doing something with the return value if it's not empty - ie. errors
-                                       LOGGER.debug(" -- back from PartialPropAndEdgeLoader.  thread # " + threadCount4Reload  );
-                                   }
+
+                                       int threadCount4Reload = 0;
+                                       for( Future<ArrayList<String>> fut : listFutEdg ){
+                                               threadCount4Reload++;
+                                               try{
+                                                       fut.get();  // wait for this thread; its returned error list is not examined here
+                                                       LOGGER.debug(" -- back from PartialPropAndEdgeLoader4HistInit.  pass # "
+                                                                       + passNo + ", thread # " + threadCount4Reload  );
+                                               }
                                                catch (InterruptedException e) {
                                                        threadFailCount++;
                                                        AAIException ae = new AAIException("AAI_6128", e , "InterruptedException");
-                                               ErrorLogHelper.logException(ae);
+                                                       ErrorLogHelper.logException(ae);
                                                }
                                                catch (ExecutionException e) {
                                                        threadFailCount++;
                                                        AAIException ae = new AAIException("AAI_6128", e , "ExecutionException");
-                                               ErrorLogHelper.logException(ae);
+                                                       ErrorLogHelper.logException(ae);
                                                }
                                        }
+                                       
+                               } // end of passes for reloading edges and properties 
 
-                                       executor.shutdown();
-                                       if( threadFailCount > 0 ) {
-                                               String emsg = " FAILURE >> " + threadFailCount + " Property/Edge-loader thread(s) failed to complete successfully.  ";
-                                               LOGGER.debug(emsg);
-                                               throw new Exception( emsg );
-                                       }
+                               executor.shutdown();
 
-                                       // This is needed so we can see the data committed by the called threads
-                                       graph1.tx().commit();
+                               if( threadFailCount > 0 ) {
+                                       String emsg = " FAILURE >> " + threadFailCount + " Property/Edge-loader thread(s) failed to complete successfully.  ";
+                                       LOGGER.debug(emsg);
+                                       throw new Exception( emsg );
+                               }
 
-                                       long timeEnd = System.nanoTime();
-                                       diffTime =  timeEnd - timeX;
-                                       minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
-                                       secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
-                                       LOGGER.debug("   -- To reload the edges and properties from snapshot files, it took: " +
-                                                       minCount + " minutes, " + secCount + " seconds " );
+                               // This is needed so we can see the data committed by the called threads
+                               graph1.tx().commit();
 
-                                       long totalDiffTime =  timeEnd - timeStart;
-                                       long totalMinCount = TimeUnit.NANOSECONDS.toMinutes(totalDiffTime);
-                                       long totalSecCount = TimeUnit.NANOSECONDS.toSeconds(totalDiffTime) - (60 * totalMinCount);
-                                       LOGGER.debug("   -- TOTAL multi-threaded reload time: " +
-                                                       totalMinCount + " minutes, " + totalSecCount + " seconds " );
+                               long timeEnd = System.nanoTime();
+                               diffTime =  timeEnd - timeX;
+                               minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
+                               secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
+                               LOGGER.debug("   -- To reload the edges and properties from snapshot files, it took: " +
+                                               minCount + " minutes, " + secCount + " seconds " );
+
+                               long totalDiffTime =  timeEnd - timeStart;
+                               long totalMinCount = TimeUnit.NANOSECONDS.toMinutes(totalDiffTime);
+                               long totalSecCount = TimeUnit.NANOSECONDS.toSeconds(totalDiffTime) - (60 * totalMinCount);
+                               LOGGER.debug("   -- TOTAL multi-threaded reload time: " +
+                                               totalMinCount + " minutes, " + totalSecCount + " seconds " );
+                       
                        } else if (command.equals("CLEAR_ENTIRE_DATABASE")) {
                                // ------------------------------------------------------------------
                                // They are calling this to clear the db before re-loading it
@@ -1075,4 +1101,4 @@ public class DataSnapshot4HistInit {
                
        }
        
-}
+}
\ No newline at end of file
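Both DataSnapshot and DataSnapshot4HistInit keep the same two-step shape: vertices are first created empty while capturing an old-id to new-id map, then a second sweep re-reads each file and attaches properties and edges, translating every endpoint through that map. A sketch with hypothetical loader stubs standing in for PartialVertexLoader and PartialPropAndEdgeLoader:

    import java.util.HashMap;
    import java.util.Map;

    public class TwoStepReloadSketch {

        // Step 1 stand-in: create bare vertices, report snapshot-id -> new-DB-id.
        static Map<String, String> loadEmptyVertices(String snapFile) {
            Map<String, String> old2New = new HashMap<>();
            old2New.put("8244", "20568");  // illustrative ids only
            return old2New;
        }

        // Step 2 stand-in: re-read the file, resolve edge endpoints via the map.
        static void loadEdgesAndProps(String snapFile, Map<String, String> old2New) {
            String endpoint = old2New.get("8244");
            System.out.println("edge attached to vertex " + endpoint);
        }

        public static void main(String[] args) {
            Map<String, String> old2NewVertIdMap = new HashMap<>();
            old2NewVertIdMap.putAll(loadEmptyVertices("snapshot.P0"));
            // The real flow sleeps vertToEdgeProcDelay ms here and commits
            // the graph transaction after step 2.
            loadEdgesAndProps("snapshot.P0", old2NewVertIdMap);
        }
    }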
index a634e0b..3d9ec69 100644 (file)
@@ -27,7 +27,7 @@ import org.apache.commons.configuration.PropertiesConfiguration;
 import org.codehaus.jackson.JsonGenerationException;
 import org.janusgraph.core.JanusGraph;
 import org.janusgraph.core.JanusGraphFactory;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraphConfig;
 import org.onap.aai.edges.EdgeIngestor;
 import org.onap.aai.exceptions.AAIException;
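This import swap -- org.onap.aai.config to org.onap.aai.restclient for PropertyPasswordConfiguration -- repeats across the tool classes below. The call site is unchanged; for reference, a sketch of the bootstrap each tool performs, assuming the project's Spring and restclient dependencies are on the classpath (config registration and refresh elided):

    import org.onap.aai.restclient.PropertyPasswordConfiguration;  // moved from org.onap.aai.config
    import org.springframework.context.annotation.AnnotationConfigApplicationContext;

    public class ToolBootstrapSketch {
        public static void main(String[] args) {
            AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
            PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
            // Runs the initializer against the context's environment before
            // any beans are created, as the tools above do.
            initializer.initialize(ctx);
            // ... each tool then registers its config classes and calls ctx.refresh() ...
        }
    }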
index d8d3ce0..0532e2e 100644 (file)
@@ -28,7 +28,7 @@ import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
 import org.apache.tinkerpop.gremlin.structure.*;
 import org.janusgraph.core.JanusGraph;
 import org.janusgraph.core.JanusGraphFactory;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.dbmap.AAIGraphConfig;
 import org.onap.aai.edges.enums.AAIDirection;
index 2e1bc4b..ba5fad4 100644 (file)
@@ -30,7 +30,7 @@ import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.node.ObjectNode;
 import org.codehaus.jackson.type.TypeReference;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.db.props.AAIProperties;
 import org.onap.aai.dbmap.InMemoryGraph;
 import org.onap.aai.edges.EdgeIngestor;
index 7bb0555..c12f3f3 100644 (file)
@@ -100,29 +100,24 @@ public class ForceDeleteTool {
                        for (int i = 0; i < args.length; i++) {
                                String thisArg = args[i];
                                argStr4Msg = argStr4Msg + " " + thisArg;
+                               
                                if (thisArg.equals("-action")) {
                                        i++;
                                        if (i >= args.length) {
                                                logger.error(" No value passed with -action option.  ");
-                                               System.out.println(" No value passed with -action option.  ");
                                                exit(0);
                                        }
-                                       else {
-                                               actionVal = args[i];
-                                               argStr4Msg = argStr4Msg + " " + actionVal;
-                                       }
+                                       actionVal = args[i];
+                                       argStr4Msg = argStr4Msg + " " + actionVal;
                                }
                                else if (thisArg.equals("-userId")) {
                                        i++;
                                        if (i >= args.length) {
                                                logger.error(" No value passed with -userId option.  ");
-                                               System.out.println(" No value passed with -userId option.  ");
                                                exit(0);
                                        }
-                                       else {
-                                               userIdVal = args[i];
-                                               argStr4Msg = argStr4Msg + " " + userIdVal;
-                                       }
+                                       userIdVal = args[i];
+                                       argStr4Msg = argStr4Msg + " " + userIdVal;
                                }
                                else if (thisArg.equals("-overRideProtection")) {
                                        overRideProtection = true;
@@ -134,54 +129,40 @@ public class ForceDeleteTool {
                                        i++;
                                        if (i >= args.length) {
                                                logger.error(" No value passed with -vertexId option.  ");
-                                               System.out.println(" No value passed with -vertexId option.  "); 
                                                exit(0);
                                        }
-                                       else {
-                                               String nextArg = args[i];
-                                               argStr4Msg = argStr4Msg + " " + nextArg;
-                                       
-                                               try {
-                                                       vertexIdLong = Long.parseLong(nextArg);
-                                               } catch (Exception e) {
-                                                       logger.error("Bad value passed with -vertexId option: ["
+                                       String nextArg = args[i];
+                                       argStr4Msg = argStr4Msg + " " + nextArg;
+                                       try {
+                                               vertexIdLong = Long.parseLong(nextArg);
+                                       } catch (Exception e) {
+                                               logger.error("Bad value passed with -vertexId option: ["
                                                                                + nextArg + "]");
-                                                       System.out.println("Bad value passed with -vertexId option: [" 
-                                                                               + nextArg + "]");
-                                                       exit(0);
-                                               }
+                                               exit(0);
                                        }
                                }
                                else if (thisArg.equals("-params4Collect")) {
                                        i++;
                                        if (i >= args.length) {
                                                logger.error(" No value passed with -params4Collect option.  ");
-                                               System.out.println(" No value passed with -params4Collect option.  "); 
                                                exit(0);
                                        }
-                                       else {
-                                               dataString = args[i];
-                                               argStr4Msg = argStr4Msg + " " + dataString;
-                                       }
+                                       dataString = args[i];
+                                       argStr4Msg = argStr4Msg + " " + dataString;
                                }
                                else if (thisArg.equals("-edgeId")) {
                                        i++;
                                        if (i >= args.length) {
                                                logger.error(" No value passed with -edgeId option.  ");
-                                               System.out.println(" No value passed with -edgeId option.  "); 
                                                exit(0);
                                        }
-                                       else {
-                                               String nextArg = args[i];
-                                               argStr4Msg = argStr4Msg + " " + nextArg;
-                                               edgeIdStr = nextArg;
-                                       }
+                                       String nextArg = args[i];
+                                       argStr4Msg = argStr4Msg + " " + nextArg;
+                                       edgeIdStr = nextArg;
                                }
                                else {
                                        logger.error(" Unrecognized argument passed to ForceDeleteTool: ["
                                                                        + thisArg + "]. ");
-                                       System.out.println(" Unrecognized argument passed to ForceDeleteTool: [" 
-                                                       + thisArg + "]");
                                        logger.error(" Valid values are: -action -userId -vertexId -edgeId -overRideProtection -params4Collect -DISPLAY_ALL_VIDS");
                                        exit(0);
                                }
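The de-nesting above turns each option into a guard clause: log, exit if the value is missing, then fall through to the assignment with no else branch. The shape in isolation, with one hypothetical option (a standalone sketch exits non-zero, though the tool itself uses exit(0)):

    public class ArgParseSketch {
        public static void main(String[] args) {
            String actionVal = "";
            for (int i = 0; i < args.length; i++) {
                String thisArg = args[i];
                if (thisArg.equals("-action")) {
                    i++;
                    if (i >= args.length) {
                        System.err.println(" No value passed with -action option.");
                        System.exit(1);
                    }
                    actionVal = args[i];  // guard passed; no else needed
                } else {
                    System.err.println(" Unrecognized argument: [" + thisArg + "]");
                    System.exit(1);
                }
            }
            System.out.println("action = " + actionVal);
        }
    }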
@@ -368,19 +349,18 @@ public class ForceDeleteTool {
                                System.out.println(infMsg);
                                exit(0);
                        }
-                       else {
-                               if( fd.getEdgeDelConfirmation(logger, userIdVal, thisEdge, overRideProtection) ){
-                                       thisEdge.remove();
-                                       graph.tx().commit();
-                                       String infMsg = ">>>>>>>>>> Removed edge with edgeId = " + edgeIdStr;
-                                       logger.debug( infMsg );
-                                       System.out.println(infMsg);
-                               } 
-                               else {  
-                                       String infMsg = " Delete Cancelled. ";
-                                       System.out.println(infMsg);
-                                       logger.debug( infMsg );
-                               }
+                       
+                       if( fd.getEdgeDelConfirmation(logger, userIdVal, thisEdge, overRideProtection) ){
+                               thisEdge.remove();
+                               graph.tx().commit();
+                               String infMsg = ">>>>>>>>>> Removed edge with edgeId = " + edgeIdStr;
+                               logger.debug( infMsg );
+                               System.out.println(infMsg);
+                       } 
+                       else {  
+                               String infMsg = " Delete Cancelled. ";
+                               System.out.println(infMsg);
+                               logger.debug( infMsg );
                        }
                        exit(0);
                }
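The same flattening applies to the edge-delete path: once the missing-edge case has exited, the confirmation call guards the removal directly. Reduced to a sketch with stand-ins for the confirmation prompt and the JanusGraph calls:

    public class EdgeDeleteFlowSketch {

        // Stand-in for fd.getEdgeDelConfirmation(logger, userId, edge, override).
        static boolean deleteConfirmed(String userId, boolean overRideProtection) {
            return overRideProtection || "ok".equals(userId);
        }

        static void removeEdge(String edgeIdStr, String userId, boolean override) {
            if (deleteConfirmed(userId, override)) {
                // thisEdge.remove(); graph.tx().commit(); in the real tool
                System.out.println(">>>>>>>>>> Removed edge with edgeId = " + edgeIdStr);
            } else {
                System.out.println(" Delete Cancelled. ");
            }
        }

        public static void main(String[] args) {
            removeEdge("4240", "ok", false);
        }
    }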
index 15b4c81..6e48d4b 100644 (file)
@@ -20,7 +20,7 @@
 package org.onap.aai.dbgen.schemamod;
 
 import com.att.eelf.configuration.Configuration;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.Loader;
 import org.onap.aai.introspection.LoaderFactory;
index 837f8a8..4c393eb 100644 (file)
@@ -20,7 +20,7 @@
 package org.onap.aai.dbgen.schemamod;
 
 import com.att.eelf.configuration.Configuration;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.Loader;
 import org.onap.aai.introspection.LoaderFactory;
index 45b5d04..e0adec0 100644 (file)
@@ -22,7 +22,7 @@ package org.onap.aai.historytruncate;
 import java.util.*;
 import java.util.concurrent.TimeUnit;
 
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.apache.tinkerpop.gremlin.process.traversal.P;
 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
 import org.apache.tinkerpop.gremlin.structure.Edge;
index f591120..5d32649 100644 (file)
@@ -62,7 +62,7 @@ public class VersionInterceptor extends AAIContainerFilter implements ContainerR
 
         String uri = requestContext.getUriInfo().getPath();
 
-        if (uri.startsWith("search") || uri.startsWith("util/echo") || uri.startsWith("tools")) {
+        if (uri.startsWith("search") || uri.startsWith("util/echo") || uri.startsWith("tools") || uri.endsWith("audit-sql-db")) {
             return;
                }
 
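The one-line change above widens the interceptor's bypass list so paths ending in audit-sql-db skip version validation alongside the existing search, util/echo and tools prefixes. The predicate in isolation (in the real filter, uri comes from requestContext.getUriInfo().getPath()):

    public class VersionBypassSketch {

        static boolean skipsVersionCheck(String uri) {
            return uri.startsWith("search")
                    || uri.startsWith("util/echo")
                    || uri.startsWith("tools")
                    || uri.endsWith("audit-sql-db");
        }

        public static void main(String[] args) {
            System.out.println(skipsVersionCheck("v1/audit-sql-db"));  // true
            System.out.println(skipsVersionCheck("v14/nodes"));        // false
        }
    }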
index 3f25119..d2a336b 100644 (file)
@@ -140,7 +140,7 @@ public abstract class EdgeSwingMigrator extends Migrator {
 
                        // If the nodeTypes don't match, throw an error 
                        if( !oldNodeType.equals(newNodeType) ){
-                               logger.debug ( "Can not swing edge from a [" + oldNodeType + "] node to a [" +
+                               logger.info ( "Cannot swing edge from a [" + oldNodeType + "] node to a [" +
                                                newNodeType + "] node. ");
                                success = false;
                                return;
@@ -182,7 +182,7 @@ public abstract class EdgeSwingMigrator extends Migrator {
                                                }
                                                
                                                String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent()  ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present"; 
-                                               logger.debug ( "\nSwinging [" + eLabel + "] OUT edge.  \n    >> Unchanged side is [" 
+                                               logger.info ( "\nSwinging [" + eLabel + "] OUT edge.  \n    >> Unchanged side is [" 
                                                                + otherSideNodeType + "][" + otherSideUri + "] \n    >> Edge used to go to [" + oldNodeType 
                                                                + "][" + oldUri + "],\n    >> now swung to [" + newNodeType + "][" + newUri + "]. ");
                                                // remove the old edge
@@ -203,8 +203,9 @@ public abstract class EdgeSwingMigrator extends Migrator {
                                                                Map.Entry pair = (Map.Entry)it.next();
                                                                newOutE.property(pair.getKey().toString(), pair.getValue().toString() );
                                                            }
+                                                           logger.info("\n Edge swing of [" + eLabel + "] OUT edge successful");
                                                        }else {
-                                                               logger.debug("\n Edge was not swung due to Multiplicity Rule Violation...");
+                                                               logger.info("\n Edge was not swung due to Multiplicity Rule Violation...");
                                                        }
                                                }
                                        }
@@ -245,7 +246,7 @@ public abstract class EdgeSwingMigrator extends Migrator {
                                                }
 
                                                String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent()  ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present"; 
-                                               logger.debug ( "\nSwinging [" + eLabel + "] IN edge.  \n    >> Unchanged side is  [" 
+                                               logger.info ( "\nSwinging [" + eLabel + "] IN edge.  \n    >> Unchanged side is  [" 
                                                                + otherSideNodeType + "][" + otherSideUri + "] \n    >>  Edge used to go to [" + oldNodeType 
                                                                + "][" + oldUri + "],\n    >>   now swung to [" + newNodeType + "][" + newUri + "]. ");
                                                
@@ -267,8 +268,9 @@ public abstract class EdgeSwingMigrator extends Migrator {
                                                                Map.Entry pair = (Map.Entry)it.next();
                                                                newInE.property(pair.getKey().toString(), pair.getValue().toString() );
                                                            }
+                                                           logger.info("\n Edge swing of [" + eLabel + "] IN edge successful");
                                                        } else {
-                                                               logger.debug("\t Edge was not swung due to Multiplicity Rule Violation...");
+                                                               logger.info("\t Edge was not swung due to Multiplicity Rule Violation...");
                                                        }
                                                }
                                        }
@@ -276,7 +278,7 @@ public abstract class EdgeSwingMigrator extends Migrator {
                        }       
                        
                } catch (Exception e) {
-                       logger.error("error encountered", e);
+                       logger.info("error encountered", e);
                        success = false;
                }
        }
index 8d758e3..5e969bf 100644 (file)
@@ -19,7 +19,7 @@
  */
 package org.onap.aai.migration;
 
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.edges.EdgeIngestor;
 import org.onap.aai.exceptions.AAIException;
index 7cb71ca..97ed045 100644 (file)
@@ -99,8 +99,8 @@ public class MigrationControllerInternal {
                props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "migration-logback.xml");
                props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES);
 
-               logger = LoggerFactory.getLogger(MigrationControllerInternal.class.getSimpleName());
                MDC.put("logFilenameAppender", MigrationController.class.getSimpleName());
+        logger = LoggerFactory.getLogger(MigrationControllerInternal.class.getSimpleName());           
 
                CommandLineArgs cArgs = new CommandLineArgs();
 
index 7d6a7c1..9498cd1 100644 (file)
@@ -21,7 +21,6 @@ package org.onap.aai.migration;
 
 import java.io.File;
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
@@ -39,6 +38,7 @@ import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.apache.tinkerpop.gremlin.structure.VertexProperty;
 import org.json.JSONException;
 import org.json.JSONObject;
+import org.onap.aai.aailog.logs.AaiDebugLog;
 import org.onap.aai.edges.EdgeIngestor;
 import org.onap.aai.edges.enums.EdgeType;
 import org.onap.aai.edges.exceptions.AmbiguousRuleChoiceException;
@@ -54,9 +54,9 @@ import org.onap.aai.serialization.db.exceptions.NoEdgeRuleFoundException;
 import org.onap.aai.serialization.engines.TransactionalGraphEngine;
 import org.onap.aai.setup.SchemaVersion;
 import org.onap.aai.setup.SchemaVersions;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
 
 /**
  * This class defines an A&AI Migration
@@ -81,6 +81,12 @@ public abstract class Migrator implements Runnable {
 
        protected static final String MIGRATION_ERROR = "Migration Error: ";
        protected static final String MIGRATION_SUMMARY_COUNT = "Migration Summary Count: ";
+       
+       private static AaiDebugLog debugLog = new AaiDebugLog();
+       static {
+               debugLog.setupMDC();
+       }
+
 
        /**
         * Instantiates a new migrator.
@@ -96,8 +102,9 @@ public abstract class Migrator implements Runnable {
                this.schemaVersions = schemaVersions;
         initDBSerializer();
         this.notificationHelper = new NotificationHelper(loader, serializer, loaderFactory, schemaVersions, engine, "AAI-MIGRATION", this.getMigrationName());
-               logger = LoggerFactory.getLogger(this.getClass().getSimpleName());
-               logger.debug("\tInitilization of " + this.getClass().getSimpleName() + " migration script complete.");
+               MDC.put("logFilenameAppender", this.getClass().getSimpleName());
+        logger = LoggerFactory.getLogger(this.getClass().getSimpleName());
+               logAndPrint(logger,"\tInitialization of " + this.getClass().getSimpleName() + " migration script complete.");
        }
 
        /**
@@ -127,7 +134,7 @@ public abstract class Migrator implements Runnable {
         */
        public void createDmaapFiles(List<String> dmaapMsgList) {
                String fileName = getMigrationName() + "-" + UUID.randomUUID();
-               String logDirectory = System.getProperty("AJSC_HOME") + "/logs/migration/dmaapEvents";
+               String logDirectory = System.getProperty("AJSC_HOME") + "/logs/data/dmaapEvents";
 
                File f = new File(logDirectory);
                f.mkdirs();
@@ -136,10 +143,12 @@ public abstract class Migrator implements Runnable {
                        try {
                                Files.write(Paths.get(logDirectory+"/"+fileName), (Iterable<String>)dmaapMsgList.stream()::iterator);
                        } catch (IOException e) {                               
+                               System.out.println("Unable to generate file with dmaap msgs for " + getMigrationName() + 
+                                               " Exception is: " + e.getMessage());
                                logger.error("Unable to generate file with dmaap msgs for " + getMigrationName(), e);
                        }
                } else {
-                       logger.debug("No dmaap msgs detected for " + getMigrationName());
+                       logAndPrint(logger,"No dmaap msgs detected for " + getMigrationName());
                }
        }
 
@@ -150,7 +159,7 @@ public abstract class Migrator implements Runnable {
        public void createDmaapFilesForDelete(List<Introspector> dmaapDeleteIntrospectorList) {try {
                System.out.println("dmaapDeleteIntrospectorList :: " + dmaapDeleteIntrospectorList.size());
                String fileName = "DELETE-"+ getMigrationName() + "-" + UUID.randomUUID();
-               String logDirectory = System.getProperty("AJSC_HOME") + "/logs/migration/dmaapEvents/";
+               String logDirectory = System.getProperty("AJSC_HOME") + "/logs/data/dmaapEvents/";
                File f = new File(logDirectory);
                f.mkdirs();
                
@@ -169,6 +178,8 @@ public abstract class Migrator implements Runnable {
                                                finalStr=svIntr.getName() + "#@#" + svIntr.getURI() + "#@#" + str+"\n";
                                                Files.write(Paths.get(logDirectory + "/" + fileName),finalStr.getBytes(),StandardOpenOption.APPEND);
                                        } catch (IOException e) {
+                                               System.out.println("Unable to generate file with dmaap msgs for " + getMigrationName() + 
+                                                               " Exception is: " + e.getMessage());
                                                logger.error("Unable to generate file with dmaap msgs for "+getMigrationName(), e);
                                        }
 
@@ -201,6 +212,7 @@ public abstract class Migrator implements Runnable {
                                result.put(pk.key(), pk.value());
                        }
                } catch (JSONException e) {
+                       System.out.println("Warning error reading vertex: " + e.getMessage());
                        logger.error("Warning error reading vertex: " + e);
                }
 
@@ -223,6 +235,7 @@ public abstract class Migrator implements Runnable {
                                result.put(pk.key(), pk.value());
                        }
                } catch (JSONException e) {
+                       System.out.println("Warning error reading edge: " + e.getMessage());
                        logger.error("Warning error reading edge: " + e);
                }
 
@@ -404,4 +417,15 @@ public abstract class Migrator implements Runnable {
        public NotificationHelper getNotificationHelper() {
                return this.notificationHelper;
        }
+       
+       /**
+        * Log and print.
+        *
+        * @param logger the logger
+        * @param msg the msg
+        */
+       protected void logAndPrint(Logger logger, String msg) {
+               System.out.println(msg);
+               logger.info(msg);
+       }
 }
index f10a824..f08c56d 100644 (file)
@@ -39,6 +39,7 @@ import org.onap.aai.serialization.engines.query.QueryEngine;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
 import org.onap.aai.setup.SchemaVersions;
 
 /**
@@ -46,7 +47,7 @@ import org.onap.aai.setup.SchemaVersions;
  */
 public class NotificationHelper {
 
-       private static final Logger LOGGER = LoggerFactory.getLogger(NotificationHelper.class);
+       protected Logger LOGGER = null;
        protected final DBSerializer serializer;
        protected final Loader loader;
        protected final TransactionalGraphEngine engine;
@@ -61,6 +62,9 @@ public class NotificationHelper {
                this.transactionId = transactionId;
                this.sourceOfTruth = sourceOfTruth;
                this.notification = new UEBNotification(loader, loaderFactory, schemaVersions);
+               MDC.put("logFilenameAppender", this.getClass().getSimpleName());
+               LOGGER = LoggerFactory.getLogger(this.getClass().getSimpleName());
+               
        }
        
        public void addEvent(Vertex v, Introspector obj, EventAction action, URI uri, String basePath) throws UnsupportedEncodingException, AAIException {
index f45b20b..685855c 100644 (file)
@@ -23,7 +23,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Map.Entry;
 
 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
@@ -113,6 +112,8 @@ public abstract class ValueMigrator extends Migrator {
                         }                                         
                     }
                 } catch (Exception e) {
+                       System.out.println(String.format("caught exception updating aai-node-type %s's property %s's value to " +
+                            "%s: %s", nodeType, property, newValue.toString(), e.getMessage()));
                     logger.error(String.format("caught exception updating aai-node-type %s's property %s's value to " +
                             "%s: %s", nodeType, property, newValue.toString(), e.getMessage()));
                     logger.error(e.getMessage());
@@ -121,12 +122,12 @@ public abstract class ValueMigrator extends Migrator {
               this.nodeTotalSuccess.put(nodeType, Integer.toString(this.subTotal));
         }
         
-        logger.debug ("\n \n ******* Final Summary for " + " " + getMigrationName() +" ********* \n");                
+        logAndPrint(logger, " ******* Final Summary for " + " " + getMigrationName() +" ********* ");                
         for (Map.Entry<String, String> migratedNode: nodeTotalSuccess.entrySet()) {
-               logger.debug("Total Migrated Records for " + migratedNode.getKey() +": " + migratedNode.getValue());
+               logAndPrint(logger,"Total Migrated Records for " + migratedNode.getKey() +": " + migratedNode.getValue());
                
         }
-        logger.debug(this.MIGRATION_SUMMARY_COUNT + "Total Migrated Records: "+ migrationSuccess);           
+        logAndPrint(logger,this.MIGRATION_SUMMARY_COUNT + "Total Migrated Records: "+ migrationSuccess);           
         
     }
     
@@ -136,18 +137,18 @@ public abstract class ValueMigrator extends Migrator {
             String propertyValue = v.property(property).value().toString();
             if (propertyValue.isEmpty()) {
                 v.property(property, newValue);
-                logger.debug(String.format("Node Type %s: Property %s is empty, adding value %s",
+               logAndPrint(logger,String.format("Node Type %s: Property %s is empty, adding value %s",
                         nodeType, property, newValue.toString()));
                 this.touchVertexProperties(v, false);
                 updateDmaapList(v);
                 this.migrationSuccess++;
                 this.subTotal++;
             } else {
-                logger.debug(String.format("Node Type %s: Property %s value already exists - skipping",
+               logAndPrint(logger,String.format("Node Type %s: Property %s value already exists - skipping",
                         nodeType, property));
             }
         } else {
-            logger.debug(String.format("Node Type %s: Property %s does not exist or " +
+               logAndPrint(logger,String.format("Node Type %s: Property %s does not exist or " +
                     "updateExistingValues flag is set to True - adding the property with value %s",
                     nodeType, property, newValue.toString()));
             v.property(property, newValue);
@@ -178,7 +179,7 @@ public abstract class ValueMigrator extends Migrator {
     private void updateDmaapList(Vertex v){
        String dmaapMsg = System.nanoTime() + "_" + v.id().toString() + "_"     + v.value("resource-version").toString();
         dmaapMsgList.add(dmaapMsg);
-        logger.debug("\tAdding Updated Vertex " + v.id().toString() + " to dmaapMsgList....");
+        logAndPrint(logger,"\tAdding Updated Vertex " + v.id().toString() + " to dmaapMsgList....");
     }
     
     public boolean isUpdateDmaap(){
index a2c814e..6480e2a 100644 (file)
@@ -43,6 +43,7 @@ import org.onap.aai.serialization.db.EdgeSerializer;
 import org.onap.aai.serialization.engines.TransactionalGraphEngine;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.slf4j.MDC;
 
 /**
  * This class recursively merges two vertices passed in.
@@ -52,8 +53,7 @@ import org.slf4j.LoggerFactory;
  */
 public class VertexMerge {
 
-       private final Logger logger = LoggerFactory.getLogger(this.getClass().getSimpleName());
-
+       protected Logger logger = null;
        private final GraphTraversalSource g;
        private final TransactionalGraphEngine engine;
        private final DBSerializer serializer;
@@ -70,6 +70,9 @@ public class VertexMerge {
                this.loader = builder.getLoader();
                this.notificationHelper = builder.getHelper();
                this.hasNotifications = builder.isHasNotifications();
+               MDC.put("logFilenameAppender", this.getClass().getSimpleName());
+        logger = LoggerFactory.getLogger(this.getClass().getSimpleName());
+               
        }
        
        /**
diff --git a/src/main/java/org/onap/aai/migration/v20/MigrateL2DefaultToFalse.java b/src/main/java/org/onap/aai/migration/v20/MigrateL2DefaultToFalse.java
new file mode 100644 (file)
index 0000000..cbe15de
--- /dev/null
@@ -0,0 +1,79 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v20;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Status;
+import org.onap.aai.migration.ValueMigrator;
+import org.onap.aai.setup.SchemaVersions;
+
+
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateL2DefaultToFalse extends ValueMigrator {
+       
+       protected static final String L_INTERFACE_NODE_TYPE = "l-interface";
+       protected static final String L2_MULTI_PROPERTY = "l2-multicasting";
+               
+       private static Map<String, Map<String, Boolean>> map;
+    private static Map<String, Boolean> pair;
+       public MigrateL2DefaultToFalse(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, setL2ToFalse(), false);
+       }       
+               
+       private static Map<String, Map<String, Boolean>> setL2ToFalse(){
+               map = new HashMap<>();
+        pair = new HashMap<>();
+
+               pair.put(L2_MULTI_PROPERTY, false);
+               
+               map.put(L_INTERFACE_NODE_TYPE, pair);           
+        
+        return map;
+       }       
+
+       @Override
+       public Status getStatus() {
+               return Status.SUCCESS;
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.of(new String[]{L_INTERFACE_NODE_TYPE});
+       }
+
+       @Override
+       public String getMigrationName() {
+               return "MigrateL2DefaultToFalse";
+       }
+
+}
\ No newline at end of file
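A minimal sketch (not part of the patch) of the ValueMigrator contract this class plugs into: the nested map is keyed node-type -> property -> default value, and the final constructor argument false (updateExistingValues) means, per the ValueMigrator hunk above, that only l-interface vertices whose l2-multicasting property is missing or empty receive the default.

    // Sketch only; equivalent to what setL2ToFalse() builds:
    Map<String, Map<String, Boolean>> defaults = new HashMap<>();
    defaults.put("l-interface", Collections.singletonMap("l2-multicasting", false));
    // Passed to ValueMigrator with updateExistingValues=false, so existing
    // non-empty l2-multicasting values are left untouched.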
diff --git a/src/main/java/org/onap/aai/migration/v20/MigrateVlanTag.java b/src/main/java/org/onap/aai/migration/v20/MigrateVlanTag.java
new file mode 100644 (file)
index 0000000..0519cad
--- /dev/null
@@ -0,0 +1,391 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v20;
+
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.javatuples.Pair;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.EdgeSwingMigrator;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConstants;
+
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateVlanTag extends EdgeSwingMigrator {
+
+       protected static final String CLOUD_REGION_NODE_TYPE = "cloud-region";
+       protected static final String CLOUD_OWNER = "cloud-owner";
+       protected static final String CLOUD_REGION_ID = "cloud-region-id";
+       protected static final String VLAN_RANGE = "vlan-range";
+       protected static final String VLAN_RANGE_ID = "vlan-range-id";
+       protected static final String VLAN_TAG = "vlan-tag";
+       protected static final String VLAN_TAG_ID = "vlan-tag-id";
+       protected static final String UPGRADE_CYCLE = "upgrade-cycle";
+
+       protected final AtomicInteger skippedRowsCount = new AtomicInteger(0);
+       protected final AtomicInteger processedRowsCount = new AtomicInteger(0);
+       private static List<String> dmaapMsgList = new ArrayList<String>();
+       private static List<Introspector> dmaapDeleteIntrospectorList = new ArrayList<Introspector>();
+       private static int vlanRangeCount = 0;
+       private static int vlanRangeFailureCount = 0;
+       private static int vlanTagCount = 0;
+       private static int vlanTagFailureCount = 0;
+       
+       private boolean success = true;
+       protected int headerLength;
+
+       protected final AtomicInteger falloutRowsCount = new AtomicInteger(0);
+
+       public MigrateVlanTag(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor,
+                       EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+       }
+
+       @Override
+       public void run() {
+               logger.info("---------- Start Updating vlan-tags under cloud-region  ----------");
+
+               String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
+               String feedDir = logDir +  AAIConstants.AAI_FILESEP + "data"
+                               + AAIConstants.AAI_FILESEP + "migration-input-files" 
+                               + AAIConstants.AAI_FILESEP;
+               String fileName = feedDir + "dslResults.json";
+               logger.info(fileName);
+               logger.info("---------- Processing vlan-tags from file  ----------");
+
+               Map<String, Vertex> cloudOwnerRegionIdWiseVertexMap = new HashMap<>();
+               Map<String, Vertex> vlanTagIdWiseVertexMap = new HashMap<>();
+
+               try {
+                       List<Vertex> cloudRegionList = this.engine.asAdmin().getTraversalSource().V()
+                                       .has(AAIProperties.NODE_TYPE, CLOUD_REGION_NODE_TYPE).toList();
+
+                       List<Vertex> vlantagIdList = this.engine.asAdmin().getTraversalSource().V()
+                                       .has(AAIProperties.NODE_TYPE, VLAN_TAG).toList();
+
+                       String cloudOwner, cloudRegionId;
+                       for (Vertex vertex : cloudRegionList) {
+                               cloudOwner = getCloudOwnerNodeValue(vertex);
+                               cloudRegionId = getCloudRegionIdNodeValue(vertex);
+                               if (!cloudOwner.isEmpty() && !cloudRegionId.isEmpty()) {
+                                       cloudOwnerRegionIdWiseVertexMap.put(cloudOwner + "/" + cloudRegionId, vertex);
+                               }
+                       }
+
+                       String vlantagId;
+                       for (Vertex vertex : vlantagIdList) {
+                               vlantagId = getVlanTagIdNodeValue(vertex);
+                               if (!vlantagId.isEmpty() && 
+                                               !(vertex.property(AAIProperties.AAI_URI).isPresent() &&
+                                                       vertex.property(AAIProperties.AAI_URI).toString().contains(CLOUD_REGION_NODE_TYPE))) {
+                                       vlanTagIdWiseVertexMap.put(vlantagId, vertex);
+                               }
+                       }
+
+                       JSONParser jsonParser = new JSONParser();
+                       Object obj = jsonParser
+                                       .parse(new FileReader(fileName));
+                       JSONObject vlanRangeResultJsonObj = (JSONObject) obj;
+                       JSONArray vlanRangeResultJsonArray = (JSONArray) vlanRangeResultJsonObj.get("results");
+
+                       JSONObject vlanRangeJsonObject, vlanTagsJsonObject, vlanTagJsonObject;
+                       JSONArray vlanTagJsonArray;
+                       String vlanTagId;
+                       boolean isFirstTime;
+                       Vertex vlanRangeVtx;
+                       Set<String> matchedVlanTagIdSet = new HashSet<String>();
+                       for (int i = 0; i < vlanRangeResultJsonArray.size(); i++) {
+                               isFirstTime = true;
+                               vlanRangeVtx = null;
+                               vlanRangeJsonObject = (JSONObject) vlanRangeResultJsonArray.get(i);
+                               cloudOwner = getCloudOwnerValueFromUrl((String) vlanRangeJsonObject.get("url"));
+                               cloudRegionId = getCloudRegionIdValueFromUrl((String) vlanRangeJsonObject.get("url"));
+                               if (cloudOwnerRegionIdWiseVertexMap.containsKey(cloudOwner + "/" + cloudRegionId)) { // api1 key contains api2 key
+                                       JSONObject vlanRangeInnerObject = (JSONObject) vlanRangeJsonObject.get("vlan-range");
+                                       String vlanRangeId = (String) vlanRangeInnerObject.get("vlan-range-id");
+                                       vlanTagsJsonObject = (JSONObject) vlanRangeInnerObject.get("vlan-tags");
+                                       vlanTagJsonArray = (JSONArray) vlanTagsJsonObject.get("vlan-tag");
+
+                                       for (int j = 0; j < vlanTagJsonArray.size(); j++) {
+                                               vlanTagJsonObject = (JSONObject) vlanTagJsonArray.get(j);
+                                               vlanTagId = (String) vlanTagJsonObject.get("vlan-tag-id");
+                                               if (vlanTagIdWiseVertexMap.containsKey(vlanTagId)) { // api1 key contains api2 key
+                                                       matchedVlanTagIdSet.add(vlanTagId);
+                                                       if (isFirstTime) {
+                                                               isFirstTime = false;
+                                                               vlanRangeVtx = createNewVlanRangeFromCloudRegion(vlanRangeInnerObject, cloudOwner,
+                                                                               cloudRegionId, vlanRangeId);
+                                                       }
+                                                       Vertex vertex = vlanTagIdWiseVertexMap.get(vlanTagId);
+                                                       createNewVlanTagFromVlanRange(vlanTagJsonObject, vlanRangeVtx, cloudOwner, cloudRegionId,
+                                                                       vlanRangeId, vertex);
+                                                       vlanTagIdWiseVertexMap.remove(vlanTagId);
+                                               }
+                                       }
+                               }
+                       }
+                       logger.info("******* Final Summary of Migrate vlan-tag  Migration *********");
+                       if(!vlanTagIdWiseVertexMap.isEmpty()) {
+                               logger.info("The following vlan-ids in A&AI graph were not found in the Narad input "
+                                               + "file and not migrated: ");                   
+                               for(Vertex vlanTagIdWiseVertex : vlanTagIdWiseVertexMap.values()) {
+                                       logger.info(vlanTagIdWiseVertex.property("vlan-tag-id").value().toString());
+                               }
+                       }
+                       logger.info(MIGRATION_SUMMARY_COUNT+"Total Vlan Ranges Updated Count:" + vlanRangeCount);
+                       logger.info(MIGRATION_ERROR+"Total Vlan Ranges Not Updated Count:" + vlanRangeFailureCount);
+                       logger.info(MIGRATION_SUMMARY_COUNT+"Total Vlan Tags Updated Successfully Count: " + vlanTagCount);
+                       logger.info(MIGRATION_ERROR+"Total Vlan Tags Not Updated Count: " + vlanTagFailureCount);
+               } catch (FileNotFoundException e) {
+                       logger.info("ERROR: Could not find file " + fileName + ": " + e.getMessage());
+                       logger.error("ERROR: Could not find file " + fileName, e);
+                       success = false;
+               } catch (IOException e) {
+                       logger.info("ERROR: Issue reading file " + fileName + ": " + e.getMessage());
+                       logger.error("ERROR: Issue reading file " + fileName, e);
+                       success = false;
+               } catch (Exception e) {
+                       logger.info("encountered exception: " + e.getMessage());
+                       logger.error("encountered exception", e);
+                       e.printStackTrace();
+                       success = false;
+               }
+       }
+
+       private Vertex createNewVlanRangeFromCloudRegion(JSONObject vlanRangeJsonObject, String cloudOwner,
+                       String cloudRegionId, String vlanRangeId) {
+
+               Vertex vlanRangeVtx = null;
+               try {
+
+                       GraphTraversal<Vertex, Vertex> cloudRegionVtxList = this.engine.asAdmin().getTraversalSource().V()
+                                       .has(AAIProperties.NODE_TYPE, CLOUD_REGION_NODE_TYPE).has("cloud-owner", cloudOwner)
+                                       .has("cloud-region-id", cloudRegionId);
+
+                       if (cloudRegionVtxList != null && cloudRegionVtxList.hasNext()) {
+                               Vertex cloudRegionVtx = cloudRegionVtxList.next();
+                               if (cloudRegionVtx != null) {
+                                       Introspector vlanRange = loader.introspectorFromName("vlan-range");
+                                       vlanRangeVtx = serializer.createNewVertex(vlanRange);
+                                       vlanRange.setValue("vlan-range-id", vlanRangeId);// required
+                                       vlanRange.setValue("vlan-id-lower", (Long) vlanRangeJsonObject.get("vlan-id-lower"));// required
+                                       vlanRange.setValue("vlan-id-upper", (Long) vlanRangeJsonObject.get("vlan-id-upper"));// required
+                                       vlanRange.setValue("vlan-type", (String) vlanRangeJsonObject.get("vlan-type"));// required
+                                       this.createTreeEdge(cloudRegionVtx, vlanRangeVtx);
+                                       vlanRangeVtx.property(AAIProperties.AAI_URI,
+                                                       "/cloud-infrastructure/" + "cloud-regions/cloud-region/" + cloudOwner + "/" + cloudRegionId
+                                                                       + "/vlan-ranges/vlan-range/" + vlanRangeId);
+                                       serializer.serializeSingleVertex(vlanRangeVtx, vlanRange, "migrations");
+
+                                       logger.info("\t Created new vlan-range " + vlanRangeVtx + " with vlan-range-id = "
+                                                       + (String) vlanRangeJsonObject.get("vlan-range-id"));
+
+                                       String dmaapMsg = System.nanoTime() + "_" + vlanRangeVtx.id().toString() + "_"
+                                                       + vlanRangeVtx.value("resource-version").toString();
+                                       dmaapMsgList.add(dmaapMsg);
+                                       vlanRangeCount++;
+                               }
+                       }
+
+               } catch (Exception e) {
+                       vlanRangeFailureCount++;
+                       logger.error("vlan-range failure: ", e);
+                       logger.info("vlan-range with id : " + vlanRangeId + " failed: " + e.getMessage());
+                       
+               }
+               return vlanRangeVtx;
+
+       }
+
+       private Vertex createNewVlanTagFromVlanRange(JSONObject vlanTagJsonObject, Vertex vlanRangeVtx, String cloudOwner,
+                       String cloudRegionId, String vlanRangeId, Vertex oldVlanTagVtx) {
+
+               Vertex vlanTagVtx = null;
+               try {
+                       Introspector vlanTag = loader.introspectorFromName("vlan-tag");
+                       vlanTagVtx = serializer.createNewVertex(vlanTag);
+                       String vlanTagId = (String) vlanTagJsonObject.get("vlan-tag-id");
+                       vlanTag.setValue("vlan-tag-id", vlanTagId);// required
+                       vlanTag.setValue("vlan-tag-role", (String) vlanTagJsonObject.get("vlan-tag-role"));// required
+                       vlanTag.setValue("is-private", (Boolean) vlanTagJsonObject.get("is-private"));// required
+                       if (vlanTagJsonObject.containsKey("vlan-id-inner"))
+                               vlanTag.setValue("vlan-id-inner", (Long) vlanTagJsonObject.get("vlan-id-inner"));
+                       if (vlanTagJsonObject.containsKey("vlan-id-outer"))
+                               vlanTag.setValue("vlan-id-outer", (Long) vlanTagJsonObject.get("vlan-id-outer"));
+                       if (vlanTagJsonObject.containsKey("vlan-tag-function"))
+                               vlanTag.setValue("vlan-tag-function", (String) vlanTagJsonObject.get("vlan-tag-function"));
+                       if (vlanTagJsonObject.containsKey("config-phase"))
+                               vlanTag.setValue("config-phase", (String) vlanTagJsonObject.get("config-phase"));
+                       if (vlanTagJsonObject.containsKey("vlan-tag-type"))
+                               vlanTag.setValue("vlan-tag-type", (String) vlanTagJsonObject.get("vlan-tag-type"));
+                       this.createTreeEdge(vlanRangeVtx, vlanTagVtx);
+                       vlanTagVtx.property(AAIProperties.AAI_URI,
+                                       "/cloud-infrastructure/" + "cloud-regions/cloud-region/" + cloudOwner + "/" + cloudRegionId
+                                                       + "/vlan-ranges/vlan-range/" + vlanRangeId + "/vlan-tags/vlan-tag/" + vlanTagId);
+                       executeModifyOperation(oldVlanTagVtx, vlanTagVtx);
+                       Introspector deletedVlanEvent = serializer.getLatestVersionView(oldVlanTagVtx);
+                       dmaapDeleteIntrospectorList.add(deletedVlanEvent);
+                       oldVlanTagVtx.remove();
+                       serializer.serializeSingleVertex(vlanTagVtx, vlanTag, "migrations");
+
+                       logger.info("\t Created new vlan-tag " + vlanTagVtx + " with vlan-tag-id = "
+                                       + (String) vlanTagJsonObject.get("vlan-tag-id"));
+
+                       String dmaapMsg = System.nanoTime() + "_" + vlanTagVtx.id().toString() + "_"
+                                       + vlanTagVtx.value("resource-version").toString();
+                       dmaapMsgList.add(dmaapMsg);
+                       vlanTagCount++;
+               } catch (Exception e) {
+                       vlanTagFailureCount++;
+                       logger.error("vlan-tag failure: ", e);
+                       if(vlanTagJsonObject != null && vlanTagJsonObject.get("vlan-tag-id") != null){
+                               logger.info("vlan-tag with id : " + vlanTagJsonObject.get("vlan-tag-id") + " failed: " + e.getMessage());
+                       }
+                       else {
+                               logger.info("vlan-tag failure: " + e.getMessage());
+                       }
+               }
+               return vlanTagVtx;
+
+       }
+
+       private String getCloudRegionIdNodeValue(Vertex vertex) {
+               String propertyValue = "";
+               if (vertex != null && vertex.property(CLOUD_REGION_ID).isPresent()) {
+                       propertyValue = vertex.property(CLOUD_REGION_ID).value().toString();
+               }
+               return propertyValue;
+       }
+
+       private String getCloudOwnerNodeValue(Vertex vertex) {
+               String propertyValue = "";
+               if (vertex != null && vertex.property(CLOUD_OWNER).isPresent()) {
+                       propertyValue = vertex.property(CLOUD_OWNER).value().toString();
+               }
+               return propertyValue;
+       }
+
+       private String getVlanTagIdNodeValue(Vertex vertex) {
+               String propertyValue = "";
+               if (vertex != null && vertex.property(VLAN_TAG_ID).isPresent()) {
+                       propertyValue = vertex.property(VLAN_TAG_ID).value().toString();
+               }
+               return propertyValue;
+       }
+
+       private String getCloudOwnerValueFromUrl(String url) {
+               String[] arr = url.split("/");
+               return arr[6];
+       }
+
+       private String getCloudRegionIdValueFromUrl(String url) {
+               String[] arr = url.split("/");
+               return arr[7];
+       }
+
+       protected void executeModifyOperation(Vertex fromNode, Vertex toNode) {
+               try {
+                       
+                       this.swingEdges(fromNode, toNode, "l3-network", "none", "BOTH");
+                       this.swingEdges(fromNode, toNode, "cp", "none", "BOTH");
+               } catch (Exception e) {
+                       logger.error("error encountered", e);
+                       success = false;
+               }
+       }
+       
+       @Override
+       public void commit() {
+        engine.commit();
+        createDmaapFiles(dmaapMsgList);
+        createDmaapFilesForDelete(dmaapDeleteIntrospectorList);
+       }
+
+       @Override
+       public Status getStatus() {
+               if (success) {
+                       return Status.SUCCESS;
+               } else {
+                       return Status.FAILURE;
+               }
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.of(new String[] { CLOUD_REGION_NODE_TYPE });
+       }
+
+       @Override
+       public String getMigrationName() {
+               return "MigrateVlanTag";
+       }
+
+       @Override
+       public List<Pair<Vertex, Vertex>> getAffectedNodePairs() {
+               return null;
+       }
+
+       @Override
+       public String getNodeTypeRestriction() {
+               return null;
+       }
+
+       @Override
+       public String getEdgeLabelRestriction() {
+               return null;
+       }
+
+       @Override
+       public String getEdgeDirRestriction() {
+               return null;
+       }
+
+       @Override
+       public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) {
+       }
+
+}
\ No newline at end of file
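A small illustration (not part of the patch) of the URL shape that getCloudOwnerValueFromUrl and getCloudRegionIdValueFromUrl assume when indexing split("/") at positions 6 and 7; the /aai/v19 prefix and the owner/region values here are hypothetical:

    // Hypothetical "url" value from a dslResults.json entry:
    String url = "/aai/v19/cloud-infrastructure/cloud-regions/cloud-region/my-cloud-owner/my-region-id/vlan-ranges";
    String[] arr = url.split("/");
    // arr[0] is "" because of the leading slash, so
    // arr[6] == "my-cloud-owner" (cloud-owner) and arr[7] == "my-region-id" (cloud-region-id)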
diff --git a/src/main/java/org/onap/aai/rest/AuditSqlDbConsumer.java b/src/main/java/org/onap/aai/rest/AuditSqlDbConsumer.java
new file mode 100644 (file)
index 0000000..aec7224
--- /dev/null
@@ -0,0 +1,120 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest;
+
+import org.onap.aai.audit.AuditGraphson2Sql;
+import org.onap.aai.concurrent.AaiCallable;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.restcore.HttpMethod;
+import org.onap.aai.restcore.RESTAPI;
+import org.onap.aai.util.AAIConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
+import javax.ws.rs.*;
+import javax.ws.rs.core.*;
+import javax.ws.rs.core.Response.Status;
+
+@Component
+@Path("{version: v1}/audit-sql-db")
+public class AuditSqlDbConsumer extends RESTAPI {
+
+       private static final String SOURCE_FILE_NAME = "sourceFileName";
+       private static final String SOURCE_FILE_DIR_DEFAULT = "logs/data/dataSnapshots/";
+
+       private static final Logger LOGGER = LoggerFactory.getLogger(AuditSqlDbConsumer.class);
+
+       private String rdbmsDbName;
+       private AuditGraphson2Sql auditGraphson2Sql;
+
+       @Autowired
+       public AuditSqlDbConsumer(
+               AuditGraphson2Sql auditGraphson2Sql,
+               @Value("${aperture.rdbmsname}") String rdbmsDbName
+       ){
+               this.auditGraphson2Sql  = auditGraphson2Sql;
+               this.rdbmsDbName                = rdbmsDbName;
+       }
+
+
+       @GET
+       @Consumes({ MediaType.APPLICATION_JSON})
+       @Produces({ MediaType.APPLICATION_JSON})
+       public Response executeAudit(String content,
+                                                                @PathParam("uri") @Encoded String uri,
+                                                                @Context HttpHeaders headers,
+                                                                @Context UriInfo info){
+               return runner(AAIConstants.AAI_GRAPHADMIN_TIMEOUT_ENABLED,
+                               AAIConstants.AAI_GRAPHADMIN_TIMEOUT_APP,
+                               AAIConstants.AAI_GRAPHADMIN_TIMEOUT_LIMIT,
+                               headers,
+                               info,
+                               HttpMethod.GET,
+                               new AaiCallable<Response>() {
+                                       @Override
+                                       public Response process() {
+                                               return processExecuteAudit(content, uri, headers, info);
+                                       }
+                               }
+               );
+       }
+
+
+       public Response processExecuteAudit(String content,
+                                                                               @PathParam("uri") @Encoded String uri,
+                                                                               @Context HttpHeaders headers,
+                                                                               @Context UriInfo info) {
+
+               Response response = null;
+               try {
+                       this.checkParams(info.getQueryParameters());
+
+                       String resStr = auditGraphson2Sql.runAudit( rdbmsDbName,
+                                       info.getQueryParameters().getFirst(SOURCE_FILE_NAME),
+                                       SOURCE_FILE_DIR_DEFAULT );
+
+                       LOGGER.info ("Completed");
+                       
+                       response = Response.status(Status.OK)
+                                       .type(MediaType.APPLICATION_JSON)
+                                       .entity(resStr).build();
+               
+               } catch (AAIException e) {
+                       response = consumerExceptionResponseGenerator(headers, info, HttpMethod.GET, e);
+               } catch (Exception e ) {
+                       AAIException ex = new AAIException("AAI_4000", e);
+                       response = consumerExceptionResponseGenerator(headers, info, HttpMethod.GET, ex);
+               }
+               
+               return response;
+       }
+
+
+       public void checkParams(MultivaluedMap<String, String> params) throws AAIException {
+               
+               if (!params.containsKey(SOURCE_FILE_NAME)) {
+                       throw new AAIException("AAI_6120", "parameter: sourceFileName (of snapshot file) is required. ");
+               }
+       }
+
+}
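Illustrative request against the new endpoint (the snapshot file name is hypothetical): GET /v1/audit-sql-db?sourceFileName=dataSnapshot.graphSON.202005290000. The sourceFileName query parameter is required; checkParams() raises AAI_6120 without it, and the named file is read from the default directory logs/data/dataSnapshots/.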
diff --git a/src/main/java/org/onap/aai/rest/client/ApertureConfiguration.java b/src/main/java/org/onap/aai/rest/client/ApertureConfiguration.java
new file mode 100644 (file)
index 0000000..34fdbec
--- /dev/null
@@ -0,0 +1,49 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest.client;
+
+import org.onap.aai.restclient.RestClient;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Profile;
+
+
+@Configuration
+public class ApertureConfiguration {
+
+    @Bean(name = "apertureRestClient")
+    @ConditionalOnProperty(name = "aperture.service.client", havingValue = "two-way-ssl", matchIfMissing = true)
+    public RestClient apertureRestClientTwoWaySSL() {
+        return new ApertureServiceRestClient();
+    }
+
+    @Bean(name = "apertureRestClient")
+    @ConditionalOnProperty(name = "aperture.service.client", havingValue = "no-auth")
+    public RestClient apertureRestClientNoAuth() {
+        return new ApertureServiceNoAuthClient();
+    }
+
+    @Bean(name = "apertureRestClient")
+    @ConditionalOnProperty(name = "aperture.service.client", havingValue = "one-way-ssl")
+    public RestClient apertureRestClientOneWaySSL() {
+        return new ApertureServiceOneWayClient();
+    }
+}
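A configuration sketch (all values hypothetical) of how these conditional beans are selected. The aperture.service.client property picks the client flavor; with matchIfMissing = true on the two-way-ssl bean, that flavor is the default when the property is absent. The base-url and timeout keys are consumed by the client classes below, and aperture.rdbmsname by AuditSqlDbConsumer above.

    aperture.service.client=no-auth
    aperture.service.base.url=http://localhost:8457
    aperture.service.timeout-in-milliseconds=300000
    aperture.rdbmsname=aai_audit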
diff --git a/src/main/java/org/onap/aai/rest/client/ApertureService.java b/src/main/java/org/onap/aai/rest/client/ApertureService.java
new file mode 100644 (file)
index 0000000..493a4cd
--- /dev/null
@@ -0,0 +1,142 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest.client;
+
+
+import com.google.gson.JsonObject;
+import org.apache.http.conn.ConnectTimeoutException;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.restclient.RestClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Profile;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.ResponseEntity;
+import org.springframework.stereotype.Service;
+import javax.annotation.PostConstruct;
+import java.net.ConnectException;
+import java.net.SocketTimeoutException;
+import java.util.*;
+
+@Service
+public class ApertureService {
+
+    /**
+     * Error indicating that the service trying to connect is down
+     */
+    static final String CONNECTION_REFUSED_STRING =
+        "Connection refused to the Aperture microservice due to service unreachable";
+
+    /**
+     * Error indicating that the server is unable to reach the port
+     * Could be server related connectivity issue
+     */
+    static final String CONNECTION_TIMEOUT_STRING =
+        "Connection timeout to the Aperture microservice as this could " +
+        "indicate the server is unable to reach port, " +
+        "please check on server by running: nc -w10 -z -v ${APERTURE_HOST} ${APERTURE_PORT}";
+
+    /**
+     * Error indicating that the request exceeded the allowed time
+     *
+     * Note: This means that the service could be active its
+     *       just taking some time to process our request
+     */
+    static final String REQUEST_TIMEOUT_STRING =
+        "Request to Aperture service took longer than the currently set timeout";
+
+    static final String APERTURE_ENDPOINT = "/v1/audit/";
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(ApertureService.class);
+    private final RestClient apertureRestClient;
+    private final String appName;
+
+
+    @Autowired
+    public ApertureService(
+        @Qualifier("apertureRestClient") RestClient apertureRestClient,
+        @Value("${spring.application.name}") String appName
+    ){
+        this.apertureRestClient = apertureRestClient;
+        this.appName = appName;
+
+        LOGGER.info("Successfully initialized the aperture service");
+    }
+
+
+    public JsonObject runAudit(Long tStamp, String dbName) throws AAIException {
+
+        Map<String, String> httpHeaders = new HashMap<>();
+
+        httpHeaders.put("X-FromAppId", appName);
+        httpHeaders.put("X-TransactionID", UUID.randomUUID().toString());
+        httpHeaders.put("Accept", "application/json");
+
+        String queryParams = "?timestamp=" + tStamp + "&dbname=" + dbName;
+
+        ResponseEntity responseEntity = null;
+        try {
+            responseEntity = apertureRestClient.execute(
+                    APERTURE_ENDPOINT + queryParams,
+                    HttpMethod.GET,
+                    httpHeaders
+            );
+
+            if(isSuccess(responseEntity)){
+                LOGGER.debug("Audit returned following response status code {} and body {}", responseEntity.getStatusCodeValue(), responseEntity.getBody());
+            }
+
+        } catch(Exception e){
+            // If the exception cause is a client-side timeout,
+            // proceed as if the audit passed; the caller shouldn't be
+            // blocked because the Aperture service is taking too long
+            // or is down.
+            // Should any other exception block the request from passing?
+            if(e.getCause() instanceof SocketTimeoutException){
+                LOGGER.error(REQUEST_TIMEOUT_STRING, e.getCause());
+            } else if(e.getCause() instanceof ConnectException){
+                LOGGER.error(CONNECTION_REFUSED_STRING, e.getCause());
+            } else if(e.getCause() instanceof ConnectTimeoutException){
+                LOGGER.error(CONNECTION_TIMEOUT_STRING, e.getCause());
+            } else {
+                LOGGER.error("Unknown exception thrown please investigate", e.getCause());
+            }
+        }
+
+        JsonObject jsonResults = new JsonObject ();
+        if( responseEntity != null ) {
+            jsonResults = (JsonObject) (responseEntity.getBody());
+        }
+
+        return jsonResults;
+    }
+
+
+    boolean isSuccess(ResponseEntity responseEntity){
+        return responseEntity != null && responseEntity.getStatusCode().is2xxSuccessful();
+    }
+
+
+}
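For reviewers, a minimal sketch of how the service above might be driven once Spring satisfies the apertureRestClient qualifier; apart from ApertureService itself, the names below are illustrative assumptions, and "aai_relational" mirrors the aperture.rdbmsname property added further down in this commit.

import com.google.gson.JsonObject;
import org.onap.aai.exceptions.AAIException;
import org.onap.aai.rest.client.ApertureService;

// Hypothetical caller; the constructor injection mirrors how
// ApertureService itself is wired in this change.
class ApertureAuditCallerSketch {

    private final ApertureService apertureService;

    ApertureAuditCallerSketch(ApertureService apertureService) {
        this.apertureService = apertureService;
    }

    JsonObject fetchCounts(long snapshotTimestampMs) throws AAIException {
        JsonObject counts = apertureService.runAudit(snapshotTimestampMs, "aai_relational");
        // runAudit logs connectivity failures and returns an empty JsonObject,
        // so an empty result means "no data came back", not a passing audit.
        return counts;
    }
}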
diff --git a/src/main/java/org/onap/aai/rest/client/ApertureServiceNoAuthClient.java b/src/main/java/org/onap/aai/rest/client/ApertureServiceNoAuthClient.java
new file mode 100644 (file)
index 0000000..778a821
--- /dev/null
@@ -0,0 +1,79 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.aai.rest.client;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.onap.aai.restclient.NoAuthRestClient;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.util.MultiValueMap;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.UUID;
+
+public class ApertureServiceNoAuthClient extends NoAuthRestClient {
+
+    private static EELFLogger logger = EELFManager.getInstance().getLogger(ApertureServiceNoAuthClient.class);
+
+    @Value("${aperture.service.base.url}")
+    private String baseUrl;
+
+    @Value("${aperture.service.timeout-in-milliseconds}")
+    private Integer timeout;
+
+    @Override
+    protected HttpComponentsClientHttpRequestFactory getHttpRequestFactory() throws Exception {
+        HttpComponentsClientHttpRequestFactory requestFactory = super.getHttpRequestFactory();
+        requestFactory.setConnectionRequestTimeout(timeout);
+        requestFactory.setReadTimeout(timeout);
+        requestFactory.setConnectTimeout(timeout);
+        return requestFactory;
+    }
+
+    @Override
+    public String getBaseUrl() {
+        return baseUrl;
+    }
+
+    @Override
+    public MultiValueMap<String, String> getHeaders(Map<String, String> headers) {
+        HttpHeaders httpHeaders = new HttpHeaders();
+
+        String defaultAccept = headers.getOrDefault(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON.toString());
+        String defaultContentType =
+                headers.getOrDefault(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON.toString());
+
+        if (headers.isEmpty()) {
+            httpHeaders.setAccept(Collections.singletonList(MediaType.parseMediaType(defaultAccept)));
+            httpHeaders.setContentType(MediaType.parseMediaType(defaultContentType));
+        }
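+        // Note: the Accept/Content-Type defaults above apply only when the
+        // caller passed no headers at all; a non-empty map that omits those
+        // keys keeps whatever the caller supplied.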
+
+        httpHeaders.add("X-FromAppId", appName);
+        httpHeaders.add("X-TransactionId", UUID.randomUUID().toString());
+        headers.forEach(httpHeaders::add);
+        return httpHeaders;
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/rest/client/ApertureServiceOneWayClient.java b/src/main/java/org/onap/aai/rest/client/ApertureServiceOneWayClient.java
new file mode 100644 (file)
index 0000000..a888536
--- /dev/null
@@ -0,0 +1,92 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.aai.rest.client;
+
+import org.onap.aai.restclient.OneWaySSLRestClient;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.util.MultiValueMap;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.UUID;
+
+public class ApertureServiceOneWayClient extends OneWaySSLRestClient {
+
+    @Value("${aperture.service.base.url}")
+    private String baseUrl;
+
+    @Value("${aperture.service.ssl.trust-store}")
+    private String truststorePath;
+
+    @Value("${aperture.service.ssl.trust-store-password}")
+    private String truststorePassword;
+
+    @Value("${aperture.service.timeout-in-milliseconds}")
+    private Integer timeout;
+
+    @Override
+    public String getBaseUrl() {
+        return baseUrl;
+    }
+
+    @Override
+    protected String getTruststorePath() {
+        return truststorePath;
+    }
+
+    @Override
+    protected char[] getTruststorePassword() {
+        return truststorePassword.toCharArray();
+    }
+
+    @Override
+    protected HttpComponentsClientHttpRequestFactory getHttpRequestFactory() throws Exception {
+        HttpComponentsClientHttpRequestFactory requestFactory = super.getHttpRequestFactory();
+        requestFactory.setConnectionRequestTimeout(timeout);
+        requestFactory.setReadTimeout(timeout);
+        requestFactory.setConnectTimeout(timeout);
+        return requestFactory;
+    }
+
+    @Override
+    public MultiValueMap<String, String> getHeaders(Map<String, String> headers) {
+        HttpHeaders httpHeaders = new HttpHeaders();
+
+        String defaultAccept = headers.getOrDefault(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON.toString());
+        String defaultContentType =
+            headers.getOrDefault(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON.toString());
+
+        if (headers.isEmpty()) {
+            httpHeaders.setAccept(Collections.singletonList(MediaType.parseMediaType(defaultAccept)));
+            httpHeaders.setContentType(MediaType.parseMediaType(defaultContentType));
+        }
+
+        httpHeaders.add("X-FromAppId", appName);
+        httpHeaders.add("X-TransactionId", UUID.randomUUID().toString());
+        headers.forEach(httpHeaders::add);
+        return httpHeaders;
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/rest/client/ApertureServiceRestClient.java b/src/main/java/org/onap/aai/rest/client/ApertureServiceRestClient.java
new file mode 100644 (file)
index 0000000..da32fbe
--- /dev/null
@@ -0,0 +1,106 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.aai.rest.client;
+
+import org.onap.aai.restclient.TwoWaySSLRestClient;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.util.MultiValueMap;
+
+import java.util.Collections;
+import java.util.Map;
+import java.util.UUID;
+
+public class ApertureServiceRestClient extends TwoWaySSLRestClient {
+
+    @Value("${aperture.service.base.url}")
+    private String baseUrl;
+
+    @Value("${aperture.service.ssl.key-store}")
+    private String keystorePath;
+
+    @Value("${aperture.service.ssl.trust-store}")
+    private String truststorePath;
+
+    @Value("${aperture.service.ssl.key-store-password}")
+    private String keystorePassword;
+
+    @Value("${aperture.service.ssl.trust-store-password}")
+    private String truststorePassword;
+
+    @Value("${aperture.service.timeout-in-milliseconds}")
+    private Integer timeout;
+
+    @Override
+    public String getBaseUrl() {
+        return baseUrl;
+    }
+
+    @Override
+    protected String getKeystorePath() {
+        return keystorePath;
+    }
+
+    @Override
+    protected String getTruststorePath() {
+        return truststorePath;
+    }
+
+    @Override
+    protected char[] getKeystorePassword() {
+        return keystorePassword.toCharArray();
+    }
+
+    @Override
+    protected char[] getTruststorePassword() {
+        return truststorePassword.toCharArray();
+    }
+
+    @Override
+    protected HttpComponentsClientHttpRequestFactory getHttpRequestFactory() throws Exception {
+        HttpComponentsClientHttpRequestFactory requestFactory = super.getHttpRequestFactory();
+        requestFactory.setConnectionRequestTimeout(timeout);
+        requestFactory.setReadTimeout(timeout);
+        requestFactory.setConnectTimeout(timeout);
+        return requestFactory;
+    }
+
+    @Override
+    public MultiValueMap<String, String> getHeaders(Map<String, String> headers) {
+        HttpHeaders httpHeaders = new HttpHeaders();
+
+        String defaultAccept = headers.getOrDefault(HttpHeaders.ACCEPT, MediaType.APPLICATION_JSON.toString());
+        String defaultContentType =
+                headers.getOrDefault(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON.toString());
+
+        if (headers.isEmpty()) {
+            httpHeaders.setAccept(Collections.singletonList(MediaType.parseMediaType(defaultAccept)));
+            httpHeaders.setContentType(MediaType.parseMediaType(defaultContentType));
+        }
+
+        httpHeaders.add("X-FromAppId", appName);
+        httpHeaders.add("X-TransactionId", UUID.randomUUID().toString());
+        headers.forEach(httpHeaders::add);
+        return httpHeaders;
+    }
+
+}
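The three clients above are presumably selected by the new ApertureConfiguration (part of this commit but not shown in this view) based on the aperture.service.client property added to application.properties below. A purely illustrative shape for that wiring, assuming all three clients ultimately derive from org.onap.aai.restclient.RestClient as the @Qualifier in ApertureService suggests:

import org.onap.aai.restclient.RestClient;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Hypothetical sketch only; the real ApertureConfiguration is not shown here.
// For brevity the clients are constructed with "new", which would skip the
// @Value field injection they rely on; real wiring would register them as
// Spring beans instead.
@Configuration
public class ApertureConfigurationSketch {

    @Bean(name = "apertureRestClient")
    public RestClient apertureRestClient(
            @Value("${aperture.service.client:no-auth}") String clientType) {
        switch (clientType) {
            case "one-way-ssl":
                return new ApertureServiceOneWayClient();
            case "two-way-ssl":
                return new ApertureServiceRestClient();
            case "no-auth":
            default:
                return new ApertureServiceNoAuthClient();
        }
    }
}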
index 84ecf19..160ec22 100644 (file)
@@ -24,7 +24,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.janusgraph.core.JanusGraph;
 import org.janusgraph.core.schema.JanusGraphManagement;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbgen.SchemaGenerator;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
index eefb7b0..05478e2 100644 (file)
@@ -24,7 +24,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.janusgraph.core.JanusGraph;
 import org.janusgraph.core.schema.JanusGraphManagement;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbgen.SchemaGenerator4Hist;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
index 0bd254f..4297886 100644 (file)
@@ -21,7 +21,7 @@ package org.onap.aai.util;
 
 import com.beust.jcommander.JCommander;
 import com.beust.jcommander.Parameter;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.LoaderFactory;
index dfe2649..c51de1e 100644 (file)
@@ -54,8 +54,8 @@ public class SendMigrationNotifications {
        private String config;
        private String path;
        private Set<String> notifyOn;
-       private long sleepInMilliSecs;
-       private int numToBatch;
+       long sleepInMilliSecs;
+       int numToBatch;
        private String requestId;
        private EventAction eventAction;
        private String eventSource;
@@ -108,22 +108,25 @@ public class SendMigrationNotifications {
                        vertexes = g.V(entry.getKey()).toList();
                        if (vertexes == null || vertexes.isEmpty()) {
                                logAndPrint("Vertex " + entry.getKey() + " no longer exists." );
+                               continue;
                        } else if (vertexes.size() > 1) {
                                logAndPrint("Vertex " + entry.getKey() + " query returned " + vertexes.size() + " vertexes." );
+                               continue;
                        } else {
                                logger.debug("Processing " + entry.getKey() + "resource-version " + entry.getValue());
                                v = vertexes.get(0);
-                               if ((notifyOn.isEmpty() || notifyOn.contains(v.value(AAIProperties.NODE_TYPE).toString()))
-                                               && entry.getValue().equals(v.value(AAIProperties.RESOURCE_VERSION).toString())) {
-                                       Introspector introspector = serializer.getLatestVersionView(v);
-                                       uri = this.serializer.getURIForVertex(v, false);
-                                       this.notificationHelper.addEvent(v, introspector, eventAction, uri, basePath);
-                                       count++;
-                                       if (count >= this.numToBatch) {
-                                               trigger();
-                                               logger.debug("Triggered " + entry.getKey());
-                                               count = 0;
-                                               Thread.sleep(this.sleepInMilliSecs);
+                               if (notifyOn.isEmpty() || notifyOn.contains(v.value(AAIProperties.NODE_TYPE).toString())) {
+                                       if (entry.getValue().equals(v.value(AAIProperties.RESOURCE_VERSION).toString())) {
+                                               Introspector introspector = serializer.getLatestVersionView(v);
+                                               uri = this.serializer.getURIForVertex(v, false);
+                                               this.notificationHelper.addEvent(v, introspector, eventAction, uri, basePath);
+                                               count++;
+                                               if (count >= this.numToBatch) {
+                                                       trigger();
+                                                       logger.debug("Triggered " + entry.getKey());
+                                                       count = 0;
+                                                       Thread.sleep(this.sleepInMilliSecs);
+                                               }
                                        }
                                }
                        }
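The restructured loop above keeps the same batch-and-throttle behaviour while making the skip cases explicit via continue. Distilled to its core, with hypothetical stand-ins for notificationHelper.addEvent(...) and trigger(), the pattern is:

import java.util.List;

// Hedged sketch of the batch-and-throttle loop in the hunk above;
// queueEvent() and flush() are stand-ins, not the production methods.
class BatchNotifierSketch {

    private final int numToBatch;
    private final long sleepInMilliSecs;

    BatchNotifierSketch(int numToBatch, long sleepInMilliSecs) {
        this.numToBatch = numToBatch;
        this.sleepInMilliSecs = sleepInMilliSecs;
    }

    void sendAll(List<String> keys) throws InterruptedException {
        int count = 0;
        for (String key : keys) {
            queueEvent(key);
            if (++count >= numToBatch) {
                flush();                        // emit the accumulated batch
                count = 0;
                Thread.sleep(sleepInMilliSecs); // throttle between batches
            }
        }
        if (count > 0) {
            flush();                            // emit any remainder
        }
    }

    private void queueEvent(String key) { /* stand-in for addEvent(...) */ }
    private void flush() { /* stand-in for trigger() */ }
}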
index d3670f2..0eeaecd 100644 (file)
@@ -21,7 +21,7 @@ package org.onap.aai.util;
 
 import com.beust.jcommander.JCommander;
 import com.beust.jcommander.Parameter;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.LoaderFactory;
index 629d11e..f193b8c 100644 (file)
@@ -32,6 +32,7 @@ import org.apache.tinkerpop.gremlin.structure.T;
 import org.glassfish.jersey.filter.LoggingFilter;
 import org.glassfish.jersey.server.ResourceConfig;
 import org.glassfish.jersey.servlet.ServletProperties;
+import org.onap.aai.rest.AuditSqlDbConsumer;
 import org.onap.aai.rest.QueryConsumer;
 import org.onap.aai.rest.util.EchoResponse;
 import org.onap.logging.filter.base.AuditLogContainerFilter;
@@ -54,7 +55,7 @@ public class JerseyConfiguration extends ResourceConfig {
         this.env = env;
 
         register(QueryConsumer.class);
-
+        register(AuditSqlDbConsumer.class);
         register(EchoResponse.class);
 
         //Filters
index 698a6da..82780f7 100644 (file)
@@ -61,7 +61,7 @@ schema.version.namespace.change.start=v12
 # Specifies from which version should the client start seeing the edge label in payload
 schema.version.edge.label.start=v12
 # Specifies the version that the application should default to
-schema.version.api.default=v16
+schema.version.api.default=v19
 schema.translator.list=config
 #schema.service.client=no-auth
 schema.service.base.url=https://localhost:8452/aai/schema-service/v1/
@@ -74,4 +74,14 @@ schema.service.ssl.key-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1
 schema.service.ssl.trust-store=${server.local.startpath}etc/auth/aai_keystore
 schema.service.ssl.trust-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
 
-aaf.cadi.file=${server.local.startpath}/cadi.properties
\ No newline at end of file
+aaf.cadi.file=${server.local.startpath}/cadi.properties
+
+aperture.rdbmsname=aai_relational
+
+aperture.service.client=no-auth
+aperture.service.base.url=http://localhost:8457/aai/aperture
+aperture.service.ssl.key-store=${server.local.startpath}etc/auth/aai_keystore
+aperture.service.ssl.trust-store=${server.local.startpath}etc/auth/aai_keystore
+aperture.service.ssl.key-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
+aperture.service.ssl.trust-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
+aperture.service.timeout-in-milliseconds=300000
index ae5084f..b2dd2ed 100644 (file)
@@ -99,6 +99,7 @@ aai.datagrooming.agezip=5
 aai.datagrooming.agedelete=30
 aai.datasnapshot.agezip=5
 aai.datasnapshot.agedelete=30
+aai.datamigration.agedelete=30
 
 #used by the dataSnapshot and dataGrooming tasks
 aai.cron.enable.dataSnapshot=true
index 74b3c9e..e37931f 100644 (file)
@@ -8,5 +8,4 @@
 datagroomingcleanup.cron=0 06 0 * * ?
 datasnapshotcleanup.cron=0 17 0 * * ?
 datasnapshottasks.cron=0 45 * * * ?
-datagroomingtasks.cron=0 10 1,5,9,13,17,21 * * ?
-dataexporttask.cron=0 02 3 * * ?
\ No newline at end of file
+datagroomingtasks.cron=0 10 1,5,9,13,17,21 * * ?
\ No newline at end of file
index eff961f..7561f3f 100644 (file)
                <appender-ref ref="translog" />
        </appender>
 
+       <appender name="failover" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <file>${logDirectory}/failover/failover.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/failover/failover.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder>
+                       <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} - %msg%n
+                       </pattern>
+               </encoder>
+       </appender>
+
+       <appender name="asyncFailover" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="failover" />
+       </appender>
+
        <appender name="dmaapAAIEventConsumer"
                class="ch.qos.logback.core.rolling.RollingFileAppender">
                <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
                        </fileNamePattern>
                </rollingPolicy>
                <encoder>
-                       <pattern>${"errorPattern"}</pattern>
+                       <pattern>${errorPattern}</pattern>
                </encoder>
        </appender>
        <appender name="dmaapAAIEventConsumerInfo"
                        <fileNamePattern>${logDirectory}/createDBSchema/error.log.%d{yyyy-MM-dd}</fileNamePattern>
                </rollingPolicy>
                <encoder>
-                       <pattern>${"errorPattern"}</pattern>
+                       <pattern>${errorPattern}</pattern>
                </encoder>
        </appender>
 
                        <fileNamePattern>${logDirectory}/misc/error.log.%d{yyyy-MM-dd}</fileNamePattern>
                </rollingPolicy>
                <encoder>
-                       <pattern>${"errorPattern"}</pattern>
+                       <pattern>${errorPattern}</pattern>
                </encoder>
        </appender>
 
                        <onMatch>ACCEPT</onMatch>
                        <onMismatch>DENY</onMismatch>
                </filter>
-               <File>${logDirectory}/dupetool/debug.log</File>
+               <File>${logDirectory}/dupeTool/debug.log</File>
                <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-                       <fileNamePattern>${logDirectory}/dupetool/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+                       <fileNamePattern>${logDirectory}/dupeTool/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
                </rollingPolicy>
                <encoder>
                        <pattern>${debugPattern}</pattern>
        </appender>     
        <!-- forceDelete logs ended -->
        
-       <!-- migration logs started --> 
+       <!-- default migration logs started --> 
        <appender name="migrationdebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
                <filter class="ch.qos.logback.classic.filter.LevelFilter">
                        <level>DEBUG</level>
                        <pattern>${errorPattern}</pattern>
                </encoder>
        </appender>     
-       <!-- migration logs ended -->   
+       <!-- default migration logs ended -->
+  
+    <!-- other migration logs started -->
+    <appender name="migrationlog" class="ch.qos.logback.classic.sift.SiftingAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <!-- This is MDC value -->
+               <!-- We will assign a value to 'logFilenameAppender' via Java code -->
+               <discriminator>
+                       <key>logFilenameAppender</key>
+                       <defaultValue>undefined</defaultValue>
+               </discriminator>
+               <sift>
+                       <!-- A standard RollingFileAppender, the log file is based on 'logFileName' 
+                               at runtime -->
+                       <appender name="FILE-${logFilenameAppender}"
+                               class="ch.qos.logback.core.rolling.RollingFileAppender">
+                               <file>${logDirectory}/migration/${logFilenameAppender}.log</file>
+                               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                                       <fileNamePattern>${logDirectory}/migration/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+                                       </fileNamePattern>
+                               </rollingPolicy>
+                               <encoder>
+                                       <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+                               </encoder>
+                       </appender>
+               </sift>
+       </appender>
+       <!-- other migration logs ended -->
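The sifting appender above discriminates on the logFilenameAppender MDC entry, which (per the inline comment) is assigned from Java code. A minimal illustration using SLF4J's MDC; the migration name shown is an assumption, borrowed from a class in this commit:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

public class MigrationLogRoutingSketch {

    private static final Logger LOG = LoggerFactory.getLogger("org.onap.aai.migration.sample");

    public static void main(String[] args) {
        // Key matches the <discriminator> above; the value picks the log file.
        MDC.put("logFilenameAppender", "MigrateL2DefaultToFalse");
        try {
            LOG.info("migration started"); // lands in ${logDirectory}/migration/MigrateL2DefaultToFalse.log
        } finally {
            MDC.remove("logFilenameAppender");
        }
    }
}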
        
        <!-- DataGrooming logs started -->
        <appender name="dataExportError" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <logger name="org.onap.aai.logging.ErrorLogHelper" level="WARN">
                <appender-ref ref="asyncERROR"/>
        </logger>
+       <logger name="org.onap.aai.failover" level="DEBUG" additivity="false">
+               <appender-ref ref="asyncFailover" />
+       </logger>
        <logger name="org.onap.aai.interceptors.post" level="DEBUG" additivity="false">
                <appender-ref ref="asynctranslog" />
        </logger>
        <appender-ref ref="migrationdebug" />
        <appender-ref ref="migrationerror" />
        </logger>
+       
+	<logger name="org.onap.aai.migration" level="INFO" additivity="false">
+		<appender-ref ref="migrationlog" />
+	</logger>
+       
                        
        <logger name="org.onap.aai.dataexport" level="DEBUG" additivity="false">
                <appender-ref ref="dataExportError"/>
        <logger name="org.janusgraph" level="WARN" />
        <logger name="com.att.aft.dme2" level="WARN" />
 
-       <!-- ============================================================================ -->
-       <!-- General EELF logger -->
-       <!-- ============================================================================ -->
-       <logger name="com.att.eelf" level="WARN" additivity="false">
-               <appender-ref ref="asyncDEBUG" />
-               <appender-ref ref="asyncERROR" />
-               <appender-ref ref="asyncMETRIC" />
-       </logger>
-
        <root level="DEBUG">
                <appender-ref ref="external" />
        </root>
index b4522d5..6420690 100644 (file)
        <property name="p_thr" value="%thread"/>
        <property name="pattern" value="%nopexception${p_tim}\t${p_thr}\t${p_lvl}\t${p_log}\t${p_mdc}\t${p_msg}\t${p_exc}\t${p_mak}\t%n"/>
        <property name="errorPattern" value="%X{LogTimestamp}|%X{RequestID}|%thread|%X{ServiceName}|%X{PartnerName}|%X{TargetEntity}|%X{TargetServiceName}|%.-5level|%X{ErrorCode}|%X{ErrorDesc}|%msg%n" />
-       <property name="debugPattern" value="%X{LogTimestamp}|%X{RequestID}|%msg\t${p_mdc}\t${p_msg}\t${p_exc}\t${p_mak}\t|^%n" />
+       <property name="debugPattern" value="%X{LogTimestamp}|%X{RequestID}|${p_msg}\t${p_mdc}\t${p_exc}\t${p_mak}\t|^%n" />
        <property name="auditPattern" value="%X{EntryTimestamp}|%X{LogTimestamp}|%X{RequestID}|%X{ServiceInstanceID}|%thread||%X{ServiceName}|%X{PartnerName}|%X{StatusCode}|%X{ResponseCode}|%X{ResponseDesc}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ElapsedTime}|%X{ServerFQDN}|%X{RemoteHost}||||${p_mak}|${p_mdc}|||%msg%n" />
        <property name="metricPattern" value="%X{InvokeTimestamp}|%X{LogTimestamp}|%X{RequestID}|%X{ServiceInstanceID}|%thread||%X{ServiceName}|%X{PartnerName}|%X{TargetEntity}|%X{TargetServiceName}|%X{StatusCode}|%X{ResponseCode}|%X{ResponseDesc}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ElapsedTime}|%X{ServerFQDN}|%X{RemoteHost}||||%X{TargetVirtualEntity}|${p_mak}|${p_mdc}|||%msg%n" />
        <property name="transLogPattern" value="%X{LogTimestamp}|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{RequestID}|%X{ServiceInstanceID}|%-10t|%X{ServerFQDN}|%X{ServiceName}|%X{PartnerName}|%X{StatusCode}|%X{ResponseCode}|%replace(%replace(%X{ResponseDesc}){'\\|', '!'}){'\r|\n', '^'}|%X{InstanceUUID}|%level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ElapsedTime}|%X{ServerFQDN}|%X{clientIpAddress}||%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{PartnerName}:%m%n"/>
 
-       <appender name="uniquePropertyChecklog" class="ch.qos.logback.classic.sift.SiftingAppender">
-               <filter class="ch.qos.logback.classic.filter.LevelFilter">
-                       <level>INFO</level>
-                       <onMatch>ACCEPT</onMatch>
-                       <onMismatch>DENY</onMismatch>
-               </filter>
-               <!-- This is MDC value -->
-               <!-- We will assign a value to 'logFilenameAppender' via Java code -->
-               <discriminator>
-                       <key>logFilenameAppender</key>
-                       <defaultValue>undefined</defaultValue>
-               </discriminator>
-               <sift>
-                       <!-- A standard RollingFileAppender, the log file is based on 'logFileName' 
-                               at runtime -->
-                       <appender name="FILE-${logFilenameAppender}"
-                               class="ch.qos.logback.core.rolling.RollingFileAppender">
-                               <file>${logDirectory}/uniquePropertyCheck/${logFilenameAppender}.log</file>
-                               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-                                       <fileNamePattern>${logDirectory}/uniquePropertyCheck/${logFilenameAppender}.log.%d{yyyy-MM-dd}
-                                       </fileNamePattern>
-                               </rollingPolicy>
-                               <encoder>
-                                       <pattern>${auditPattern}</pattern>
-                               </encoder>
-                       </appender>
-               </sift>
-       </appender>
+	<appender name="uniquePropertyChecklog" class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<filter class="ch.qos.logback.classic.filter.LevelFilter">
+			<level>INFO</level>
+			<onMatch>ACCEPT</onMatch>
+			<onMismatch>DENY</onMismatch>
+		</filter>
+		<File>${logDirectory}/uniquePropertyCheck/debug.log</File>
+		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+			<fileNamePattern>${logDirectory}/uniquePropertyCheck/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+			<maxHistory>7</maxHistory>
+			<cleanHistoryOnStart>true</cleanHistoryOnStart>
+		</rollingPolicy>
+		<encoder>
+			<pattern>${debugPattern}</pattern>
+		</encoder>
+	</appender>
+
 
        <logger name="org.reflections" level="WARN" additivity="false">
                <appender-ref ref="uniquePropertyChecklog" />
index f279334..1fac7ce 100644 (file)
@@ -16,7 +16,6 @@ check_user(){
 
 # Sources the profile and sets the project home
 source_profile(){
-    . /etc/profile.d/aai.sh
     PROJECT_HOME=/opt/app/aai-graphadmin
 }
 
index 350b0bd..d4cdb9c 100644 (file)
@@ -68,6 +68,9 @@ COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
 start_date;
 check_user;
 source_profile;
+
+export JAVA_PRE_OPTS=${JAVA_PRE_OPTS:--Xms6g -Xmx6g};
+
 execute_spring_jar org.onap.aai.dbgen.DupeTool ${PROJECT_HOME}/resources/dupeTool-logback.xml "$@"
 end_date;
-exit 0
\ No newline at end of file
+exit 0
index 87cce13..89cf990 100644 (file)
@@ -29,7 +29,7 @@
 COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
 . ${COMMON_ENV_PATH}/common_functions.sh
 
-. /etc/profile.d/aai.sh
+
 PROJECT_HOME=/opt/app/aai-graphadmin
 
 PROGNAME=$(basename $0)
diff --git a/src/main/scripts/extract-events.sh b/src/main/scripts/extract-events.sh
new file mode 100644 (file)
index 0000000..5a2bb3b
--- /dev/null
@@ -0,0 +1,20 @@
+#!/bin/bash
+usage(){
+    echo "Usage: $0 input-file output-file event-type";
+}
+
+if [ "${#}" -lt 3 ]; then
+        usage;
+        exit 1
+fi;
+
+input_file=$1
+output_file=$2
+event_type=$3
+
+grep "|${event_type}|" ${input_file} > ${output_file}.1 
+sed -i -e '/InvokeReturn/s/^.*$//g' ${output_file}.1 
+sed -i '/^$/d' ${output_file}.1
+cat ${output_file}.1 | awk -F '|' '{print $29}' > ${output_file}
+rm ${output_file}.1
+exit 0
diff --git a/src/main/scripts/getDslResult.sh b/src/main/scripts/getDslResult.sh
new file mode 100644 (file)
index 0000000..78286cd
--- /dev/null
@@ -0,0 +1,151 @@
+#!/bin/ksh
+
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#     http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+
+#
+
+display_usage() {
+        cat <<EOF
+        Usage: $0 [options]
+
+        1. Usage: getDslResult.sh <base-path or optional host url> <optional input-json-filepath> <optional -XFROMAPPID> <optional -XTRANSID>
+        2. This script requires one argument, a base-path 
+        3. Example for basepath: aai/{version}
+        4. Adding the optional input-json-payload replaces the default dsl payload with the contents of the input-file
+        5. The query result is returned in the file logs/data/migration-input-files/dslResults.json under /opt/app/aai-graphadmin
+EOF
+}
+if [ $# -eq 0 ]; then
+        display_usage
+        exit 1
+fi
+
+RESOURCE="dsl?format=resource_and_url&nodesOnly=true"
+
+BASEPATH=$1
+if [ -z $BASEPATH ]; then
+        echo "base-path parameter is missing"
+        echo "usage: $0 <base-path>"
+        exit 1
+fi
+
+
+PROJECT_HOME=/opt/app/aai-graphadmin
+RESULTDIR=$PROJECT_HOME/logs/data/migration-input-files
+
+if [ ! -d ${RESULTDIR} ]; then
+        mkdir -p ${RESULTDIR}
+        chown aaiadmin:aaiadmin ${RESULTDIR}
+        chmod u+w ${RESULTDIR}
+fi
+
+RESULTPATH=$RESULTDIR/dslResults.json
+
+JSONFILE=$2
+TEMPFILE=/tmp/dslResult-temp.json
+if [ -z $JSONFILE ]; then
+        JSONFILE=$TEMPFILE
+        echo "{ \"dsl\":\"l3-network('network-type','Tenant Network')>vlan-tag>vlan-range*\" }" > $JSONFILE
+fi
+
+echo `date` "   Starting $0 for resource $RESOURCE"
+
+
+XFROMAPPID="AAI-TOOLS"
+XTRANSID=`uuidgen`
+
+if [ ! -z "$3" ]; then
+   XFROMAPPID=$3
+fi
+
+if [ ! -z "$4" ]; then
+   XTRANSID=$4
+fi
+
+userid=$( id | cut -f2 -d"(" | cut -f1 -d")" )
+if [ "${userid}" != "aaiadmin" ]; then
+    echo "You must be aaiadmin to run $0. The id used was $userid."
+    exit 1
+fi
+
+prop_file=$PROJECT_HOME/resources/etc/appprops/aaiconfig.properties
+log_dir=$PROJECT_HOME/logs/misc
+today=$(date +\%Y-\%m-\%d)
+
+RETURNRESPONSE=true
+
+MISSING_PROP=false
+RESTURLPROP=`grep ^aai.server.url.base= $prop_file |cut -d'=' -f2 |tr -d "\015"`
+if [ -z $RESTURLPROP ]; then
+        echo "Property [aai.server.url.base] not found in file $prop_file"
+        MISSING_PROP=true
+else
+       RESTURL=`echo $RESTURLPROP | sed -e "s#aai/#$BASEPATH/#"`
+fi
+
+if [ ! -z "$1" ]; then
+       if [[ $1 == *"https"* ]]; then
+               RESTURL=$1
+       fi
+fi
+
+USEBASICAUTH=false
+BASICENABLE=`grep ^aai.tools.enableBasicAuth $prop_file |cut -d'=' -f2 |tr -d "\015"`
+if [ -z $BASICENABLE ]; then
+        USEBASICAUTH=false
+else
+        USEBASICAUTH=true
+        CURLUSER=`grep ^aai.tools.username $prop_file |cut -d'=' -f2 |tr -d "\015"`
+        if [ -z $CURLUSER ]; then
+                echo "Property [aai.tools.username] not found in file $prop_file"
+                MISSING_PROP=true
+        fi
+        CURLPASSWORD=`grep ^aai.tools.password $prop_file |cut -d'=' -f2 |tr -d "\015"`
+        if [ -z $CURLPASSWORD ]; then
+                echo "Property [aai.tools.password] not found in file $prop_file"
+                MISSING_PROP=true
+        fi
+fi
+
+fname=$JSONFILE
+if [ -f /tmp/$(basename $JSONFILE) ]; then
+       fname=/tmp/$(basename $JSONFILE)
+elif [ ! -f $JSONFILE ]; then
+       echo "The file $JSONFILE does not exist"
+	exit 1
+fi
+
+if [ $MISSING_PROP = false ]; then
+        if [ $USEBASICAUTH = false ]; then
+                AUTHSTRING="--cert $PROJECT_HOME/resources/etc/auth/aaiClientPublicCert.pem --key $PROJECT_HOME/resources/etc/auth/aaiClientPrivateKey.pem"
+        else
+                AUTHSTRING="-u $CURLUSER:$CURLPASSWORD"
+        fi
+               curl --request PUT -k $AUTHSTRING -H "X-FromAppId: $XFROMAPPID" -H "X-TransactionId: $XTRANSID" -H "Accept: application/json" -H "Content-Type: application/json" -T $fname $RESTURL$RESOURCE | jq '.' > $RESULTPATH
+               RC=$?
+       
+else
+        echo "usage: $0 <base-path>"
+        RC=1
+fi
+if [ "a$JSONFILE" = "a$TEMPFILE" ]; then
+       rm $TEMPFILE
+fi
+echo `date` "   Done $0, exit code is $RC, returning result in $RESULTPATH" 
+exit $RC
\ No newline at end of file
index 2afa3a7..b8ff73f 100644 (file)
 #
 # -b,  (required) <string> the base url for the dmaap server 
 # -e,  (required) <file>   filename containing the missed events
-# -l,  (optional)          indicating that the script should be run it debug mode
+# -l,  (optional)          indicating that the script should be run in debug mode
 #                          it will not send the dmaap messages to dmaap server 
 #                          but it will write to a file named resend_dmaap_server.out
+# -x,  (optional)          skip the resource-version check
 # -p,  (required) <string> the password for the dmaap server 
 # -s,  (required) <file>   containing the data snapshot graphson file to compare the resource versions against
-# -u,  (required) <string> the username for the dmaap server 
+#                          partial snapshots should be concatenated into a full snapshot file
+#                          before running the script
+# -u,  (required) <string> the username for the dmaap server
+# -t,  (required) <string> the dmaap topic
 #
 # An example of how to use the tool:
 # Please use right credentials and right dmaap server in the cluster
 #
-#  ./resend-dmaap-events.sh -e example_events.txt -s dataSnapshot.graphSON.201808091545 -u username -p example_pass -b https://localhost:3905
+#  ./resend-dmaap-events.sh -e example_events.txt -s dataSnapshot.graphSON.201808091545 -u username -p example_pass -b https://localhost:3905 -t AAI-EVENT
 #
 # For each dmaap message in the example_events.txt, it will check 
 # against graphson and try to send it to the dmaap server
 # For testing purposes, if you are trying to run this script and don't want to actually
 # send it to a dmaap server, then you can run either of the following:
 #
-# ./resend-dmaap-events.sh -l -e example_events.txt -s dataSnapshot.graphSON.201808091545
+# ./resend-dmaap-events.sh -l -e example_events.txt -s dataSnapshot.graphSON.201808091545 -t <other-dmaap-topic>
 # or
-# ./resend-dmaap-events.sh -l -e example_events.txt -s dataSnapshot.graphSON.201808091545 -u username -p example_pass -b https://localhost:3905
+# ./resend-dmaap-events.sh -l -e example_events.txt -s dataSnapshot.graphSON.201808091545 -u username -p example_pass -b https://localhost:3905 -t <other-dmaap-topic>
+# or, to skip the resource-version check
+# ./resend-dmaap-events.sh -l -x -e example_events.txt -s dataSnapshot.graphSON.201808091545 -u username -p example_pass -b https://localhost:3905 -t <other-dmaap-topic>
 #
 # Following will output what would have been sent out based on checking the datasnapshot with example_events.txt
 #
@@ -90,18 +96,22 @@ usage(){
     echo;
     echo "  -b,   <string> the base url for the dmaap server";
     echo "  -e,   <file>   filename containing the missed events";
-    echo "  -l, (optional) indicating that the script should be run it debug mode"
+    echo "  -l, (optional) indicating that the script should be run in debug mode"
     echo "                 it will not send the dmaap messages to dmaap server "
     echo "                 but it will write to a file named resend_dmaap_server.out"
+    echo "  -x, (optional) indicating that the script will skip the resource-version check"
     echo "  -p,   <string> the password for the dmaap server";
     echo "  -s,   <file>   containing the data snapshot graphson file to compare the resource versions against";
+    echo "                  partial snapshots should be concatenated into a full snapshot file";
+    echo "                  before running the script";
     echo "  -u,   <string> the username for the dmaap server";
+    echo "  -t,   <string> the dmaap topic";
     echo;
     echo;
     echo " An example of how to use the tool:";
     echo " Please use right credentials and right dmaap server in the cluster";
     echo;
-    echo "  ./resend-dmaap-events.sh -e example_events.txt -s dataSnapshot.graphSON.201808091545 -u username -p example_pass -b https://localhost:3905";
+    echo "  ./resend-dmaap-events.sh -e example_events.txt -s dataSnapshot.graphSON.201808091545 -u username -p example_pass -b https://localhost:3905 -t AAI-EVENT";
     echo;
     echo " For each dmaap message in the example_events.txt, it will check ";
     echo " against graphson and try to send it to the dmaap server";
@@ -126,7 +136,9 @@ usage(){
     echo;
     echo " ./resend-dmaap-events.sh -l -e example_events.txt -s dataSnapshot.graphSON.201808091545";
     echo " or";
-    echo " ./resend-dmaap-events.sh -l -e example_events.txt -s dataSnapshot.graphSON.201808091545 -u username -p example_pass -b https://localhost:3905";
+    echo " ./resend-dmaap-events.sh -l -e example_events.txt -s dataSnapshot.graphSON.201808091545 -u username -p example_pass -b https://localhost:3905 -t AAI-EVENT";
+    echo " or, to skip the resource-version check";
+    echo " ./resend-dmaap-events.sh -l -x -e example_events.txt -s dataSnapshot.graphSON.201808091545 -u username -p example_pass -b https://localhost:3905 -t AAI-EVENT";
     echo;
     echo " Following will output what would have been sent out based on checking the datasnapshot with example_events.txt";
     echo;
@@ -143,25 +155,25 @@ usage(){
 # Checks if the argument of the string is greater than zero
 # Also check if the file actually exists
 validate(){
-    local type_of_file=$1;
+    type_of_file=$1;
 
     if [ $# -eq 0 ]; then
         echo "Error expecting the validate method to have at least one argument indicating what type";
-        exit -1;
+        exit 1;
     fi;
 
     shift;
 
-    local arg=$1;
+    arg=$1;
 
     if [ -z "$arg" ]; then
         echo "Error missing the expected argument for ${type_of_file}";
-        exit -1;
+        exit 1;
     fi;
 
     if [ ! -f "$arg" ]; then
         echo "Error: file $arg cannot be found, please check the file again";
-        exit -1;
+        exit 1;
     fi;
 }
 
@@ -171,26 +183,31 @@ validate(){
 # Otherwise it will return non zero to indicate that this method failed
 resource_version_matches_snapshot_file(){
 
-    local snapshot_file=$1;
-    local entity_link=$2;
-    local resource_version=$3;
-    local action=$4;
+    snapshot_file=$1;
+    entity_link=$2;
+    resource_version=$3;
+    action=$4;
+    topic=$5;
     
     if [ -z ${resource_version} ]; then
         echo "Missing the parameter resource version to be passed";
-        return -1;
+        return 1;
     fi
 
     # Modify the entity link passed to remove the /aai/v[0-9]+
-    aai_uri=$(echo $entity_link | sed 's/\/aai\/v[0-9][0-9]*//g');
+    if [ "${topic}" = "<other-dmaap-topic>" ]; then 
+       aai_uri=$(echo $entity_link | sed 's/\/<other-base>\/v[0-9][0-9]*//g');
+    else
+       aai_uri=$(echo $entity_link | sed 's/\/aai\/v[0-9][0-9]*//g');
+    fi
 
-    local line=$(grep '"value":"'${aai_uri}'"' ${snapshot_file} 2> /dev/null);
+    line=$(grep '"value":"'${aai_uri}'"' ${snapshot_file} 2> /dev/null);
 
     if [ -z "${line}" ] ; then
         if [ "${action}" = "DELETE" ]; then
             return 0;
         else
-            return -1;
+            return 1;
         fi;
     fi;
 
@@ -199,51 +216,26 @@ resource_version_matches_snapshot_file(){
     if [ $cnt -eq 1 ]; then
         return 0;
     else
-        return -1;
+        return 1;
     fi;
 }
 
-# From a array being passed, it will determine the smallest element
-# and return the index of the smallest element
-# If the array length is zero, then it will return -1
-retrieve_smallest_index(){
-
-    local elements=("${@}");
-
-    if [ ${#elements} -eq 0 ]; then
-        return -1;
-    fi;
-
-    local smallest_element=${elements[0]};
-
-    local index=0;
-    local smallest_index=0;
-
-    for element in ${elements[@]}; do
-        if [ $element -lt $smallest_element ]; then
-            smallest_index=${index};
-        fi;
-        index=$((index+1));
-    done;
-
-    return ${smallest_index};
-}
-
 # Send the dmaap event to the host based on
 # the line that was send to the function
 send_dmaap(){
 
-    local local_mode=$1;
-    local line=$2;
-    local username=$3;
-    local password=$4;
-    local baseurl=$5;
-    local resp_code=0;
+    local_mode=$1;
+    line=$2;
+    username=$3;
+    password=$4;
+    baseurl=$5;
+    topic=$6;
+    resp_code=0;
     
     generated_file=$(uuidgen);
 
-    local json_file=/tmp/${generated_file}.json;
-    local curl_output=/tmp/${generated_file}.txt;
+    json_file=/tmp/${generated_file}.json;
+    curl_output=/tmp/${generated_file}.txt;
 
     echo ${line} > ${json_file};
     > ${curl_output};
@@ -259,13 +251,13 @@ send_dmaap(){
             -X POST \
             -H "Content-Type: application/json" \
             -d "@${json_file}" \
-            "${baseurl}/events/AAI-EVENT"\
+            "${baseurl}/events/${topic}"\
         );
 
         if [ "$response_code" -ne "200" ]; then
             echo -n "Response failure for dmaap message with id ${id}," >> ${resend_error_log};
             echo " code: ${response_code} body: $(cat ${curl_output})" >> ${resend_error_log};
-            resp_code=-1;
+            resp_code=1;
         fi;
     fi;
     
@@ -303,25 +295,32 @@ main(){
         usage;
     fi;
 
-    while getopts ":e:s:u:lp:b:h" opt; do
+    versioncheck=true
+    while getopts ":e:s:u:xlp:b:t:h" opt; do
         case ${opt} in
             l ) # Specify that the application will not send messages to dmaap but save it a file
-                local local_mode=true
+                local_mode=true
                 ;;
             e ) # Specify the file for missed events
-                local missed_events_file=$OPTARG
+                missed_events_file=$OPTARG
                 ;;
             s ) # Specify the file for snapshot
-                local snapshot_file=$OPTARG
+                snapshot_file=$OPTARG
                 ;;
             u ) # Specify the username to dmaap
-                local username=$OPTARG
+                username=$OPTARG
                 ;;
             p ) # Specify the password to dmaap
-                local password=$OPTARG
+                password=$OPTARG
+                ;;
+            t ) # Specify the dmaap topic
+                topic=$OPTARG
+                ;;
+            x ) # Specify whether to skip version check
+                versioncheck=false
                 ;;
             b ) # Specify the baseurl to dmaap
-                local hostname=$OPTARG
+                hostname=$OPTARG
                 ;;
             h ) 
                 usage;
@@ -345,15 +344,25 @@ main(){
         id=$(echo $dmaap_event | grep -o '"id":"[^"]*"' | cut -d":" -f2- | sed 's/"//g');
         action=$(echo $dmaap_event | grep -o '"action":"[^"]*"' | cut -d":" -f2- | sed 's/"//g');
         smallest_resource_version=$(echo $dmaap_event | jq -M '.' | grep 'resource-version' | sort | tail -1 | sed 's/[^0-9]//g');
-        resource_version_matches_snapshot_file "${snapshot_file}" "${entity_link}" "${smallest_resource_version}" "${action}" && {
-            send_dmaap "${local_mode}" "$dmaap_event" "$username" "$password" "$hostname" && {
+        #smallest_resource_version=$(echo $dmaap_event | python -m json.tool | grep 'resource-version' | sort | tail -1 | sed 's/[^0-9]//g');
+        match=0;
+        if [ "$versioncheck" = true ]; then
+            resource_version_matches_snapshot_file "${snapshot_file}" "${entity_link}" "${smallest_resource_version}" "${action}" "${topic}" && {
+                match=0;
+            } || {
+                match=1;
+            }
+         fi;
+
+        if [ $match -eq 0 ]; then
+            send_dmaap "${local_mode}" "$dmaap_event" "$username" "$password" "$hostname" "$topic" && {
                 echo "Dmaap Event with Id $id was sent";
             } || {
                 echo "Dmaap Event with Id $id was not sent due to dmaap error, please check logs";
             }
-        } || {
+       else
             echo "Dmaap Event with Id $id not sent";
-        }
+       fi;
 
     done < ${missed_events_file};
 
index ebd8677..0b2f3e1 100644 (file)
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/bash
 
 ###
 # ============LICENSE_START=======================================================
index 4bcc0d9..994485e 100644 (file)
@@ -1,4 +1,4 @@
-#!/bin/ksh
+#!/bin/bash
 
 ###
 # ============LICENSE_START=======================================================
@@ -43,7 +43,7 @@ fi
 INPUT_DIR_FOR_JAVA=${INPUT_PATH}/combined
 mkdir -p "$INPUT_DIR_FOR_JAVA"
 INPUT_FILE_FOR_JAVA=${INPUT_DIR_FOR_JAVA}/sorted_dmaap_files.txt
-sort --numeric-sort -k 1 -t '_' $(find ${INPUT_PATH}/* -maxdepth 0 -type f) | awk -F '_' '{ print $2"_"$3; }' > $INPUT_FILE_FOR_JAVA
+sort -g -k 1 -t '_' $(find ${INPUT_PATH}/* -maxdepth 0 -type f) | awk -F '_' '{ print $2"_"$3; }' > $INPUT_FILE_FOR_JAVA
 
 shift
 
index ec40b90..3115b87 100644 (file)
@@ -23,7 +23,7 @@ import com.jayway.jsonpath.JsonPath;
 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
 import org.janusgraph.core.JanusGraphTransaction;
 import org.junit.*;
-import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.restclient.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.util.AAIConfig;
diff --git a/src/test/java/org/onap/aai/audit/AuditGraphson2SqlTest.java b/src/test/java/org/onap/aai/audit/AuditGraphson2SqlTest.java
new file mode 100644 (file)
index 0000000..2bc6250
--- /dev/null
@@ -0,0 +1,287 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.aai.audit;
+
+import com.google.gson.JsonObject;
+import org.junit.Before;
+import org.junit.FixMethodOrder;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+import org.mockito.Mock;
+
+import static org.mockito.Matchers.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import org.onap.aai.AAISetup;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.exceptions.EdgeRuleNotFoundException;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.rest.client.ApertureService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.test.context.TestPropertySource;
+
+import java.io.IOException;
+import java.util.HashMap;
+
+import static org.junit.Assert.*;
+
+public class AuditGraphson2SqlTest extends AAISetup {
+
+       
+       private AuditGraphson2Sql auditG2S;
+       
+       @Autowired 
+       private EdgeIngestor ei;
+
+       @Mock
+       private ApertureService apertureServiceMock;
+
+       @Before
+       public void setUp() {
+               
+               auditG2S = new AuditGraphson2Sql(ei, schemaVersions, loaderFactory, apertureServiceMock);
+                   
+       }
+
+    @Test
+    public void testCompareGood() throws IOException, EdgeRuleNotFoundException {
+       String outDir = "logs/data/audit";
+       Boolean resultOk = true;
+       HashMap <String,Integer> gHash = new HashMap <String,Integer> ();
+       gHash.put("tbl2", 5);
+       gHash.put("tbl-x", 6);
+       gHash.put("edge__tbly__tblX", 7);
+       gHash.put("edge__tblZ__tblX", 8);
+       gHash.put("tbl-only-in-g", 88);
+       
+       HashMap <String,Integer> sEdgeHash = new HashMap <String,Integer> ();
+       sEdgeHash.put("edge__tbly__tblX", 7);
+       sEdgeHash.put("edge__tblZ__tblX", 8);
+       
+       HashMap <String,Integer> sNodeHash = new HashMap <String,Integer> ();
+       sNodeHash.put("tbl2", 5);
+       sNodeHash.put("tbl-x", 6);
+       sNodeHash.put("tbl-only-in-sql", 89);
+       
+       String titleInfo = "Comparing data from GraphSon file: " +
+                       "fileXYZ.202001223344, and Aperture data using timeStamp = [987654321]"; 
+               
+       try {
+               auditG2S.compareResults(gHash,sNodeHash,sEdgeHash,outDir,titleInfo);
+       }
+       catch ( Exception e ) {
+               System.out.println("ERROR - got this exception: " + e.getMessage());
+               resultOk = false;
+       }       
+       assertTrue(resultOk);   
+    }
+       
+    @Test
+    public void testCompareResMissingRecords() throws IOException, EdgeRuleNotFoundException {
+       
+       String outDir = "logs/data/audit";
+       Boolean resultOk = true;
+       HashMap <String,Integer> gHash = new HashMap <String,Integer> ();
+       gHash.put("tbl2", 5);
+       gHash.put("tbl-x", 6);
+       gHash.put("edge__tbly__tblX", 7);
+       gHash.put("edge__tblZ__tblX", 8);
+       gHash.put("tbl-only-in-g", 88);
+       HashMap <String,Integer> sNodeHash = new HashMap <String,Integer> ();
+       HashMap <String,Integer> sEdgeHash = new HashMap <String,Integer> ();
+       
+       String titleInfo = "Comparing data from GraphSon file: " +
+                       "fileXYZ.202001223344, and Aperture data using timeStamp = [987654321]"; 
+                               
+       try {
+               auditG2S.compareResults(gHash,sNodeHash,sEdgeHash,outDir,titleInfo);
+       }
+       catch ( Exception e ) {
+               System.out.println("ERROR - got this exception: " + e.getMessage());
+               resultOk = false;
+       }
+       
+       assertTrue(resultOk);           
+    }
+    
+    @Ignore
+    @Test
+    public void testGetDateTimeStamp() {
+       long dts = 0;
+       String fName = "xxxxx.yyyy.202003220000";
+       try {
+               dts = auditG2S.getTimestampFromFileName(fName); 
+       } catch (Exception e) {
+               System.out.println(" threw Exception e = " + e.getMessage());
+       }
+
+       assertEquals(1577595600000L, dts);
+    }
+    
+    @Test
+    public void testTimePieceGood() {
+       String dtPiece = "";
+       String fName = "xxxxx.yyyy.222233445566";
+       try {
+               dtPiece = auditG2S.getDateTimePieceOfFileName(fName); 
+       } catch (Exception e) {
+               System.out.println(" threw Exception e = " + e.getMessage());
+       }
+       assertEquals( "222233445566", dtPiece);
+    }
+    
+    @Test
+    public void testTimePieceGoodStill() {
+       String dtPiece = "";
+       String fName = "x.222233445566";
+       try {
+               dtPiece = auditG2S.getDateTimePieceOfFileName(fName); 
+       } catch (Exception e) {
+               System.out.println(" threw Exception e = " + e.getMessage());
+       }
+       assertEquals("222233445566", dtPiece);
+    }
+    
+    @Test
+    public void testTimePieceNotime() {
+       String fName = "xxxxx.yyyy";
+       Boolean resultOk = true;
+       try {
+               auditG2S.getDateTimePieceOfFileName(fName); 
+       } catch (Exception e) {
+               System.out.println(" threw Exception e = " + e.getMessage());
+               resultOk = false;
+       }
+       assertFalse(resultOk);
+    }
+    
+    @Test
+    public void testTimePieceTooShort() {
+       String fName = "xxxxx.yyyy.22223";
+       Boolean resultOk = true;
+       try {
+               auditG2S.getDateTimePieceOfFileName(fName); 
+       } catch (Exception e) {
+               System.out.println(" threw Exception e = " + e.getMessage());
+               resultOk = false;
+       }
+       assertFalse(resultOk);
+    }
+    
+
+    @Ignore
+    @Test
+    public void testGetCounts() throws IOException, EdgeRuleNotFoundException {
+       Boolean resultOk = true;
+       String dbName = "aai";
+       String fileNamePart = "dataSnapshot.graphSON.201908151845";
+       String srcDir = "src/test/resources/audit/";
+       HashMap <String,Integer> resHash = new HashMap <String,Integer> ();
+       try {
+               resHash = auditG2S.getCountsFromGraphsonSnapshot(dbName, fileNamePart, srcDir);
+       }
+       catch ( Exception e ) {
+               System.out.println("ERROR - got this exception: " + e.getMessage());
+               resultOk = false;
+       }
+       
+       assertTrue(resultOk);           
+    }
+    
+
+    @Ignore
+    @Test
+    public void testGoodRun() throws IOException, EdgeRuleNotFoundException {
+       
+       String [] argVals = {};
+
+       // this is the tStamp that would go
+               //     with this file name: "dataSnapshot.graphSON.201908151845"
+               Long tStamp = 1565842725000L;
+
+
+               JsonObject jVal = new JsonObject();
+               jVal.addProperty("autonomous-system",5);
+               jVal.addProperty("pnf",7);
+
+
+               String dbn = "aai_relational";
+               String resStr = "";
+       try {
+                       when(apertureServiceMock.runAudit(anyLong(), anyString())).thenReturn(jVal);
+               resStr = auditG2S.runAudit("aai",
+                                       "dataSnapshot.graphSON.201908151845",
+                                       "src/test/resources/audit/");
+       }
+       catch ( Exception e ) {
+               System.out.println("ERROR - got this exception: " + e.getMessage());
+                       resStr = "Error";
+       }
+
+       assertTrue( resStr.startsWith("Audit ran and logged") );
+
+    }
+    
+    @Test
+    public void testRunWithBadParam() throws IOException, EdgeRuleNotFoundException {
+       
+       String resStr = "";
+       try {
+               resStr = auditG2S.runAudit("aai",
+                                       "bogusFileName",
+                                       "src/test/resources/audit/");
+       }
+       catch ( Exception e ) {
+               System.out.println("ERROR - got this exception: " + e.getMessage());
+               resStr = "Error";
+       }
+
+       assertEquals("Error", resStr);
+    }
+
+
+       @Test
+       public void testGetCountsFromJson() throws IOException, EdgeRuleNotFoundException {
+
+               JsonObject sqlJsonData = new JsonObject ();
+               sqlJsonData.addProperty("tableName1", 4);
+               sqlJsonData.addProperty("tableName2", 5);
+               sqlJsonData.addProperty("tableName3", 6);
+               sqlJsonData.addProperty("tableName4", 7);
+
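+               // getNodeCountsFromSQLRes presumably copies each (tableName -> count) entry of the
+               // JSON object into a HashMap; a sketch of that mapping (the method internals are an
+               // assumption) using Gson's JsonObject API:
+               //   for (Map.Entry<String, JsonElement> e : sqlJsonData.entrySet()) {
+               //       results.put(e.getKey(), e.getValue().getAsInt());
+               //   }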
+               HashMap <String,Integer> results1 = new HashMap <String,Integer> ();
+               HashMap <String,Integer> results2 = new HashMap <String,Integer> ();
+               try {
+                       results1 = auditG2S.getNodeCountsFromSQLRes(sqlJsonData);
+                       results2 = auditG2S.getNodeCountsFromSQLRes(sqlJsonData);
+               }
+               catch ( Exception e ) {
+                       System.out.println("ERROR - got this exception: " + e.getMessage());
+               }
+
+               assertEquals(4, results1.size());
+               assertTrue( results1.containsKey("tableName3") );
+               assertEquals(Integer.valueOf(7), results1.get("tableName4"));
+               assertEquals(4, results2.size());
+       }
+    
+}
index ad27188..ef57aaf 100644 (file)
@@ -32,6 +32,9 @@ import org.junit.Test;
 import org.onap.aai.AAISetup;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.FormatDate;
+import org.onap.aai.util.GraphAdminDBUtils;
 import org.springframework.boot.test.rule.OutputCapture;
 
 import com.beust.jcommander.ParameterException;
@@ -58,6 +61,8 @@ public class DataSnapshotTest extends AAISetup {
     private JanusGraphTransaction currentTransaction;
 
     private List<Vertex> vertexes;
+    
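+    // Seconds to wait so the next snapshot's timestamped file name differs from the previous test's (assumes the name's timestamp is minute-granular)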
+    private static final int DELAYSINGLETHREADTEST = 90;
 
     @Rule
     public OutputCapture outputCapture = new OutputCapture();
@@ -133,7 +138,10 @@ public class DataSnapshotTest extends AAISetup {
         String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
 
         Set<Path> preSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
-
+        
+        // A previous test may have produced a snapshot with the same generated file name;
+        // this wait ensures a new file name is used for this test.
+        System.out.println("delaying snapshot generation by " + DELAYSINGLETHREADTEST + " seconds");
+        Thread.sleep(DELAYSINGLETHREADTEST * 1000);
         // Run the clear dataSnapshot and this time it should fail
         //String [] args = {"JUST_TAKE_SNAPSHOT"};  >> default behavior is now to use 15 threads
         // To just get one file, you have to tell it to just use one.
diff --git a/src/test/java/org/onap/aai/datasnapshot/DataSnapshotTest4HistInit.java b/src/test/java/org/onap/aai/datasnapshot/DataSnapshotTest4HistInit.java
new file mode 100644 (file)
index 0000000..adcde4e
--- /dev/null
@@ -0,0 +1,471 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.aai.datasnapshot;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Property;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.datagrooming.DataGrooming;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+
+import org.onap.aai.logging.LogFormatTools;
+import org.onap.aai.util.AAISystemExitUtil;
+import org.springframework.boot.test.rule.OutputCapture;
+
+import com.beust.jcommander.ParameterException;
+
+import java.lang.NumberFormatException;
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.Matchers.containsString;
+import static org.junit.Assert.*;
+
+
+public class DataSnapshotTest4HistInit extends AAISetup {
+
+    private GraphTraversalSource g;
+    
+    private JanusGraphTransaction currentTransaction;
+
+    private List<Vertex> vertexes;
+    
+    private DataSnapshot4HistInit dataSnapshot4HistInit;
+
+    @Rule
+    public OutputCapture outputCapture = new OutputCapture();
+
+    @Before
+    public void setup() throws AAIException {
+       dataSnapshot4HistInit = new DataSnapshot4HistInit(loaderFactory, schemaVersions);
+       
+       JanusGraph graph = AAIGraph.getInstance().getGraph();
+        currentTransaction = graph.newTransaction();
+        g = currentTransaction.traversal();
+        
+        // Setup the graph so it has one pserver vertex
+        vertexes = setupPserverData(g);
+        currentTransaction.commit();
+    }
+
+    @After
+    public void tearDown(){
+
+        JanusGraph graph = AAIGraph.getInstance().getGraph();
+        currentTransaction = graph.newTransaction();
+        g = currentTransaction.traversal();
+
+        vertexes.stream().forEach((v) -> g.V(v).next().remove());
+        currentTransaction.commit();
+    }
+    
+    @Test
+    public void testClearEntireDatabaseAndVerifyDataIsRemoved() throws IOException {
+
+        // Copy the pserver.graphson file from src/test/resources to the ${AJSC_HOME}/logs/data/dataSnapshots/ folder
+        String sourceFileName = "src/test/resources/pserver.graphson";
+        String destFileName   = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver.graphson";
+        copySnapshotFile(sourceFileName,destFileName);
+
+
+        // Run the dataSnapshot to clear the graph
+        String [] args = {"-c", "CLEAR_ENTIRE_DATABASE", "-f", "pserver.graphson"};
+        dataSnapshot4HistInit.executeCommand(args);
+
+        // Since the code doesn't clear the graph using AAIGraph.getInstance().getGraph(), it creates a second in-memory graph,
+        // so we can't verify the clear by counting the vertexes and edges in the graph.
+        // In the future we could do that, but for now we depend on the following string: "All done clearing DB"
+
+        // Capture the standard output and see if the following text is there
+        assertThat(outputCapture.toString(), containsString("All done clearing DB"));
+    }
+
+
+    @Test
+    public void testClearEntireDatabaseWithEmptyGraphSONFileAndItShouldNotClearDatabase() throws IOException {
+
+        // Create an empty file called empty.graphson in src/test/resources/
+
+        // Copy that file to ${AJSC_HOME}/logs/data/dataSnapshots/
+        String sourceFileName = "src/test/resources/empty.graphson";
+        String destFileName   = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/empty.graphson";
+        copySnapshotFile(sourceFileName,destFileName);
+
+        // Run the clear dataSnapshot and this time it should fail
+        String [] args = {"-c","CLEAR_ENTIRE_DATABASE", "-f","empty.graphson"};
+        dataSnapshot4HistInit.executeCommand(args);
+
+        // Capture the standard output and check for the "no data" message.
+        // Since the graphson is empty, the tool should report that and not clear the graph.
+        assertThat(outputCapture.toString(), containsString("graphson had no data."));
+    }
+
+    
+    @Test
+    public void testTakeSnapshotAndItShouldCreateASnapshotFileWithOneVertex() throws IOException, InterruptedException {
+
+        String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+        Set<Path> preSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+
+        // Take a snapshot.
+        //String [] args = {"JUST_TAKE_SNAPSHOT"};  >> default behavior is now to use 15 threads
+        // To get just one file, you have to tell it to use a single thread.
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount" ,"1"};
+
+        dataSnapshot4HistInit.executeCommand(args);
+
+        // Note: a short sleep may be needed here if the snapshot file is written asynchronously.
+
+        Set<Path> postSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
+
+        assertThat(postSnapshotFiles.size(), is(preSnapshotFiles.size()+1));
+        postSnapshotFiles.removeAll(preSnapshotFiles);
+        List<Path> snapshotPathList = postSnapshotFiles.stream().collect(Collectors.toList());
+
+        assertThat(snapshotPathList.size(), is(1));
+
+        List<String> fileContents = Files.readAllLines(snapshotPathList.get(0));
+        assertThat(fileContents.get(0), containsString("id"));
+    }
+    
+
+    @Test
+    public void testTakeSnapshotMultiAndItShouldCreateMultipleSnapshotFiles() throws IOException {
+
+        String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+        // Take a snapshot using two threads.
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount","2"};
+
+        dataSnapshot4HistInit.executeCommand(args);
+
+        // With only one vertex in the graph, multiple snapshot files may not be created;
+        // more data would need to be added to the JanusGraph to verify that.
+    }
+
+
+    @Test
+    public void testTakeSnapshotMultiWithDebugAndItShouldCreateMultipleSnapshotFiles() throws IOException {
+
+        String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+        // Take a snapshot using two threads with debug enabled.
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount","2", "-debugFlag","DEBUG"};
+
+        dataSnapshot4HistInit.executeCommand(args);
+
+        // With only one vertex in the graph, multiple snapshot files may not be created;
+        // more data would need to be added to the JanusGraph to verify that.
+    }
+
+
+    @Test
+    public void testTakeSnapshotMultiWithDebugAndInvalidNumberAndItShouldFail() throws IOException {
+
+        String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+        // Run with a non-numeric thread count; the tool should reject it.
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount","foo","-debugFlag", "DEBUG"};
+        
+        dataSnapshot4HistInit.executeCommand(args);
+
+        // Only argument validation is exercised here; the bad thread count should be rejected before any snapshot work starts.
+    }
+
+    @Test
+    public void testTakeSnapshotMultiWithDebugAndTimeDelayAndInvalidNumberAndItShouldFail() throws IOException {
+
+        String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+        // Run with a non-numeric thread count plus a debug delay; the tool should reject it.
+        String [] args = {"-c","THREADED_SNAPSHOT","-threadCount", "foo", "-debugFlag","DEBUG","-debugAddDelayTime", "100"};
+
+               dataSnapshot4HistInit.executeCommand(args);
+
+        // Only argument validation is exercised here; no snapshot files are expected.
+    }
+
+    @Test
+    public void testTakeSnapshotMultiWithDebugAndTimeDelayAndZeroThreadsAndItShouldFail() throws IOException {
+
+        String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+        // Run with a thread count of zero; the tool should reject it.
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount","0", "-debugFlag","DEBUG", "-debugAddDelayTime","100"};
+
+        dataSnapshot4HistInit.executeCommand(args);
+
+        // Only argument validation is exercised here; no snapshot files are expected.
+    }
+
+    @Test
+    public void testTakeSnapshotMultiWithDebugAndTimeDelayIsInvalidNumberAndItShouldFail() throws IOException {
+
+        String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+        // Run with a thread count of zero and a non-numeric debug delay; the tool should reject it.
+        String [] args = {"-c","THREADED_SNAPSHOT","-threadCount", "0","-debugFlag","DEBUG", "-debugAddDelayTime","foo"};
+
+        dataSnapshot4HistInit.executeCommand(args);
+
+        // Only argument validation is exercised here; no snapshot files are expected.
+    }
+
+//    @Test
+    public void testTakeSnapshotMultiWithMoreParametersThanAllowedAndItShouldFail() throws IOException {
+
+        String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+        // Run with more parameters than allowed; the tool should reject it.
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount", "0", "-debugFlag","DEBUG",  "-debugAddDelayTime","foo", "bar"};
+
+        dataSnapshot4HistInit.executeCommand(args);
+
+        // Only argument validation is exercised here; no snapshot files are expected.
+    }
+
+    @Test
+    public void testTakeSnapshotMultiWithZeroThreadsAndItShouldFail(){
+
+        // Only argument validation is exercised here; no snapshot files are expected.
+        String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+        // Run with a thread count of zero; the tool should reject it.
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount","0"};
+
+        dataSnapshot4HistInit.executeCommand(args);
+    }
+
+    @Test
+    public void testTakeSnapshotMultiWithInvalidNumberForThreadsAndItShouldFail(){
+
+        // Only argument validation is exercised here; no snapshot files are expected.
+        String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+
+        // Run with a non-numeric thread count; the tool should reject it.
+        String [] args = {"-c","THREADED_SNAPSHOT","-threadCount", "foo"};
+
+        dataSnapshot4HistInit.executeCommand(args);
+    }
+   
+
+    @Test
+    public void testReloadDataAndVerifyDataInGraphMatchesGraphson() throws IOException {
+
+        // Create a graphson file that contains a couple of vertexes in src/test/resources
+        // Copy that file to ${AJSC_HOME}/logs/data/dataSnapshots/
+        // Run the reload arguments and ensure that the graph was recreated by checking vertexes in graph
+
+        // After the reload, remove the added vertexes from the graph.
+        // This keeps each test independent, so tests don't rely on
+        // each other's data and cause odd failures.
+        String sourceFileName = "src/test/resources/pserver.graphson";
+        String destFileName   = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver.graphson";
+        copySnapshotFile(sourceFileName,destFileName);
+
+        String [] args = {"-c","RELOAD_DATA", "-f","pserver.graphson"};
+
+        dataSnapshot4HistInit.executeCommand(args);
+    }
+
+   
+    @Test
+    public void testMultiReloadDataAndVerifyDataInGraphMatchesGraphson() throws IOException, AAIException {
+
+        // Create multiple graphson files that contains a couple of vertexes in src/test/resources
+        // Copy those files to ${AJSC_HOME}/logs/data/dataSnapshots/
+        // Run the reload arguments and ensure that the graph was recreated by checking vertexes in graph
+        String sourceFileName = "src/test/resources/pserver2.graphson.P0";
+        String destFileName   = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver2.graphson.P0";
+        copySnapshotFile(sourceFileName,destFileName);
+
+        sourceFileName = "src/test/resources/pserver2.graphson.P1";
+        destFileName   = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver2.graphson.P1";
+        copySnapshotFile(sourceFileName,destFileName);
+
+        // After the reload, remove the added vertexes from the graph.
+        // This keeps each test independent, so tests don't rely on
+        // each other's data and cause odd failures.
+       
+        String [] args = {"-c","MULTITHREAD_RELOAD","-f", "pserver2.graphson"};
+        dataSnapshot4HistInit.executeCommand(args);
+        
+    }       
+    
+    @Test
+    public void testMultiReloadDataWithNonExistentFilesAndItShouldFail() throws IOException {
+
+        // Reload with a file name that doesn't exist; the command should fail.
+        String [] args = {"-c","MULTITHREAD_RELOAD", "-f","emptyfoo2.graphson"};
+
+        dataSnapshot4HistInit.executeCommand(args);
+    }
+
+    @Test
+    public void testReloadMultiDataAndVerifyDataInGraphMatchesGraphson() throws IOException {
+
+        // Create multiple graphson files that contains a couple of vertexes in src/test/resources
+        // Copy those files to ${AJSC_HOME}/logs/data/dataSnapshots/
+        // Run the reload arguments and ensure that the graph was recreated by checking vertexes in graph
+        String sourceFileName = "src/test/resources/pserver2.graphson.P0";
+        String destFileName   = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver2.graphson.P0";
+        copySnapshotFile(sourceFileName,destFileName);
+
+        sourceFileName = "src/test/resources/pserver2.graphson.P1";
+        destFileName   = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver2.graphson.P1";
+        copySnapshotFile(sourceFileName,destFileName);
+
+        // After the reload, remove the added vertexes from the graph.
+        // This keeps each test independent, so tests don't rely on
+        // each other's data and cause odd failures.
+        String [] args = {"-c","RELOAD_DATA_MULTI","-f", "pserver2.graphson"};
+
+        dataSnapshot4HistInit.executeCommand(args);
+    }
+    
+    @Test
+    public void testCanRetrieveNamesOfKeyProps() throws IOException {
+
+        // Make sure we can get the key names without failing
+        HashMap<String, ArrayList<String>> keyNamesHash = dataSnapshot4HistInit.getNodeKeyNames();
+        for (Map.Entry<String, ArrayList<String>> entry : keyNamesHash.entrySet()) {
+            System.out.println("DEBUG === for nodeType " + entry.getKey() + ", got keys = [" + entry.getValue() + "]");
+        }
+
+        assertNotNull(keyNamesHash);
+        assertFalse(keyNamesHash.isEmpty());
+    }
+
+    
+    private void showVertProperties(String propKey, String propVal)  {
+       
+       Vertex v1 = g.V().has(propKey, propVal).next();
+        Iterator<VertexProperty<Object>> pI = v1.properties();
+       while( pI.hasNext() ){
+               VertexProperty<Object> tp = pI.next();
+               String infStr = " [" + tp.key() + "][" + tp.value() + "] ";
+               System.out.println("Regular ole properties are: " + infStr  ); 
+               Iterator<Property<Object>> fullPropI = tp.properties();
+               while( fullPropI.hasNext() ){
+                       // Note - the 'real' key/value of a property are not part of this list, just the
+                       //    extra stuff beyond those two.
+                       Property<Object> propOfProp = fullPropI.next();
+                       String infStr2 = " [" + propOfProp.key() + "][" + propOfProp.value() + "] ";
+                       System.out.println("For " + infStr + ", got sub-property:" + infStr2 );
+               }
+       }  
+    }
+    
+    
+    private List<Vertex> setupOneHistoryNode(GraphTraversalSource g) throws AAIException {
+       
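+       // TinkerPop meta-properties: in property(key, value, kv...), the trailing key/value pairs
+       // ("start-ts", "source-of-truth") become properties of the VertexProperty itself, which is
+       // presumably how history-style snapshots carry per-property provenance.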
+        Vertex v1 = g.addV().property("aai-node-type", "pserver","start-ts", 9988707,"source-of-truth","N/A")
+            .property("hostname", "historyHOstGuy--8","start-ts", 9988707,"source-of-truth","N/A")
+            .property("equip-vendor", "historyVendor","start-ts", 9988707,"source-of-truth","N/A")
+            .property("role", "historyRole","start-ts", 9988707,"source-of-truth","N/A")
+            .next();
+       List<Vertex> list = new ArrayList<>();
+        list.add(v1);
+        
+        Iterator<VertexProperty<Object>> pI = v1.properties();
+       while( pI.hasNext() ){
+               VertexProperty<Object> tp = pI.next();
+               String infStr = " [" + tp.key() + "|" + tp.value() + "] ";
+               System.out.println("Regular ole properties are: " + infStr  ); 
+               Iterator<Property<Object>> fullPropI = tp.properties();
+               while( fullPropI.hasNext() ){
+                       // Note - the 'real' key/value of a property are not part of this list, just the
+                       //    extra stuff beyond those two.
+                       Property<Object> propOfProp = fullPropI.next();
+                       String infStr2 = " [" + propOfProp.key() + "|" + propOfProp.value() + "] ";
+                       System.out.println("For " + infStr + ", got sub-property:" + infStr2 );
+               }
+       }    
+       return list;
+    }
+    
+    private List<Vertex> setupPserverData(GraphTraversalSource g) throws AAIException {
+        Vertex v1 = g.addV().property("aai-node-type", "pserver")
+            .property("hostname", "somerandomhostname")
+            .next();
+        List<Vertex> list = new ArrayList<>();
+        list.add(v1);
+        Vertex v2 = g.addV().property("aai-node-type", "pserver")
+            .property("hostname", "somerandomhostname2")
+            .next();
+        Vertex pinterface = g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "p-interface-name")
+                .property("in-maint", false)
+                .property("source-of-truth", "JUNIT")
+                .next();
+        edgeSerializer.addTreeEdge(g, v2, pinterface);
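+        // addTreeEdge creates the containment edge between the pserver and its p-interface per the schema's edge rules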
+        list.add(v2);
+        return list;
+    }
+
+    private void copySnapshotFile(String sourceFileName, String destFileName) throws IOException {
+
+        File inputFile = new File(sourceFileName);
+        File outputFile = new File(destFileName);
+
+        FileUtils.copyFile(inputFile, outputFile);
+    }
+}
\ No newline at end of file
index 7dacfe6..0ca8481 100644 (file)
@@ -21,7 +21,6 @@ package org.onap.aai.dbgen;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.springframework.boot.test.rule.OutputCapture;
 import org.janusgraph.core.JanusGraphTransaction;
 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
 import org.apache.tinkerpop.gremlin.structure.Edge;
@@ -29,7 +28,6 @@ import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.FixMethodOrder;
-import org.junit.Rule;
 import org.junit.Test;
 import org.junit.runners.MethodSorters;
 import org.onap.aai.AAISetup;
@@ -39,8 +37,6 @@ import java.io.ByteArrayInputStream;
 import java.io.InputStream;
 import java.util.List;
 
-import static org.hamcrest.Matchers.containsString;
-import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
 
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
@@ -51,9 +47,6 @@ public class ForceDeleteToolTest extends AAISetup {
     private ForceDeleteTool deleteTool;
 
     private Vertex cloudRegionVertex;
-    
-    @Rule
-    public OutputCapture outputCapture = new OutputCapture();
 
     @Before
     public void setup(){
@@ -124,12 +117,10 @@ public class ForceDeleteToolTest extends AAISetup {
     @Test
     public void testDeleteNode(){
 
-        InputStream systemInputStream = System.in;
-        ByteArrayInputStream in = new ByteArrayInputStream("y".getBytes());
-        System.setIn(in);
         String id = cloudRegionVertex.id().toString();
 
         String [] args = {
+
                 "-action",
                 "DELETE_NODE",
                 "-userId",
@@ -139,7 +130,6 @@ public class ForceDeleteToolTest extends AAISetup {
         };
 
         deleteTool.main(args);
-        System.setIn(systemInputStream);
     }
 
     @Test
@@ -151,6 +141,7 @@ public class ForceDeleteToolTest extends AAISetup {
         String cloudRegionToPserverId = edges.get(0).id().toString();
 
         String [] args = {
+
                 "-action",
                 "COLLECT_DATA",
                 "-userId",
@@ -160,7 +151,6 @@ public class ForceDeleteToolTest extends AAISetup {
         };
 
         deleteTool.main(args);
-        
     }
 
     @Test
@@ -175,6 +165,7 @@ public class ForceDeleteToolTest extends AAISetup {
         String cloudRegionToPserverId = edges.get(0).id().toString();
 
         String [] args = {
+
                 "-action",
                 "DELETE_EDGE",
                 "-userId",
@@ -186,200 +177,6 @@ public class ForceDeleteToolTest extends AAISetup {
         deleteTool.main(args);
         System.setIn(systemInputStream);
     }
-    
- //------------------------------ Adding some new tests --------------
-
-    
-    @Test
-    public void testCollectDataForVertexId(){
-        String id = cloudRegionVertex.id().toString();
-        
-        String [] args = {
-                "-action",
-                "COLLECT_DATA",
-                "-userId",
-                "someuser",
-                "-vertexId",
-                id
-        };
-
-        deleteTool.main(args);
-    }
-       
-    
-    @Test
-    public void testInputParamsBadAction(){
-        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
-        GraphTraversalSource g = transaction.traversal();
-        List<Edge> edges = g.E().toList();
-        String cloudRegionToPserverId = edges.get(0).id().toString();
-
-        String [] args = {
-                "-action",
-                "JUNK-ACTION",
-                "-userId",
-                "someuser",
-                "-edgeId",
-                cloudRegionToPserverId
-        };
-
-        deleteTool.main(args);
-       // Capture the standard output and see if the following text is there
-        assertThat(outputCapture.toString(), containsString("Bad action parameter"));
-       
-    }
-   
-    @Test
-    public void testMissingInputs(){
-
-        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
-        GraphTraversalSource g = transaction.traversal();
-
-        String [] args = {
-                "-action"
-        };
-        deleteTool.main(args);
-        assertThat(outputCapture.toString(), containsString("No value passed with"));
-        
-
-        args = new String []{
-                "-vertexId"
-        };
-        deleteTool.main(args);
-        assertThat(outputCapture.toString(), containsString("No value passed with"));
-        
-        
-        args = new String []{
-                "-edgeId"
-        };
-        deleteTool.main(args);
-        assertThat(outputCapture.toString(), containsString("No value passed with"));
-       
-        
-        args = new String []{
-                "-params4Collect"
-        };
-        deleteTool.main(args);
-        assertThat(outputCapture.toString(), containsString("No value passed with"));
-       
-    }
-       
-        
-    
-    @Test
-    public void testInvalidUserIds(){
-
-        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
-        GraphTraversalSource g = transaction.traversal();
-        List<Edge> edges = g.E().toList();
-        String cloudRegionToPserverId = edges.get(0).id().toString();
-
-        String [] args = {              
-                "-userId"
-        };
-        deleteTool.main(args);
-        assertThat(outputCapture.toString(), containsString("No value passed with"));
-        
-        args = new String []{
-                "-userId",
-                "bad"
-        };
-        deleteTool.main(args);
-        assertThat(outputCapture.toString(), containsString("Bad userId parameter"));
-        
-        args = new String []{
-                "-userId",
-                "AAIADMIN"
-        };
-        deleteTool.main(args);
-        assertThat(outputCapture.toString(), containsString("Bad userId parameter"));
-    }
-        
-        
-    @Test
-    public void testBadInputs2(){
-
-       // pass in a bad/unknown argument (-junkParam)
-        String [] args = {
-                "-junkParam",
-                "COLLECT_DATA",
-                "-userId",
-                "someuser",
-                "-edgeId",
-                "999"
-        };
-        
-        deleteTool.main(args);
-        assertThat(outputCapture.toString(), containsString("Unrecognized argument"));           
-    }
-        
-    @Test
-    public void testBadInputs3(){
-
-       // pass in a nonExistant edgeId for DELETE EDGE
-        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
-        GraphTraversalSource g = transaction.traversal();
-    
-        String [] args = {
-                "-action",
-                "DELETE_EDGE",
-                "-userId",
-                "someuser",
-                "-edgeId",
-                "NotRealId"
-        };
-        
-        deleteTool.main(args);
-        assertThat(outputCapture.toString(), containsString("Edge with edgeId = NotRealId not found"));
-             
-    }
-    
-    @Test
-    public void testBadInputs4(){
-
-        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
-        GraphTraversalSource g = transaction.traversal();
-        
-        // pass in a bad vertex Id when collecting data
-    
-        String [] args = {
-                       "-action",
-                "COLLECT_DATA",
-                "-userId",
-                "someuser",
-                "-vertexId",
-                "NotANumber"
-        };
-        
-        deleteTool.main(args);
-        assertThat(outputCapture.toString(), containsString("Bad value passed"));
-             
-    }
-         
-    
-    @Test
-    public void testBadInputs5(){
-
-        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
-        GraphTraversalSource g = transaction.traversal();
-        
-        // pass in a bad vertex Id when collecting data
-    
-        String [] args = {
-                       "-action",
-                "DELETE_NODE",
-                "-userId",
-                "someuser",
-                "-vertexId",
-                "555"
-        };
-        
-        deleteTool.main(args);
-        assertThat(outputCapture.toString(), containsString("Vertex with vertexId = 555 not found"));
-             
-    }
-    
     @After
     public void tearDown(){
 
diff --git a/src/test/java/org/onap/aai/migration/v20/MigrateL2DefaultToFalseTest.java b/src/test/java/org/onap/aai/migration/v20/MigrateL2DefaultToFalseTest.java
new file mode 100644 (file)
index 0000000..cba5c45
--- /dev/null
@@ -0,0 +1,135 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v20;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+import java.util.Arrays;
+
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class MigrateL2DefaultToFalseTest extends
+               AAISetup {
+       
+       protected static final String L_INTERFACE_NODE_TYPE = "l-interface";
+       protected static final String L2_MULTI_PROPERTY = "l2-multicasting";
+
+       public static class L2DefaultMigrator extends MigrateL2DefaultToFalse {
+        public L2DefaultMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){
+            super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        }
+    }
+
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+    private Loader loader;
+    private TransactionalGraphEngine dbEngine;
+    private L2DefaultMigrator migration;
+    private GraphTraversalSource g;
+
+    @Before
+    public void setup() throws Exception{
+        g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                loader);
+
+        g.addV().property("aai-node-type", L_INTERFACE_NODE_TYPE)
+                .property("interface-name", "no-value")
+                .property("interface-id", "1")
+                .next();
+        g.addV().property("aai-node-type", L_INTERFACE_NODE_TYPE)
+                       .property("interface-name", "empty-value")
+                       .property("interface-id", "2")
+                       .property(L2_MULTI_PROPERTY, "")
+                       .next();
+        g.addV().property("aai-node-type", L_INTERFACE_NODE_TYPE)
+                       .property("interface-name", "true-value")
+                       .property("interface-id", "3")
+                       .property(L2_MULTI_PROPERTY, "true")                    
+                       .next();
+        g.addV().property("aai-node-type", L_INTERFACE_NODE_TYPE)
+                       .property("interface-name", "false-value")
+                       .property("interface-id", "4")
+                       .property(L2_MULTI_PROPERTY, "false")                   
+                       .next();
+        g.addV().property("aai-node-type", L_INTERFACE_NODE_TYPE)
+                       .property("interface-name", "extra")
+                       .property("interface-id", "5")
+                       .next();      
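+        // Fixture summary: nodes 1 ("no-value"), 2 ("empty-value") and 5 ("extra") have no usable
+        // l2-multicasting value and should be defaulted to false; nodes 3 ("true-value") and
+        // 4 ("false-value") carry explicit values and should keep them.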
+        
+        TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+        GraphTraversalSource traversal = g;
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
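+        // The migrator pulls its traversal from engine.asAdmin().getTraversalSource(), so these spies redirect it to this test's graph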
+        migration = new L2DefaultMigrator(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        migration.run();
+    }
+    
+    @Test
+    public void testAllValuesSet() {
+       assertTrue("Value of node 1 \"no-value\" should have " + L2_MULTI_PROPERTY + " of false ",
+                       g.V().has("aai-node-type", L_INTERFACE_NODE_TYPE).has("interface-name", "no-value")
+                       .has(L2_MULTI_PROPERTY,false).hasNext());
+       
+       assertTrue("Value of node 2 \"empty-value\" should have " + L2_MULTI_PROPERTY + " of false ",
+                       g.V().has("aai-node-type", L_INTERFACE_NODE_TYPE).has("interface-name", "empty-value")
+                       .has(L2_MULTI_PROPERTY,false).hasNext());
+       
+       assertTrue("Value of node 3 \"true-value\" should have " + L2_MULTI_PROPERTY + " of true ",
+                       g.V().has("aai-node-type", L_INTERFACE_NODE_TYPE).has("interface-name", "true-value")
+                       .has(L2_MULTI_PROPERTY,true).hasNext()); 
+       
+       assertTrue("Value of node 4 \"false-value\" should have " + L2_MULTI_PROPERTY + " of false ",
+                       g.V().has("aai-node-type", L_INTERFACE_NODE_TYPE).has("interface-name", "false-value")
+                       .has(L2_MULTI_PROPERTY,false).hasNext()); 
+
+       assertTrue("Value of node 5 \"extra\" should have " + L2_MULTI_PROPERTY + " of false ",
+                       g.V().has("aai-node-type", L_INTERFACE_NODE_TYPE).has("interface-name", "extra")
+                       .has(L2_MULTI_PROPERTY,false).hasNext());       
+    }
+    
+    @Test
+    public void testOtherMethods() {
+       assertTrue("getStatus function works", migration.getStatus().toString().contains("SUCCESS"));
+       
+       assertTrue("getAffectedNodeTypes returns " + L_INTERFACE_NODE_TYPE, 
+                       Arrays.asList(migration.getAffectedNodeTypes().get()).contains(L_INTERFACE_NODE_TYPE));
+
+       assertTrue("getMigrationName returns MigrateL2DefaultToFalse", 
+                       migration.getMigrationName().equals("MigrateL2DefaultToFalse"));
+    }
+}
\ No newline at end of file
index 2b45d8c..f634010 100644 (file)
                        </fileNamePattern>
                </rollingPolicy>
                <encoder>
-                       <pattern>${"errorPattern"}</pattern>
+                       <pattern>${errorPattern}</pattern>
                </encoder>
        </appender>
        <appender name="dmaapAAIEventConsumerInfo"
        </appender>
        
        <!-- DataSnapshot logs ended -->
-       
+
+
+       <!-- AuditGraphson2SQL logs started -->
+       <appender name="auditGraphson2Sql" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>WARN</level>
+               </filter>
+               <File>${logDirectory}/auditGraphson2Sql/error.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/auditGraphson2Sql/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder>
+                       <pattern>${errorPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="auditGraphson2Sqldebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/auditGraphson2Sql/debug.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/auditGraphson2Sql/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder>
+                       <pattern>${debugPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="auditGraphson2Sqlaudit" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/auditGraphson2Sql/audit.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/auditGraphson2Sql/audit.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder>
+                       <pattern>${auditPattern}</pattern>
+               </encoder>
+       </appender>
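+       <!-- Routing note: the ThresholdFilter sends WARN and above to error.log, while the LevelFilters accept exactly DEBUG for debug.log and exactly INFO for audit.log -->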
+
+       <!-- AuditGraphson2Sql logs ended -->
+
+
+
        <!-- HistoryTruncate logs started -->
        <appender name="historyTruncate" class="ch.qos.logback.core.rolling.RollingFileAppender">
                <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
                        <fileNamePattern>${logDirectory}/createDBSchema/error.log.%d{yyyy-MM-dd}</fileNamePattern>
                </rollingPolicy>
                <encoder>
-                       <pattern>${"errorPattern"}</pattern>
+                       <pattern>${errorPattern}</pattern>
                </encoder>
        </appender>
 
                        <fileNamePattern>${logDirectory}/misc/error.log.%d{yyyy-MM-dd}</fileNamePattern>
                </rollingPolicy>
                <encoder>
-                       <pattern>${"errorPattern"}</pattern>
+                       <pattern>${errorPattern}</pattern>
                </encoder>
        </appender>
 
                        <onMatch>ACCEPT</onMatch>
                        <onMismatch>DENY</onMismatch>
                </filter>
-               <File>${logDirectory}/dupetool/debug.log</File>
+               <File>${logDirectory}/dupeTool/debug.log</File>
                <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
-                       <fileNamePattern>${logDirectory}/dupetool/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+                       <fileNamePattern>${logDirectory}/dupeTool/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
                </rollingPolicy>
                <encoder>
                        <pattern>${debugPattern}</pattern>
        </appender>     
        <!-- forceDelete logs ended -->
        
-       <!-- migration logs started --> 
+       <!-- default migration logs started --> 
        <appender name="migrationdebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
                <filter class="ch.qos.logback.classic.filter.LevelFilter">
                        <level>DEBUG</level>
                        <pattern>${errorPattern}</pattern>
                </encoder>
        </appender>     
-       <!-- migration logs ended -->   
+       <!-- default migration logs ended -->
+  
+    <!-- other migration logs started -->
+    <appender name="migrationlog" class="ch.qos.logback.classic.sift.SiftingAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <!-- This is MDC value -->
+               <!-- We will assign a value to 'logFilenameAppender' via Java code -->
+               <discriminator>
+                       <key>logFilenameAppender</key>
+                       <defaultValue>undefined</defaultValue>
+               </discriminator>
+               <sift>
+                       <!-- A standard RollingFileAppender, the log file is based on 'logFileName' 
+                               at runtime -->
+                       <appender name="FILE-${logFilenameAppender}"
+                               class="ch.qos.logback.core.rolling.RollingFileAppender">
+                               <file>${logDirectory}/migration/${logFilenameAppender}.log</file>
+                               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                                       <fileNamePattern>${logDirectory}/migration/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+                                       </fileNamePattern>
+                               </rollingPolicy>
+                               <encoder>
+                                       <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+                               </encoder>
+                       </appender>
+               </sift>
+       </appender>
+       <!-- other migration logs ended -->
        
        <!-- DataGrooming logs started -->
        <appender name="dataExportError" class="ch.qos.logback.core.rolling.RollingFileAppender">
                <appender-ref ref="STDOUT"/>
        </logger>
 
+       <logger name="org.onap.aai.audit.AuditGraphson2Sql" level="DEBUG" additivity="false">
+               <appender-ref ref="auditGraphson2Sql"/>
+               <appender-ref ref="auditGraphson2Sqldebug"/>
+               <appender-ref ref="auditGraphson2Sqlaudit"/>
+               <appender-ref ref="STDOUT"/>
+       </logger>
+
        <logger name="org.onap.aai.historytruncate" level="DEBUG" additivity="false">
                <appender-ref ref="historyTruncate"/>
                <appender-ref ref="historyTruncatedebug"/>
        <appender-ref ref="migrationdebug" />
        <appender-ref ref="migrationerror" />
        </logger>
+       
+       <logger name="org.onap.aai.migration" level="ERROR" additivity="false">
+               <appender-ref ref="migrationlog" />
+       </logger>
+       
+       <logger name="org.onap.aai.migration" level="INFO" additivity="false">
+               <appender-ref ref="migrationlog" />
+       </logger>
                        
        <logger name="org.onap.aai.dataexport" level="DEBUG" additivity="false">
                <appender-ref ref="dataExportError"/>