merge of v15 changes to onap 49/75949/3
author    LaMont, William (wl2432) <wl2432@us.att.com>
          Thu, 17 Jan 2019 18:48:07 +0000 (13:48 -0500)
committer Kajur, Harish (vk250x) <vk250x@att.com>
          Tue, 22 Jan 2019 15:46:02 +0000 (10:46 -0500)
Issue-ID: AAI-2088
Change-Id: I588499af58d0e799d860b2b4362da7c48050fafd
Signed-off-by: LaMont, William (wl2432) <wl2432@us.att.com>
Signed-off-by: Kajur, Harish (vk250x) <vk250x@att.com>
109 files changed:
pom.xml
src/main/docker/docker-entrypoint.sh
src/main/java/org/onap/aai/GraphAdminApp.java
src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
src/main/java/org/onap/aai/dataexport/DataExportTasks.java [new file with mode: 0644]
src/main/java/org/onap/aai/datagrooming/DataGrooming.java
src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java
src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java
src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java
src/main/java/org/onap/aai/datasnapshot/PartialPropAndEdgeLoader.java
src/main/java/org/onap/aai/datasnapshot/PartialVertexLoader.java
src/main/java/org/onap/aai/datasnapshot/PrintVertexDetails.java
src/main/java/org/onap/aai/db/schema/ScriptDriver.java
src/main/java/org/onap/aai/dbgen/DupeTool.java
src/main/java/org/onap/aai/dbgen/DynamicPayloadGenerator.java [new file with mode: 0644]
src/main/java/org/onap/aai/dbgen/GraphSONPartialIO.java [new file with mode: 0644]
src/main/java/org/onap/aai/dbgen/GraphSONPartialReader.java [new file with mode: 0644]
src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java
src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java
src/main/java/org/onap/aai/migration/MigrationController.java
src/main/java/org/onap/aai/migration/MigrationControllerInternal.java
src/main/java/org/onap/aai/migration/Migrator.java
src/main/java/org/onap/aai/migration/ValueMigrator.java
src/main/java/org/onap/aai/migration/v12/ALTSLicenseEntitlementMigration.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/MigrateDataFromASDCToConfiguration.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/MigrateHUBEvcInventory.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/MigrateINVEvcInventory.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventory.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/MigratePATHEvcInventory.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/MigratePATHPhysicalInventory.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/MigrateSAREvcInventory.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartOne.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartTwo.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateEdgesBetweenVnfcAndVfModule.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateForwarderEvcCircuitId.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v14/MigrateGenericVnfMgmtOptions.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v14/MigrateMissingFqdnOnPservers.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v14/MigrateNetworkTechToCloudRegion.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v14/MigrateSameSourcedRCTROPserverData.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v14/MigrateSdnaIvlanData.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v14/PserverDedupWithDifferentSourcesOfTruth.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v15/MigrateBooleanDefaultsToFalse.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v15/MigrateCloudRegionUpgradeCycle.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v15/MigrateInMaintDefaultToFalse.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v15/MigrateRadcomChanges.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java
src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java
src/main/java/org/onap/aai/schema/GenTester.java
src/main/java/org/onap/aai/util/ExceptionTranslator.java [new file with mode: 0644]
src/main/java/org/onap/aai/util/GraphAdminConstants.java [new file with mode: 0644]
src/main/java/org/onap/aai/util/GraphAdminDBUtils.java [new file with mode: 0644]
src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java
src/main/resources/application.properties
src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties
src/main/resources/etc/appprops/aaiconfig.properties
src/main/resources/etc/appprops/error.properties
src/main/resources/etc/appprops/janusgraph-migration.properties [new file with mode: 0644]
src/main/scripts/dataRestoreFromSnapshot.sh
src/main/scripts/dataRestoreFromSnapshotMulti.sh [new file with mode: 0644]
src/main/scripts/dataSnapshot.sh
src/main/scripts/dynamicPayloadGenerator.sh
src/main/scripts/dynamicPayloadPartial.sh
src/main/scripts/preDataRestore.sh [new file with mode: 0644]
src/main/scripts/run_Migrations.sh
src/test/java/org/onap/aai/AAIGremlinQueryTest.java
src/test/java/org/onap/aai/AAISetup.java
src/test/java/org/onap/aai/datagrooming/DataGroomingTest.java
src/test/java/org/onap/aai/datasnapshot/DataSnapshotTest.java
src/test/java/org/onap/aai/dbgen/DupeToolTest.java
src/test/java/org/onap/aai/dbgen/schemamod/SchemaModTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/ALTSLicenseEntitlementMigrationTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/MigrateDataFromASDCToConfigurationTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/MigrateHUBEvcInventoryTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventoryMethodTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventoryTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/MigrateInvEvcInventoryTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/MigratePATHEvcInventoryTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/MigratePATHPhysicalInventoryTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/MigrateSAREvcInventoryTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartOneTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartTwoTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateEdgesBetweenVnfcAndVfModuleTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateForwarderEvcCircuitIdTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v14/MigrateGenericVnfMgmtOptionsTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v14/MigrateMissingFqdnOnPserversTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v14/MigrateNetworkTechToCloudRegionTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v14/MigrateSameSourcedRCTROPServerDataTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v14/MigrateSdnaIvlanDataTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v14/PserverDedupWithDifferentSourcesOfTruthTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v15/MigrateBooleanDefaultsToFalseTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v15/MigrateCloudRegionUpgradeCycleTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v15/MigrateInMaintDefaultToFalseTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v15/MigrateRadcomChangesTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/util/SendMigrationNotificationsTest.java
src/test/resources/groomingInput [new file with mode: 0644]
src/test/resources/migration-input-files/ALTS-migration-data/ALTS-migration-input.csv [new file with mode: 0644]
src/test/resources/migration-input-files/CloudRegion-ART-migration-data/CloudRegion-ART-migration-data.csv [new file with mode: 0644]
src/test/resources/migration-input-files/VNT-migration-data/VNT-migration-input.csv [new file with mode: 0644]
src/test/resources/migration-input-files/radcom-changes/INPUT-MODEL.csv [new file with mode: 0644]
src/test/resources/migration-input-files/radcom-changes/INPUT-VNF.csv [new file with mode: 0644]
src/test/resources/migration-input-files/sarea-inventory/circuitIds.csv [new file with mode: 0644]
src/test/resources/migration-input-files/sarea-inventory/hub.csv [new file with mode: 0644]
src/test/resources/migration-input-files/sarea-inventory/inv.csv [new file with mode: 0644]
src/test/resources/migration-input-files/sarea-inventory/ivlanData.csv [new file with mode: 0644]
src/test/resources/migration-input-files/sarea-inventory/path.csv [new file with mode: 0644]
src/test/resources/migration-input-files/sarea-inventory/sar.csv [new file with mode: 0644]
src/test/resources/migration-input-files/sarea-inventory/secondary-hub-path.csv [new file with mode: 0644]
src/test/resources/migration-input-files/widget-model-migration-data/widget-model-migration-input.csv [new file with mode: 0644]

diff --git a/pom.xml b/pom.xml
index 482e3a2..ba7bb07 100755
--- a/pom.xml
+++ b/pom.xml
@@ -99,8 +99,8 @@
         <gson.version>2.7</gson.version>
         <json.version>20090211</json.version>
 
-        <aai.core.version>1.3.0</aai.core.version>
-        <aai.schema.version>1.3.0</aai.schema.version>
+        <aai.core.version>1.4.1-SNAPSHOT</aai.core.version>
+        <aai.schema.version>1.0.0-SNAPSHOT</aai.schema.version>
 
         <netty.handler.version>4.1.9.Final</netty.handler.version>
         <netty.version>4.0.56.Final</netty.version>
         <schema.version.namespace.change.start>v12</schema.version.namespace.change.start>
         <schema.version.edge.label.start>v12</schema.version.edge.label.start>
         <schema.version.api.default>v14</schema.version.api.default>
-        <schema.version.list>v8,v9,v10,v11,v12,v13,v14</schema.version.list>
+        <schema.version.list>v8,v9,v10,v11,v12,v13,v14,v15</schema.version.list>
         <schema.uri.base.path>/aai</schema.uri.base.path>
         <!-- End of Default ONAP Schema Properties -->
     </properties>
             <properties>
                 <schema.source.name>onap</schema.source.name>
                 <schema.version.namespace.change.start>v12</schema.version.namespace.change.start>
-                <schema.version.list>v8,v9,v10,v11,v12,v13,v14</schema.version.list>
+                <schema.version.list>v8,v9,v10,v11,v12,v13,v14,v15</schema.version.list>
             </properties>
         </profile>
         <!-- End of ONAP Profile -->
             <dependency>
                 <groupId>org.springframework.boot</groupId>
                 <artifactId>spring-boot-starter-parent</artifactId>
-               <version>${spring.boot.version}</version>
+                <version>${spring.boot.version}</version>
                 <scope>import</scope>
                 <type>pom</type>
             </dependency>
                             <configuration>
                                 <artifactItems>
                                     <artifactItem>
-                                        <groupId>org.onap.aai.aai-common</groupId>
+                                        <groupId>org.onap.aai.schema-service</groupId>
                                         <artifactId>aai-schema</artifactId>
                                         <version>${aai.schema.version}</version>
                                         <outputDirectory>${project.basedir}/src/main/resources/schema/</outputDirectory>
                             <configuration>
                                 <artifactItems>
                                     <artifactItem>
-                                        <groupId>org.onap.aai.aai-common</groupId>
+                                        <groupId>org.onap.aai.schema-service</groupId>
                                         <artifactId>aai-schema</artifactId>
                                         <version>${aai.schema.version}</version>
                                         <outputDirectory>${project.basedir}/src/main/resources/schema/</outputDirectory>
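The pom.xml changes above retarget the aai-schema dependency to the new org.onap.aai.schema-service group and extend schema.version.list through v15. As an illustration only (the class and method names below are hypothetical, not part of this build), a comma-separated version-list property like this can be parsed and sanity-checked against the default API version at startup:

```java
import java.util.Arrays;
import java.util.List;

public final class SchemaVersionListCheck {

    // Parse a list such as "v8,v9,v10,v11,v12,v13,v14,v15" and verify the
    // configured default API version is one of the listed versions.
    public static List<String> parse(String versionList, String defaultVersion) {
        List<String> versions = Arrays.asList(versionList.split("\\s*,\\s*"));
        if (!versions.contains(defaultVersion)) {
            throw new IllegalStateException("schema.version.api.default=" + defaultVersion
                    + " is not present in schema.version.list=" + versionList);
        }
        return versions;
    }

    public static void main(String[] args) {
        // Mirrors the values set in the ONAP profile of this pom.xml.
        System.out.println(parse("v8,v9,v10,v11,v12,v13,v14,v15", "v14"));
    }
}
```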
diff --git a/src/main/docker/docker-entrypoint.sh b/src/main/docker/docker-entrypoint.sh
index 79adc2b..76c8d5e 100644
@@ -79,8 +79,7 @@ fi;
 
 MIN_HEAP_SIZE=${MIN_HEAP_SIZE:-512m};
 MAX_HEAP_SIZE=${MAX_HEAP_SIZE:-1024m};
-MAX_PERM_SIZE=${MAX_PERM_SIZE:-512m};
-PERM_SIZE=${PERM_SIZE:-512m};
+MAX_METASPACE_SIZE=${MAX_METASPACE_SIZE:-512m};
 
 JAVA_CMD="exec gosu aaiadmin java";
 
@@ -91,8 +90,7 @@ JVM_OPTS="${JVM_OPTS} -Xmx${MAX_HEAP_SIZE}";
 
 JVM_OPTS="${JVM_OPTS} -XX:+PrintGCDetails";
 JVM_OPTS="${JVM_OPTS} -XX:+PrintGCTimeStamps";
-JVM_OPTS="${JVM_OPTS} -XX:MaxPermSize=${MAX_PERM_SIZE}";
-JVM_OPTS="${JVM_OPTS} -XX:PermSize=${PERM_SIZE}";
+JVM_OPTS="${JVM_OPTS} -XX:MaxMetaspaceSize=${MAX_METASPACE_SIZE}";
 
 JVM_OPTS="${JVM_OPTS} -server";
 JVM_OPTS="${JVM_OPTS} -XX:NewSize=512m";
@@ -127,6 +125,7 @@ JAVA_OPTS="${JAVA_OPTS} -DAAI_BUILD_VERSION=${AAI_BUILD_VERSION}";
 JAVA_OPTS="${JAVA_OPTS} -Djava.security.egd=file:/dev/./urandom";
 JAVA_OPTS="${JAVA_OPTS} -Dlogback.configurationFile=./resources/logback.xml";
 JAVA_OPTS="${JAVA_OPTS} -Dloader.path=$APP_HOME/resources";
+JAVA_OPTS="${JAVA_OPTS} -Dgroovy.use.classvalue=true";
 JAVA_OPTS="${JAVA_OPTS} ${POST_JAVA_OPTS}";
 
 JAVA_MAIN_JAR=$(ls lib/aai-graphadmin*.jar);
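The entrypoint drops the PermGen flags in favor of -XX:MaxMetaspaceSize, since Java 8 removed the permanent generation, and adds the Groovy runtime flag -Dgroovy.use.classvalue=true. A standalone snippet (not part of this commit) that confirms class metadata on the running JVM is tracked under a Metaspace pool:

```java
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryPoolMXBean;

public class MetaspaceCheck {
    public static void main(String[] args) {
        // On Java 8+ there is no "PermGen" pool; class metadata lives in "Metaspace",
        // which is why -XX:PermSize/-XX:MaxPermSize give way to -XX:MaxMetaspaceSize.
        for (MemoryPoolMXBean pool : ManagementFactory.getMemoryPoolMXBeans()) {
            if (pool.getName().contains("Metaspace")) {
                long max = pool.getUsage().getMax(); // -1 means unbounded
                System.out.println(pool.getName() + " max bytes: " + max);
            }
        }
    }
}
```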
diff --git a/src/main/java/org/onap/aai/GraphAdminApp.java b/src/main/java/org/onap/aai/GraphAdminApp.java
index aa9c457..554a014 100644
  */
 package org.onap.aai;
 
+import com.att.eelf.configuration.Configuration;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.onap.aai.config.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
+import java.util.Properties;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LogFormatTools;
 import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.nodes.NodeIngestor;
 import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.ExceptionTranslator;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
@@ -55,6 +63,7 @@ import java.util.UUID;
         "org.onap.aai.interceptors",
         "org.onap.aai.datasnapshot",
         "org.onap.aai.datagrooming",
+        "org.onap.aai.dataexport",
         "org.onap.aai.datacleanup"
 })
 @EnableAsync
@@ -65,6 +74,9 @@ public class GraphAdminApp {
     public static final String APP_NAME = "GraphAdmin";
     private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(GraphAdminApp.class);
 
+    private static final String FROMAPPID = "AAI-GA";
+    private static final String TRANSID = UUID.randomUUID().toString();
+
     @Autowired
     private Environment env;
 
@@ -74,7 +86,10 @@ public class GraphAdminApp {
     @PostConstruct
     private void initialize(){
         loadDefaultProps();
+        initializeLoggingContext();
+    }
 
+    private static void initializeLoggingContext() {
         LoggingContext.save();
         LoggingContext.component("init");
         LoggingContext.partnerName("NA");
@@ -92,11 +107,27 @@ public class GraphAdminApp {
     public static void main(String[] args) throws Exception {
 
         loadDefaultProps();
-        SpringApplication app = new SpringApplication(GraphAdminApp.class);
-        app.setRegisterShutdownHook(true);
-        app.addInitializers(new PropertyPasswordConfiguration());
-        Environment env = app.run(args).getEnvironment();
+        ErrorLogHelper.loadProperties();
+        initializeLoggingContext();
+
+        Environment env =null;
+        AAIConfig.init();
+        try {
+            SpringApplication app = new SpringApplication(GraphAdminApp.class);
+            app.setRegisterShutdownHook(true);
+            app.addInitializers(new PropertyPasswordConfiguration());
+            env = app.run(args).getEnvironment();
+        }
 
+        catch(Exception ex){
+            AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(ex);
+            LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+            LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+            LOGGER.error("Problems starting GraphAdminApp "+aai.getMessage());
+            ErrorLogHelper.logException(aai);
+            ErrorLogHelper.logError(aai.getCode(), ex.getMessage() + ", resolve and restart GraphAdmin");
+            throw aai;
+        }
         LOGGER.info(
                 "Application '{}' is running on {}!" ,
                 env.getProperty("spring.application.name"),
@@ -106,7 +137,7 @@ public class GraphAdminApp {
         // to the SchemaGenerator needs the bean and during the constructor
         // the Spring Context is not yet initialized
 
-        AAIConfig.init();
+
         AAIGraph.getInstance();
 
         System.setProperty("org.onap.aai.graphadmin.started", "true");             
@@ -116,6 +147,8 @@ public class GraphAdminApp {
         System.out.println("GraphAdmin Microservice Started");
     }
 
+
+
     public static void loadDefaultProps(){
 
         if(System.getProperty("AJSC_HOME") == null){
@@ -126,4 +159,5 @@ public class GraphAdminApp {
             System.setProperty("BUNDLECONFIG_DIR", "src/main/resources");
         }
     }
+
 }
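GraphAdminApp.main now calls ErrorLogHelper.loadProperties() and AAIConfig.init() before Spring starts and wraps app.run(args) in a try/catch that routes failures through ExceptionTranslator, so a schema-service problem is logged as a coded AAIException rather than a raw stack trace. Below is a self-contained sketch of that wrap-and-translate pattern, with hypothetical class and error-code names (the real ExceptionTranslator and AAIException APIs may differ):

```java
import java.util.concurrent.Callable;

public final class StartupGuard {

    /** Hypothetical stand-in for the project's coded exception type. */
    public static class StartupException extends RuntimeException {
        private final String code;
        public StartupException(String code, Throwable cause) {
            super(code + ": " + cause.getMessage(), cause);
            this.code = code;
        }
        public String getCode() { return code; }
    }

    /** Run the startup action; on failure, translate to a coded exception and rethrow. */
    public static <T> T runOrTranslate(Callable<T> startup) {
        try {
            return startup.call();
        } catch (Exception ex) {
            // A real translator would inspect the cause chain (e.g. schema ingest or
            // connectivity failures) and choose a specific code; one code is used here.
            throw new StartupException("STARTUP_FAILURE", ex);
        }
    }
}
```

A call site would look like StartupGuard.runOrTranslate(() -> app.run(args)), mirroring the shape of the try/catch added above.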
diff --git a/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java b/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
index a281d70..f4372c1 100644
-/**\r
- * ============LICENSE_START=======================================================\r
- * org.onap.aai\r
- * ================================================================================\r
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
- * ================================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- *\r
- *    http://www.apache.org/licenses/LICENSE-2.0\r
- *\r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END=========================================================\r
- */\r
-package org.onap.aai.datacleanup;\r
-import java.io.File;\r
-import java.io.FileInputStream;\r
-import java.io.FileOutputStream;\r
-import java.io.IOException;\r
-import java.nio.file.Files;\r
-import java.nio.file.attribute.BasicFileAttributes;\r
-import java.nio.file.attribute.FileTime;\r
-import java.text.SimpleDateFormat;\r
-import java.util.Calendar;\r
-import java.util.Date;\r
-import java.util.zip.ZipEntry;\r
-import java.util.zip.ZipOutputStream;\r
-\r
-import org.onap.aai.exceptions.AAIException;\r
-import org.onap.aai.logging.ErrorLogHelper;\r
-import org.onap.aai.util.AAIConfig;\r
-import org.onap.aai.util.AAIConstants;\r
-import org.springframework.context.annotation.PropertySource;\r
-import org.springframework.scheduling.annotation.Scheduled;\r
-import org.springframework.stereotype.Component;\r
-\r
-import com.att.eelf.configuration.EELFLogger;\r
-import com.att.eelf.configuration.EELFManager;\r
-\r
-@Component\r
-@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")\r
-public class DataCleanupTasks {\r
-\r
-       private static final EELFLogger logger = EELFManager.getInstance().getLogger(DataCleanupTasks.class);\r
-       private final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMdd");\r
-       /**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.\r
-               logDir is the {project_home}/logs\r
-               archiveDir is the ARCHIVE directory where the files will be stored after 5 days.\r
-               ageZip is the number of days after which the file will be moved to the ARCHIVE folder.\r
-               ageDelete is the number of days after which the data files will be deleted i.e after 30 days.\r
-       */\r
-       @Scheduled(cron = "${datagroomingcleanup.cron}" )\r
-       public void dataGroomingCleanup() throws AAIException, Exception {\r
-               \r
-               logger.info("Started cron job dataGroomingCleanup @ " + simpleDateFormat.format(new Date()));\r
-               \r
-               try {\r
-                       String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";\r
-                       String dataGroomingDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataGrooming";\r
-                       String archiveDir = dataGroomingDir + AAIConstants.AAI_FILESEP + "ARCHIVE";\r
-                       String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming";             \r
-                       File path = new File(dataGroomingDir);\r
-                       File archivepath = new File(archiveDir);\r
-                       File dataGroomingPath = new File(dataGroomingArcDir);\r
-               \r
-                       logger.info("The logDir is " + logDir);\r
-                       logger.info("The dataGroomingDir is " + dataGroomingDir);\r
-                       logger.info("The archiveDir is " + archiveDir );\r
-                       logger.info("The dataGroomingArcDir is " + dataGroomingArcDir );\r
-               \r
-                       boolean exists = directoryExists(logDir);\r
-                       logger.info("Directory" + logDir + "exists: " + exists);\r
-                       if(!exists)\r
-                               logger.error("The directory" + logDir +"does not exists");\r
-               \r
-                       Integer ageZip = AAIConfig.getInt("aai.datagrooming.agezip");\r
-                       Integer ageDelete = AAIConfig.getInt("aai.datagrooming.agedelete");\r
-                                                       \r
-                       Date newAgeZip = getZipDate(ageZip);\r
-                                                               \r
-                       //Iterate through the dataGroomingDir\r
-                       File[] listFiles = path.listFiles();  \r
-                       if(listFiles != null) {\r
-                               for(File listFile : listFiles) {\r
-                                       if (listFile.toString().contains("ARCHIVE")){\r
-                                               continue;\r
-                                       }\r
-                                       if(listFile.isFile()){\r
-                                               logger.info("The file name in dataGrooming: " +listFile.getName()); \r
-                                               Date fileCreateDate = fileCreationMonthDate(listFile);\r
-                                               logger.info("The fileCreateDate in dataGrooming is " + fileCreateDate);\r
-                                               if( fileCreateDate.compareTo(newAgeZip) < 0) {\r
-                                               archive(listFile,archiveDir,dataGroomingArcDir);                                                \r
-                                               }\r
-                                       }\r
-                               }\r
-                       }\r
-               \r
-                       Date newAgeDelete = getZipDate(ageDelete);\r
-                       //Iterate through the archive/dataGrooming dir\r
-                       File[] listFilesArchive = dataGroomingPath.listFiles(); \r
-                       if(listFilesArchive != null) {\r
-                               for(File listFileArchive : listFilesArchive) { \r
-                                       if(listFileArchive.isFile()) {\r
-                               logger.info("The file name in ARCHIVE/dataGrooming: " +listFileArchive.getName()); \r
-                               Date fileCreateDate = fileCreationMonthDate(listFileArchive);\r
-                               logger.info("The fileCreateDate in ARCHIVE/dataGrooming is " + fileCreateDate);\r
-                               if(fileCreateDate.compareTo(newAgeDelete) < 0) {\r
-                                       delete(listFileArchive);\r
-                                       }\r
-                               }       \r
-                       }\r
-                       }\r
-               }\r
-               catch (Exception e) {\r
-                       ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());\r
-                       logger.info("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());\r
-                       throw e;\r
-               }\r
-       }\r
-       \r
-    /**\r
-     * This method checks if the directory exists\r
-     * @param DIR\r
-     * \r
-     */\r
-    public boolean directoryExists(String dir) {\r
-       File path = new File(dir);\r
-               boolean exists = path.exists();\r
-               return exists;  \r
-    }\r
-    \r
-    public Date getZipDate(Integer days) throws Exception {\r
-       return getZipDate(days, new Date());\r
-    }\r
-    \r
-    public Date getZipDate(Integer days, Date date) throws Exception{\r
-       \r
-       Calendar cal = Calendar.getInstance();\r
-       logger.info("The current date is " + date );\r
-       cal.setTime(date);      \r
-       cal.add(Calendar.DATE, -days);\r
-       Date newAgeZip = cal.getTime();\r
-               logger.info("The newAgeDate is " +newAgeZip);\r
-               return newAgeZip;               \r
-    }\r
-    \r
-    \r
-    public Date fileCreationMonthDate (File file) throws Exception {\r
-\r
-        BasicFileAttributes attr = Files.readAttributes(file.toPath(),\r
-                                                        BasicFileAttributes.class);\r
-        FileTime time = attr.creationTime();\r
-           String formatted = simpleDateFormat.format( new Date( time.toMillis() ) );\r
-           Date d = simpleDateFormat.parse(formatted);\r
-           return d;\r
-    }\r
-    \r
-    /**\r
-     * This method will zip the files and add it to the archive folder\r
-     * Checks if the archive folder exists, if not then creates one\r
-     * After adding the file to archive folder it deletes the file from the filepath\r
-     * @throws AAIException\r
-     * @throws Exception\r
-     */\r
-    public void archive(File file, String archiveDir, String afterArchiveDir) throws AAIException, Exception {\r
-               \r
-       logger.info("Inside the archive folder");  \r
-       String filename = file.getName();\r
-       logger.info("file name is " +filename);\r
-               File archivepath = new File(archiveDir);\r
-               \r
-               String zipFile = afterArchiveDir + AAIConstants.AAI_FILESEP + filename;\r
-               \r
-               File dataGroomingPath = new File(afterArchiveDir);\r
-       \r
-               boolean exists = directoryExists(archiveDir);\r
-               logger.info("Directory" + archiveDir + "exists: " + exists);            \r
-               if(!exists) {\r
-                       logger.error("The directory" + archiveDir +"does not exists so will create a new archive folder");\r
-                       //Create an archive folder if does not exists           \r
-                       boolean flag = dataGroomingPath.mkdirs();\r
-                       if(!flag)\r
-                               logger.error("Failed to create ARCHIVE folder");                \r
-               }\r
-               try(FileOutputStream outputstream = new FileOutputStream(zipFile + ".gz");\r
-                               ZipOutputStream zoutputstream = new ZipOutputStream(outputstream);\r
-                               FileInputStream inputstream = new FileInputStream(file)) {\r
-                       ZipEntry ze = new ZipEntry(file.getName());\r
-                       zoutputstream.putNextEntry(ze);\r
-                       byte[] buffer = new byte[1024];\r
-                       int len;\r
-                       while ((len = inputstream.read(buffer)) > 0) {\r
-                               zoutputstream.write(buffer,0,len);\r
-                       }                       \r
-                       //close all the sources\r
-                       zoutputstream.closeEntry();\r
-                       //Delete the file after been added to archive folder\r
-                       delete(file);\r
-                       logger.info("The file archived is " + file + " at " + afterArchiveDir );\r
-               }       \r
-        catch (IOException e) {\r
-                ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup " + e.getStackTrace());\r
-                logger.info("AAI_4000", "Exception running cron job for DataCleanup", e);\r
-                throw e;\r
-               }\r
-    }\r
-    \r
-    /**\r
-     * This method will delete all the files from the archive folder that are older than 60 days\r
-     * @param file\r
-     */\r
-    public static void delete(File file) {\r
-       \r
-       logger.info("Deleting the file " + file);\r
-       boolean deleteStatus = file.delete();\r
-               if(!deleteStatus){\r
-                       logger.error("Failed to delete the file" +file);                        \r
-               }\r
-    }\r
-    \r
-    /**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.\r
-       logDir is the {project_home}/logs\r
-       archiveDir is the ARCHIVE directory where the files will be stored after 5 days.\r
-       ageZip is the number of days after which the file will be moved to the ARCHIVE folder.\r
-       ageDelete is the number of days after which the data files will be deleted i.e after 30 days.\r
-*/\r
-    @Scheduled(cron = "${datasnapshotcleanup.cron}" )\r
-    public void dataSnapshotCleanup() throws AAIException, Exception {\r
-       \r
-       logger.info("Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));\r
-       \r
-       try {\r
-               String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";\r
-               String dataSnapshotDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataSnapshots";\r
-               String archiveDir = dataSnapshotDir + AAIConstants.AAI_FILESEP + "ARCHIVE";\r
-               String dataSnapshotArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataSnapshots";            \r
-               File path = new File(dataSnapshotDir);\r
-               File archivepath = new File(archiveDir);\r
-               File dataSnapshotPath = new File(dataSnapshotArcDir);\r
-       \r
-               logger.info("The logDir is " + logDir);\r
-               logger.info("The dataSnapshotDir is " + dataSnapshotDir);\r
-               logger.info("The archiveDir is " + archiveDir );\r
-               logger.info("The dataSnapshotArcDir is " + dataSnapshotArcDir );\r
-       \r
-               boolean exists = directoryExists(logDir);\r
-               logger.info("Directory" + logDir + "exists: " + exists);\r
-               if(!exists)\r
-                       logger.error("The directory" + logDir +"does not exists");\r
-       \r
-               Integer ageZipSnapshot = AAIConfig.getInt("aai.datasnapshot.agezip");\r
-               Integer ageDeleteSnapshot = AAIConfig.getInt("aai.datasnapshot.agedelete");\r
-               \r
-               Date newAgeZip = getZipDate(ageZipSnapshot);\r
-                                       \r
-               //Iterate through the dataGroomingDir\r
-               File[] listFiles = path.listFiles();  \r
-               if(listFiles != null) {\r
-                       for(File listFile : listFiles) {\r
-                               if (listFile.toString().contains("ARCHIVE")){\r
-                                       continue;\r
-                               }\r
-                               if(listFile.isFile()){\r
-                                       logger.info("The file name in dataSnapshot: " +listFile.getName()); \r
-                                       Date fileCreateDate = fileCreationMonthDate(listFile);\r
-                                       logger.info("The fileCreateDate in dataSnapshot is " + fileCreateDate);\r
-                                       if( fileCreateDate.compareTo(newAgeZip) < 0) {\r
-                                               archive(listFile,archiveDir,dataSnapshotArcDir);                                                \r
-                                       }\r
-                               }\r
-                       }\r
-               }\r
-       \r
-               Date newAgeDelete = getZipDate(ageDeleteSnapshot);\r
-               //Iterate through the archive/dataSnapshots dir\r
-               File[] listFilesArchive = dataSnapshotPath.listFiles(); \r
-               if(listFilesArchive != null) {\r
-                       for(File listFileArchive : listFilesArchive) { \r
-                               if(listFileArchive.isFile()) {\r
-                                       logger.info("The file name in ARCHIVE/dataSnapshot: " +listFileArchive.getName()); \r
-                                       Date fileCreateDate = fileCreationMonthDate(listFileArchive);\r
-                                       logger.info("The fileCreateDate in ARCHIVE/dataSnapshot is " + fileCreateDate);\r
-                                       if(fileCreateDate.compareTo(newAgeDelete) < 0) {\r
-                                               delete(listFileArchive);\r
-                                       }\r
-                               }       \r
-                       }\r
-               }\r
-       }\r
-       catch (Exception e) {\r
-               ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());\r
-               logger.info("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());\r
-               throw e;\r
-       }\r
-  }   \r
-}\r
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.datacleanup;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.nio.file.attribute.FileTime;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+@Component
+@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
+public class DataCleanupTasks {
+
+       private static final EELFLogger logger = EELFManager.getInstance().getLogger(DataCleanupTasks.class);
+       private final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMdd");
+       /**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.
+               logDir is the {project_home}/logs
+               archiveDir is the ARCHIVE directory where the files will be stored after 5 days.
+               ageZip is the number of days after which the file will be moved to the ARCHIVE folder.
+               ageDelete is the number of days after which the data files will be deleted i.e after 30 days.
+       */
+       @Scheduled(cron = "${datagroomingcleanup.cron}" )
+       public void dataGroomingCleanup() throws AAIException, Exception {
+               
+               logger.info("Started cron job dataGroomingCleanup @ " + simpleDateFormat.format(new Date()));
+               
+               try {
+                       String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
+                       String dataGroomingDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataGrooming";
+                       String archiveDir = dataGroomingDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
+                       String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming";             
+                       File path = new File(dataGroomingDir);
+                       File archivepath = new File(archiveDir);
+                       File dataGroomingPath = new File(dataGroomingArcDir);
+               
+                       logger.info("The logDir is " + logDir);
+                       logger.info("The dataGroomingDir is " + dataGroomingDir);
+                       logger.info("The archiveDir is " + archiveDir );
+                       logger.info("The dataGroomingArcDir is " + dataGroomingArcDir );
+               
+                       boolean exists = directoryExists(logDir);
+                       logger.info("Directory" + logDir + "exists: " + exists);
+                       if(!exists)
+                               logger.error("The directory" + logDir +"does not exists");
+               
+                       Integer ageZip = AAIConfig.getInt("aai.datagrooming.agezip");
+                       Integer ageDelete = AAIConfig.getInt("aai.datagrooming.agedelete");
+                                                       
+                       Date newAgeZip = getZipDate(ageZip);
+                                                               
+                       //Iterate through the dataGroomingDir
+                       File[] listFiles = path.listFiles();  
+                       if(listFiles != null) {
+                               for(File listFile : listFiles) {
+                                       if (listFile.toString().contains("ARCHIVE")){
+                                               continue;
+                                       }
+                                       if(listFile.isFile()){
+                                               logger.info("The file name in dataGrooming: " +listFile.getName()); 
+                                               Date fileCreateDate = fileCreationMonthDate(listFile);
+                                               logger.info("The fileCreateDate in dataGrooming is " + fileCreateDate);
+                                               if( fileCreateDate.compareTo(newAgeZip) < 0) {
+                                               archive(listFile,archiveDir,dataGroomingArcDir);                                                
+                                               }
+                                       }
+                               }
+                       }
+               
+                       Date newAgeDelete = getZipDate(ageDelete);
+                       //Iterate through the archive/dataGrooming dir
+                       File[] listFilesArchive = dataGroomingPath.listFiles(); 
+                       if(listFilesArchive != null) {
+                               for(File listFileArchive : listFilesArchive) { 
+                                       if(listFileArchive.isFile()) {
+                               logger.info("The file name in ARCHIVE/dataGrooming: " +listFileArchive.getName()); 
+                               Date fileCreateDate = fileCreationMonthDate(listFileArchive);
+                               logger.info("The fileCreateDate in ARCHIVE/dataGrooming is " + fileCreateDate);
+                               if(fileCreateDate.compareTo(newAgeDelete) < 0) {
+                                       delete(listFileArchive);
+                                       }
+                               }       
+                       }
+                       }
+               }
+               catch (Exception e) {
+                       ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
+                       logger.info("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
+                       throw e;
+               }
+       }
+       
+    /**
+     * This method checks if the directory exists
+     * @param DIR
+     * 
+     */
+    public boolean directoryExists(String dir) {
+       File path = new File(dir);
+               boolean exists = path.exists();
+               return exists;  
+    }
+    
+    public Date getZipDate(Integer days) throws Exception {
+       return getZipDate(days, new Date());
+    }
+    
+    public Date getZipDate(Integer days, Date date) throws Exception{
+       
+       Calendar cal = Calendar.getInstance();
+       logger.info("The current date is " + date );
+       cal.setTime(date);      
+       cal.add(Calendar.DATE, -days);
+       Date newAgeZip = cal.getTime();
+               logger.info("The newAgeDate is " +newAgeZip);
+               return newAgeZip;               
+    }
+    
+    
+    public Date fileCreationMonthDate (File file) throws Exception {
+
+        BasicFileAttributes attr = Files.readAttributes(file.toPath(),
+                                                        BasicFileAttributes.class);
+        FileTime time = attr.creationTime();
+           String formatted = simpleDateFormat.format( new Date( time.toMillis() ) );
+           Date d = simpleDateFormat.parse(formatted);
+           return d;
+    }
+    
+    /**
+     * This method will zip the files and add it to the archive folder
+     * Checks if the archive folder exists, if not then creates one
+     * After adding the file to archive folder it deletes the file from the filepath
+     * @throws AAIException
+     * @throws Exception
+     */
+    public void archive(File file, String archiveDir, String afterArchiveDir) throws AAIException, Exception {
+               
+       logger.info("Inside the archive folder");  
+       String filename = file.getName();
+       logger.info("file name is " +filename);
+               File archivepath = new File(archiveDir);
+               
+               String zipFile = afterArchiveDir + AAIConstants.AAI_FILESEP + filename;
+               
+               File dataGroomingPath = new File(afterArchiveDir);
+       
+               boolean exists = directoryExists(archiveDir);
+               logger.info("Directory" + archiveDir + "exists: " + exists);            
+               if(!exists) {
+                       logger.error("The directory" + archiveDir +"does not exists so will create a new archive folder");
+                       //Create an archive folder if does not exists           
+                       boolean flag = dataGroomingPath.mkdirs();
+                       if(!flag)
+                               logger.error("Failed to create ARCHIVE folder");                
+               }
+               try(FileOutputStream outputstream = new FileOutputStream(zipFile + ".gz");
+                               ZipOutputStream zoutputstream = new ZipOutputStream(outputstream);
+                               FileInputStream inputstream = new FileInputStream(file)) {
+                       ZipEntry ze = new ZipEntry(file.getName());
+                       zoutputstream.putNextEntry(ze);
+                       byte[] buffer = new byte[1024];
+                       int len;
+                       while ((len = inputstream.read(buffer)) > 0) {
+                               zoutputstream.write(buffer,0,len);
+                       }                       
+                       //close all the sources
+                       zoutputstream.closeEntry();
+                       //Delete the file after been added to archive folder
+                       delete(file);
+                       logger.info("The file archived is " + file + " at " + afterArchiveDir );
+               }       
+        catch (IOException e) {
+                ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup " + e.getStackTrace());
+                logger.info("AAI_4000", "Exception running cron job for DataCleanup", e);
+                throw e;
+               }
+    }
+    
+    /**
+     * This method will delete all the files from the archive folder that are older than 60 days
+     * @param file
+     */
+    public static void delete(File file) {
+       
+       logger.info("Deleting the file " + file);
+       boolean deleteStatus = file.delete();
+               if(!deleteStatus){
+                       logger.error("Failed to delete the file" +file);                        
+               }
+    }
+    
+    /**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.
+       logDir is the {project_home}/logs
+       archiveDir is the ARCHIVE directory where the files will be stored after 5 days.
+       ageZip is the number of days after which the file will be moved to the ARCHIVE folder.
+       ageDelete is the number of days after which the data files will be deleted i.e after 30 days.
+*/
+    @Scheduled(cron = "${datasnapshotcleanup.cron}" )
+    public void dataSnapshotCleanup() throws AAIException, Exception {
+       
+       logger.info("Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));
+       
+       try {
+               String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";
+               String dataSnapshotDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataSnapshots";
+               String archiveDir = dataSnapshotDir + AAIConstants.AAI_FILESEP + "ARCHIVE";
+               String dataSnapshotArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataSnapshots";            
+               File path = new File(dataSnapshotDir);
+               File archivepath = new File(archiveDir);
+               File dataSnapshotPath = new File(dataSnapshotArcDir);
+       
+               logger.info("The logDir is " + logDir);
+               logger.info("The dataSnapshotDir is " + dataSnapshotDir);
+               logger.info("The archiveDir is " + archiveDir );
+               logger.info("The dataSnapshotArcDir is " + dataSnapshotArcDir );
+       
+               boolean exists = directoryExists(logDir);
+               logger.info("Directory" + logDir + "exists: " + exists);
+               if(!exists)
+                       logger.error("The directory" + logDir +"does not exists");
+       
+               Integer ageZipSnapshot = AAIConfig.getInt("aai.datasnapshot.agezip");
+               Integer ageDeleteSnapshot = AAIConfig.getInt("aai.datasnapshot.agedelete");
+               
+               Date newAgeZip = getZipDate(ageZipSnapshot);
+                                       
+               //Iterate through the dataGroomingDir
+               File[] listFiles = path.listFiles();  
+               if(listFiles != null) {
+                       for(File listFile : listFiles) {
+                               if (listFile.toString().contains("ARCHIVE")){
+                                       continue;
+                               }
+                               if(listFile.isFile()){
+                                       logger.info("The file name in dataSnapshot: " +listFile.getName()); 
+                                       Date fileCreateDate = fileCreationMonthDate(listFile);
+                                       logger.info("The fileCreateDate in dataSnapshot is " + fileCreateDate);
+                                       if( fileCreateDate.compareTo(newAgeZip) < 0) {
+                                               archive(listFile,archiveDir,dataSnapshotArcDir);                                                
+                                       }
+                               }
+                       }
+               }
+       
+               Date newAgeDelete = getZipDate(ageDeleteSnapshot);
+               //Iterate through the archive/dataSnapshots dir
+               File[] listFilesArchive = dataSnapshotPath.listFiles(); 
+               if(listFilesArchive != null) {
+                       for(File listFileArchive : listFilesArchive) { 
+                               if(listFileArchive.isFile()) {
+                                       logger.info("The file name in ARCHIVE/dataSnapshot: " +listFileArchive.getName()); 
+                                       Date fileCreateDate = fileCreationMonthDate(listFileArchive);
+                                       logger.info("The fileCreateDate in ARCHIVE/dataSnapshot is " + fileCreateDate);
+                                       if(fileCreateDate.compareTo(newAgeDelete) < 0) {
+                                               delete(listFileArchive);
+                                       }
+                               }       
+                       }
+               }
+       }
+       catch (Exception e) {
+               ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
+               logger.info("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());
+               throw e;
+       }
+  }   
+}
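DataCleanupTasks is otherwise unchanged; the whole file is re-committed with LF line endings instead of CRLF. Its cron jobs move dataGrooming/dataSnapshot output older than aai.datagrooming.agezip / aai.datasnapshot.agezip days into an ARCHIVE folder and delete archived files older than the corresponding agedelete setting. A compact sketch of that age-cutoff-and-delete step using java.time instead of Calendar (illustrative only; the class and method names are not from this codebase):

```java
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.attribute.BasicFileAttributes;
import java.time.Instant;
import java.time.temporal.ChronoUnit;

public final class AgeBasedCleanup {

    /** Delete regular files in dir whose creation time is older than maxAgeDays days. */
    public static void deleteOlderThan(File dir, int maxAgeDays) throws IOException {
        Instant cutoff = Instant.now().minus(maxAgeDays, ChronoUnit.DAYS);
        File[] files = dir.listFiles(File::isFile);
        if (files == null) {
            return; // directory missing or unreadable
        }
        for (File f : files) {
            BasicFileAttributes attrs = Files.readAttributes(f.toPath(), BasicFileAttributes.class);
            if (attrs.creationTime().toInstant().isBefore(cutoff) && !f.delete()) {
                System.err.println("Failed to delete " + f);
            }
        }
    }
}
```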
diff --git a/src/main/java/org/onap/aai/dataexport/DataExportTasks.java b/src/main/java/org/onap/aai/dataexport/DataExportTasks.java
new file mode 100644
index 0000000..359e2ba
--- /dev/null
@@ -0,0 +1,382 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dataexport;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.Properties;
+import java.util.TreeMap;
+import java.util.UUID;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.onap.aai.dbgen.DynamicPayloadGenerator;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import org.apache.commons.io.comparator.LastModifiedFileComparator;
+import org.apache.commons.io.filefilter.DirectoryFileFilter;
+import org.apache.commons.io.filefilter.FileFileFilter;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.filefilter.RegexFileFilter;
+
+/**
+ * DataExportTasks obtains a graph snapshot and invokes DynamicPayloadGenerator
+ *
+ */
+@Component
+@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
+public class DataExportTasks {
+       
+       private static final EELFLogger LOGGER;
+       private static final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
+       private static final String GA_MS = "aai-graphadmin";
+       
+       static {
+               System.setProperty("aai.service.name", DataExportTasks.class.getSimpleName());
+               Properties props = System.getProperties();
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS);
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
+               LOGGER = EELFManager.getInstance().getLogger(DataExportTasks.class);
+       }
+
+       private LoaderFactory loaderFactory;
+       private EdgeIngestor edgeIngestor;
+       private SchemaVersions schemaVersions;
+
+       @Autowired
+       public DataExportTasks(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, SchemaVersions schemaVersions){
+           this.loaderFactory  = loaderFactory;
+           this.edgeIngestor   = edgeIngestor;
+           this.schemaVersions = schemaVersions;
+       }
+
+       /**
+        * Scheduled task to invoke exportTask
+        */
+       @Scheduled(cron = "${dataexporttask.cron}" )
+       public void export() {
+               try {
+                       exportTask();
+               } 
+               catch (Exception e) {
+               }
+       }
+       /**
+        * The exportTask method.
+        *
+        * @throws AAIException, Exception
+        */
+       public void exportTask() throws AAIException, Exception   {
+               
+               LoggingContext.init();
+               LoggingContext.requestId(UUID.randomUUID().toString());
+               LoggingContext.partnerName("AAI");
+               LoggingContext.targetEntity(GA_MS);
+               LoggingContext.component("exportTask");
+               LoggingContext.serviceName(GA_MS);
+               LoggingContext.targetServiceName("exportTask");
+               LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
+
+               if (AAIConfig.get("aai.dataexport.enable").equalsIgnoreCase("false")) {
+                       LOGGER.info("Data Export is not enabled");
+                       return;
+               }
+               // Check if the process was started via command line
+               if (isDataExportRunning()) {
+                       LOGGER.info("There is a dataExport process already running");
+                       return;
+               }
+
+               LOGGER.info("Started exportTask: " + dateFormat.format(new Date()));
+               
+               String enableSchemaValidation = AAIConfig.get("aai.dataexport.enable.schema.validation", "false");
+               String outputLocation =  AAIConstants.AAI_HOME_BUNDLECONFIG + AAIConfig.get("aai.dataexport.output.location");
+               String enableMultipleSnapshots =  AAIConfig.get("aai.dataexport.enable.multiple.snapshots", "false");
+               String nodeConfigurationLocation = AAIConstants.AAI_HOME_BUNDLECONFIG + AAIConfig.get("aai.dataexport.node.config.location");
+               String inputFilterConfigurationLocation = AAIConstants.AAI_HOME_BUNDLECONFIG + AAIConfig.get("aai.dataexport.input.filter.config.location");
+               String enablePartialGraph = AAIConfig.get("aai.dataexport.enable.partial.graph", "true");
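+               // Illustrative (hypothetical) aaiconfig.properties values for the settings read above:
+               //   aai.dataexport.enable=true
+               //   aai.dataexport.output.location=/dataExport
+               //   aai.dataexport.enable.multiple.snapshots=false
+               //   aai.dataexport.enable.partial.graph=true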
+               
+               // Check that the output location exists
+               File targetDirFile = new File(outputLocation);
+               if ( !targetDirFile.exists() ) {
+                       targetDirFile.mkdir();
+               }
+               else {
+                       //Delete any existing payload files
+                       deletePayload(targetDirFile);
+               }
+               
+               File snapshot = null;
+               String snapshotFilePath = null;
+               if ( "false".equalsIgnoreCase(enableMultipleSnapshots)){
+                       // find the second to latest data snapshot
+                       snapshot = findSnapshot();
+                       if (snapshot == null) {
+                               LOGGER.info("No data snapshot found to export");
+                               return;
+                       }
+                       snapshotFilePath = snapshot.getAbsolutePath();
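+                       // When partial-graph mode is enabled, dynamicPayloadPartial.sh is run against
+                       // the snapshot before DynamicPayloadGenerator is invoked.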
+                       if ( "true".equalsIgnoreCase (enablePartialGraph) ) {
+                                       String[] command = new String[2];
+                                       command[0] = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "bin" + AAIConstants.AAI_FILESEP + "dynamicPayloadPartial.sh";
+                                       command[1] = snapshotFilePath;
+                                       runScript(command);
+                       }
+               }
+               else {
+                       snapshotFilePath = findMultipleSnapshots();
+                       if (snapshotFilePath == null) {
+                               LOGGER.info("No set of multiple data snapshots found to export");
+                               return;
+                       }
+               }
+               
+               List<String> paramsList = new ArrayList<String>();
+               paramsList.add("-s");
+               paramsList.add(enableSchemaValidation);
+               paramsList.add("-o");
+               paramsList.add(outputLocation);
+               paramsList.add("-m");
+               paramsList.add(enableMultipleSnapshots);
+               paramsList.add("-n");
+               paramsList.add(nodeConfigurationLocation);
+               paramsList.add("-i");
+               paramsList.add(inputFilterConfigurationLocation);
+               paramsList.add("-p");
+               paramsList.add(enablePartialGraph);
+               paramsList.add("-d");
+               paramsList.add(snapshotFilePath);
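+               // The arguments above correspond (roughly) to a command line such as:
+               //   -s false -o <outputLocation> -m false -n <nodeConfig> -i <inputFilter> -p true -d <snapshotFile>
+               // (illustrative values only; the flags are whatever DynamicPayloadGenerator.run() accepts)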
+                               
+               LOGGER.debug("paramsList is : " + paramsList);
+                                                       
+               String[] paramsArray = paramsList.toArray(new String[0]); 
+               try {
+                       DynamicPayloadGenerator.run(loaderFactory, edgeIngestor, schemaVersions, paramsArray, false);
+                       LOGGER.info("DynamicPayloadGenerator completed");
+                       // tar/gzip payload files
+                       String[] command = new String[1];
+                       command[0] = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "bin" + AAIConstants.AAI_FILESEP + "dynamicPayloadArchive.sh";
+                       runScript(command);
+               }
+               catch (Exception e) {
+                       ErrorLogHelper.logError("AAI_8003", e.getMessage());
+                       LOGGER.info("Exception running dataExport task " + e.getMessage());
+                       throw e;
+               } finally {
+                       LOGGER.info("Ended dataExport task");
+                       LoggingContext.clear();
+               }
+               
+       }
+       /**
+        * The isDataExportRunning method, checks if the data export task was started separately via command line
+        * @return true if another process is running, false if not
+        */
+       private static boolean isDataExportRunning(){
+
+               Process process = null;
+
+               int count = 0;
+               try {
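+                       // The bracketed "[D]" keeps the grep expression (and the bash -c wrapper) from
+                       // matching its own entry in the ps output, so any line returned indicates a
+                       // separately launched DynamicPayloadGenerator process.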
+                       process = new ProcessBuilder().command("bash", "-c", "ps -ef | grep '[D]ynamicPayloadGenerator'").start();
+                       try (BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
+                               while (br.readLine() != null) {
+                                       count++;
+                               }
+                       }
+
+                       int exitVal = process.waitFor();
+                       LOGGER.info("Check if dataExport is running returned: " + exitVal);
+               } catch (Exception e) {
+                       ErrorLogHelper.logError("AAI_8002", "Exception while running the check to see if dataExport is running  "+ e.getMessage());
+                       LOGGER.info("Exception while running the check to see if dataExport is running "+ e.getMessage());
+               }
+
+               return count > 0;
+       }
+
+       /**
+        * The findSnapshot method returns the second-to-last data snapshot (by last-modified time).
+        * If only one snapshot exists it returns that one; if none exist it returns null.
+        * @return a single snapshot File, or null if no snapshot was found
+        */
+       private static File findSnapshot() {
+               String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" + 
+                               AAIConstants.AAI_FILESEP + "dataSnapshots";
+               File snapshot = null;
+               File targetDirFile = new File(targetDir);
+               
+               File[] allFilesArr = targetDirFile.listFiles((FileFilter) FileFileFilter.FILE);
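+               // FileFileFilter.FILE matches regular files only, so subdirectories are ignored here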
+               if ( allFilesArr == null || allFilesArr.length == 0 ) {
+                       ErrorLogHelper.logError("AAI_8001", "Unable to find data snapshots at " + targetDir);
+                       LOGGER.info ("Unable to find data snapshots at " + targetDir);
+                       return (snapshot);
+               }
+               if ( allFilesArr.length > 1 ) {
+                       Arrays.sort(allFilesArr, LastModifiedFileComparator.LASTMODIFIED_REVERSE);
+                       // need to use the second to last modified
+                       snapshot = allFilesArr[1];
+               }
+               else {
+                       snapshot = allFilesArr[0];
+               }
+               return (snapshot);
+       }
+       
+       /**
+        * The findMultipleSnapshots method looks in the data snapshots directory for sets of snapshot
+        * files that match the multi-file naming pattern.
+        * @return the file name prefix (without the .P* suffix) of the second-to-last snapshot set,
+        *         or null if fewer than two sets are found or the names are not in the expected format
+        */
+       private static String findMultipleSnapshots() {
+               String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" + 
+                               AAIConstants.AAI_FILESEP + "dataSnapshots";
+               String snapshotName = null;
+               File targetDirFile = new File(targetDir);
+               TreeMap<String,List<File>> fileMap = new TreeMap<String,List<File>>(String.CASE_INSENSITIVE_ORDER);
+               
+               /* Example of a multiple-snapshot set:
+                    dataSnapshot.graphSON.201804022009.P0
+                    dataSnapshot.graphSON.201804022009.P1
+                    dataSnapshot.graphSON.201804022009.P2
+                    dataSnapshot.graphSON.201804022009.P3
+                    dataSnapshot.graphSON.201804022009.P4 */
+               String snapshotPattern = "^.*dataSnapshot\\.graphSON\\.(\\d+)\\.P.*$";
+               Pattern p = Pattern.compile (snapshotPattern);
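+               // group(1) captures the timestamp portion (e.g. 201804022009), which is used below
+               // to group the .P0..Pn files belonging to a single snapshot set.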
+               
+               FileFilter fileFilter = new RegexFileFilter(snapshotPattern);
+               File[] allFilesArr = targetDirFile.listFiles(fileFilter);
+               
+               if ( allFilesArr == null || allFilesArr.length == 0 ) {
+                       ErrorLogHelper.logError("AAI_8001", "Unable to find data snapshots at " + targetDir);
+                       LOGGER.info ("Unable to find data snapshots at " + targetDir);
+                       return (null);
+               }
+               
+               if ( allFilesArr.length > 1 ) {
+                       Arrays.sort(allFilesArr, LastModifiedFileComparator.LASTMODIFIED_REVERSE);
+                       for ( File f : allFilesArr ) {
+                               // find the second to last group of multiple snapshots
+                               Matcher m = p.matcher(f.getPath());
+                               if ( m.matches() ) {
+                                       String g1 = m.group(1);
+                                       LOGGER.debug ("Found group " + g1);
+                                       if ( !fileMap.containsKey(g1) ) {
+                                               ArrayList<File> l = new ArrayList<File>();
+                                               l.add(f);
+                                               fileMap.put(g1, l);
+                                       }
+                                       else {
+                                               List<File> l = fileMap.get(g1);
+                                               l.add(f);
+                                               fileMap.put(g1, l);
+                                       }
+                               }
+
+                       }
+                       if ( fileMap.size() > 1 ) {
+                               NavigableMap<String,List<File>> dmap = fileMap.descendingMap();
+                       
+                               Map.Entry<String,List<File>> fentry = dmap.firstEntry();
+                               LOGGER.debug ("First key in descending map " + fentry.getKey());
+                               
+                               Map.Entry<String,List<File>> lentry = dmap.higherEntry(fentry.getKey());
+                               LOGGER.debug ("Next key in descending map " + lentry.getKey());
+                               
+                               List<File> l = lentry.getValue();
+                               snapshotName = l.get(0).getAbsolutePath();
+                               // Remove the .P* extension
+                               int lastDot = snapshotName.lastIndexOf('.');
+                               if ( lastDot > 0 ) {
+                                       snapshotName = snapshotName.substring(0,lastDot);
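+                                       // e.g. ".../dataSnapshot.graphSON.201804022009.P0" becomes
+                                       //      ".../dataSnapshot.graphSON.201804022009" - the prefix shared by the set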
+                               }
+                               else {
+                                       LOGGER.info ("Invalid snapshot file name format " + snapshotName);
+                                       return null;
+                               }
+                       }
+               }
+               else {
+                       return null;
+               }
+               return (snapshotName);
+       }
+       /**
+        * The deletePayload method deletes the previously generated payload directories found under the target directory.
+        * @param targetDirFile the directory that contains the payload directories
+        * @throws AAIException the AAI exception
+        */
+       private static void deletePayload(File targetDirFile) throws AAIException {
+               
+               File[] allFilesArr = targetDirFile.listFiles((FileFilter)DirectoryFileFilter.DIRECTORY);
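+               // DirectoryFileFilter.DIRECTORY selects only subdirectories - each previously
+               // generated payload is expected to live in its own directory under the output location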
+               if ( allFilesArr == null || allFilesArr.length == 0 ) {
+                       LOGGER.info ("No payload files found at " + targetDirFile.getPath());
+                       return;
+               }
+               for ( File f : allFilesArr ) {
+                       try {
+                               FileUtils.deleteDirectory(f);
+                       }
+                       catch (IOException e) {
+                               
+                               LOGGER.info ("Unable to delete directory " + f.getAbsolutePath() + " " + e.getMessage());
+                       }
+                       
+               }
+               
+       }
+       /**
+        * The runScript method runs a shell script/command with a variable number of arguments
+        * @param script The script/command arguments
+        */
+       private static void runScript(String ...script ) {
+               Process process = null;
+               try {
+                       process = new ProcessBuilder().command(script).start();
+                       int exitVal = process.waitFor();
+                       LOGGER.info(script[0] + " returned: " + exitVal);
+               } catch (Exception e) {
+                       ErrorLogHelper.logError("AAI_8002", "Exception while running " + script[0] + " " + e.getMessage());
+                       LOGGER.info("Exception while running " + script[0] + " " + e.getMessage());
+               }
+               }
+               
+       }
+}
index 199e704..e222228 100644 (file)
@@ -47,6 +47,8 @@ import org.apache.tinkerpop.gremlin.structure.Property;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.apache.tinkerpop.gremlin.structure.VertexProperty;
 import org.onap.aai.GraphAdminApp;
+import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.util.GraphAdminConstants;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.Introspector;
@@ -60,6 +62,7 @@ import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.edges.enums.AAIDirection;
 import org.onap.aai.edges.enums.EdgeProperty;
 import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
 import org.onap.aai.util.*;
 import org.onap.aai.logging.LoggingContext.StatusCode;
 
@@ -107,8 +110,8 @@ public class DataGrooming {
                // at all nodes of the passed-in nodeType.
                int timeWindowMinutes = 0;
 
-               int maxRecordsToFix = AAIConstants.AAI_GROOMING_DEFAULT_MAX_FIX;
-               int sleepMinutes = AAIConstants.AAI_GROOMING_DEFAULT_SLEEP_MINUTES;
+               int maxRecordsToFix = GraphAdminConstants.AAI_GROOMING_DEFAULT_MAX_FIX;
+               int sleepMinutes = GraphAdminConstants.AAI_GROOMING_DEFAULT_SLEEP_MINUTES;
                try {
                        String maxFixStr = AAIConfig.get("aai.grooming.default.max.fix");
                        if( maxFixStr != null &&  !maxFixStr.equals("") ){
@@ -121,7 +124,7 @@ public class DataGrooming {
                }
                catch ( Exception e ){
                        // Don't worry, we'll just use the defaults that we got from AAIConstants
-                       LOGGER.warn("WARNING - could not pick up aai.grooming values from aaiconfig.properties file. ");
+                       LOGGER.warn("WARNING - could not pick up aai.grooming values from aaiconfig.properties file. " + e.getMessage());
                }
 
                String prevFileName = "";
@@ -129,141 +132,45 @@ public class DataGrooming {
                dupeGrpsDeleted = 0;
                FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
                String dteStr = fd.getDateTime();
-
                cArgs = new CommandLineArgs();
+               try {
+                       String maxFixStr = AAIConfig.get("aai.grooming.default.max.fix");
+                       if( maxFixStr != null &&  !maxFixStr.equals("") ){
+                               cArgs.maxRecordsToFix = Integer.parseInt(maxFixStr);
+                       }
+                       String sleepStr = AAIConfig.get("aai.grooming.default.sleep.minutes");
+                       if( sleepStr != null &&  !sleepStr.equals("") ){
+                               cArgs.sleepMinutes = Integer.parseInt(sleepStr);
+                       }
+               }
+               catch ( Exception e ){
+                       // Don't worry, we'll just use the default values
+                       LOGGER.warn("WARNING - could not pick up aai.grooming values from aaiconfig.properties file. " + e.getMessage());
+               }
+               
+               
                JCommander jCommander = new JCommander(cArgs, args);
                jCommander.setProgramName(DataGrooming.class.getSimpleName());
                
                //Print Defaults
-               LOGGER.info("EdgesOnlyFlag is" + cArgs.edgesOnlyFlag);
-               LOGGER.info("DoAutoFix is" + cArgs.doAutoFix);
-               LOGGER.info("skipHostCheck is" + cArgs.skipHostCheck);
-               LOGGER.info("dontFixOrphansFlag is" + cArgs.dontFixOrphansFlag);
-               LOGGER.info("singleCommits is" + cArgs.singleCommits);
-               LOGGER.info("dupeCheckOff is" + cArgs.dupeCheckOff);
-               LOGGER.info("dupeFixOn is" + cArgs.dupeFixOn);
-               LOGGER.info("ghost2CheckOff is" + cArgs.ghost2CheckOff);
-               LOGGER.info("ghost2FixOn is" + cArgs.ghost2FixOn);
-               LOGGER.info("neverUseCache is" + cArgs.neverUseCache);
-               LOGGER.info("skipEdgeChecks is" + cArgs.skipEdgeCheckFlag);
-               LOGGER.info("skipIndexUpdateFix is" + cArgs.skipIndexUpdateFix);
-               LOGGER.info("maxFix is" + cArgs.maxRecordsToFix);
+               LOGGER.info("EdgesOnlyFlag is [" + cArgs.edgesOnlyFlag + "]");
+               LOGGER.info("DoAutoFix is [" + cArgs.doAutoFix + "]");
+               LOGGER.info("skipHostCheck is [" + cArgs.skipHostCheck + "]");
+               LOGGER.info("dontFixOrphansFlag is [" + cArgs.dontFixOrphansFlag + "]");
+               LOGGER.info("dupeCheckOff is [" + cArgs.dupeCheckOff + "]");
+               LOGGER.info("dupeFixOn is [" + cArgs.dupeFixOn + "]");
+               LOGGER.info("ghost2CheckOff is [" + cArgs.ghost2CheckOff + "]");
+               LOGGER.info("ghost2FixOn is [" + cArgs.ghost2FixOn + "]");
+               LOGGER.info("neverUseCache is [" + cArgs.neverUseCache + "]");
+               LOGGER.info("singleNodeType is [" + cArgs.singleNodeType + "]");
+               LOGGER.info("skipEdgeChecks is [" + cArgs.skipEdgeCheckFlag + "]");
+               LOGGER.info("skipIndexUpdateFix is [" + cArgs.skipIndexUpdateFix + "]");
+               LOGGER.info("maxFix is [" + cArgs.maxRecordsToFix + "]");
                
-               /*if (args.length > 0) {
-                       // They passed some arguments in that will affect processing
-                       for (int i = 0; i < args.length; i++) {
-                               String thisArg = args[i];
-                               if (thisArg.equals("-edgesOnly")) {
-                                       edgesOnlyFlag = true;
-                               } else if (thisArg.equals("-autoFix")) {
-                                       doAutoFix = true;
-                               } else if (thisArg.equals("-skipHostCheck")) {
-                                       skipHostCheck = true;
-                               } else if (thisArg.equals("-dontFixOrphans")) {
-                                       dontFixOrphansFlag = true;
-                               } else if (thisArg.equals("-singleCommits")) {
-                                       singleCommits = true;
-                               } else if (thisArg.equals("-dupeCheckOff")) {
-                                       dupeCheckOff = true;
-                               } else if (thisArg.equals("-dupeFixOn")) {
-                                       dupeFixOn = true;
-                               } else if (thisArg.equals("-ghost2CheckOff")) {
-                                       ghost2CheckOff = true;
-                               } else if (thisArg.equals("-neverUseCache")) {
-                                       neverUseCache = true;
-                               } else if (thisArg.equals("-ghost2FixOn")) {
-                                       ghost2FixOn = true;
-                               } else if (thisArg.equals("-skipEdgeChecks")) {
-                                       skipEdgeCheckFlag = true;
-                               } else if (thisArg.equals("-skipIndexUpdateFix")) {
-                                       skipIndexUpdateFix = true;
-                               } else if (thisArg.equals("-maxFix")) {
-                                       i++;
-                                       if (i >= args.length) {
-                                               LoggingContext.statusCode(StatusCode.ERROR);
-                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
-                                               LOGGER.error(" No value passed with -maxFix option.  ");
-                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
-                                       }
-                                       String nextArg = args[i];
-                                       try {
-                                               maxRecordsToFix = Integer.parseInt(nextArg);
-                                       } catch (Exception e) {
-                                               LoggingContext.statusCode(StatusCode.ERROR);
-                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
-                                               LOGGER.error("Bad value passed with -maxFix option: ["
-                                                               + nextArg + "]");
-                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
-                                       }
-                               } else if (thisArg.equals("-sleepMinutes")) {
-                                       i++;
-                                       if (i >= args.length) {
-                                               LoggingContext.statusCode(StatusCode.ERROR);
-                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
-                                               LOGGER.error("No value passed with -sleepMinutes option.");
-                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
-                                       }
-                                       String nextArg = args[i];
-                                       try {
-                                               sleepMinutes = Integer.parseInt(nextArg);
-                                       } catch (Exception e) {
-                                               LoggingContext.statusCode(StatusCode.ERROR);
-                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
-                                               LOGGER.error("Bad value passed with -sleepMinutes option: ["
-                                                               + nextArg + "]");
-                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
-                                       }
-                               } else if (thisArg.equals("-timeWindowMinutes")) {
-                                       i++;
-                                       if (i >= args.length) {
-                                               LoggingContext.statusCode(StatusCode.ERROR);
-                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
-                                               LOGGER.error("No value passed with -timeWindowMinutes option.");
-                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
-                                       }
-                                       String nextArg = args[i];
-                                       try {
-                                               timeWindowMinutes = Integer.parseInt(nextArg);
-                                       } catch (Exception e) {
-                                               LoggingContext.statusCode(StatusCode.ERROR);
-                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
-                                               LOGGER.error("Bad value passed with -timeWindowMinutes option: ["
-                                                               + nextArg + "]");
-                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
-                                       }
-
-                               } else if (thisArg.equals("-f")) {
-                                       i++;
-                                       if (i >= args.length) {
-                                               LoggingContext.statusCode(StatusCode.ERROR);
-                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
-                                               LOGGER.error(" No value passed with -f option. ");
-                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
-                                       }
-                                       prevFileName = args[i];
-                               } else if (thisArg.equals("-singleNodeType")) {
-                                       i++;
-                                       if (i >= args.length) {
-                                               LoggingContext.statusCode(StatusCode.ERROR);
-                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
-                                               LOGGER.error(" No value passed with -onlyThisNodeType option. ");
-                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
-                                       }
-                                       singleNodeType = args[i];
-                               } else {
-                                       LoggingContext.statusCode(StatusCode.ERROR);
-                                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
-                                       LOGGER.error(" Unrecognized argument passed to DataGrooming: ["
-                                                       + thisArg + "]. ");
-                                       LOGGER.error(" Valid values are: -f -autoFix -maxFix -edgesOnly -skipEdgeChecks -dupeFixOn -donFixOrphans -timeWindowMinutes -sleepMinutes -neverUseCache");
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(0);
-                               }
-                       }
-               } */
 
                String windowTag = "FULL";
                //TODO???
-               if( timeWindowMinutes > 0 ){
+               if( cArgs.timeWindowMinutes > 0 ){
                        windowTag = "PARTIAL";
                }
                String groomOutFileName = "dataGrooming." + windowTag + "." + dteStr + ".out";
@@ -279,7 +186,7 @@ public class DataGrooming {
                }
 
                try {
-                       if (!prevFileName.equals("")) {
+                       if (!cArgs.prevFileName.equals("")) {
                                // They are trying to fix some data based on a data in a
                                // previous file.
                                LOGGER.info(" Call doTheGrooming() with a previous fileName ["
@@ -287,9 +194,9 @@ public class DataGrooming {
                                Boolean finalShutdownFlag = true;
                                Boolean cacheDbOkFlag = false;
                                doTheGrooming(prevFileName, cArgs.edgesOnlyFlag, cArgs.dontFixOrphansFlag,
-                                               cArgs.maxRecordsToFix, groomOutFileName, ver, cArgs.singleCommits,
+                                               cArgs.maxRecordsToFix, groomOutFileName, ver, 
                                                cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn,
-                                               cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag,
+                                               finalShutdownFlag, cacheDbOkFlag,
                                                cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes,
                                                cArgs.singleNodeType, cArgs.skipIndexUpdateFix );
                                
@@ -306,8 +213,8 @@ public class DataGrooming {
                                Boolean cacheDbOkFlag = true;
                                int fixCandCount = doTheGrooming("", cArgs.edgesOnlyFlag,
                                                cArgs.dontFixOrphansFlag, cArgs.maxRecordsToFix, groomOutFileName,
-                                               ver, cArgs.singleCommits, cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn,
-                                               cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag,
+                                               ver, cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn,
+                                               finalShutdownFlag, cacheDbOkFlag,
                                                cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes,
                                                cArgs.singleNodeType, cArgs.skipIndexUpdateFix );
                                if (fixCandCount == 0) {
@@ -334,9 +241,9 @@ public class DataGrooming {
                                        cacheDbOkFlag = false;
                                        doTheGrooming(groomOutFileName, cArgs.edgesOnlyFlag,
                                                        cArgs.dontFixOrphansFlag, cArgs.maxRecordsToFix,
-                                                       secondGroomOutFileName, ver, cArgs.singleCommits,
+                                                       secondGroomOutFileName, ver, 
                                                        cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn,
-                                                       cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag,
+                                                       finalShutdownFlag, cacheDbOkFlag,
                                                        cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes,
                                                        cArgs.singleNodeType, cArgs.skipIndexUpdateFix );
                                }
@@ -348,12 +255,12 @@ public class DataGrooming {
                                Boolean cacheDbOkFlag = true;
                                if( cArgs.neverUseCache ){
                                        // They have forbidden us from using a cached db connection.
-                                       cArgs.cacheDbOkFlag = false;
+                                       cacheDbOkFlag = false;
                                }
                                doTheGrooming("", cArgs.edgesOnlyFlag, cArgs.dontFixOrphansFlag,
-                                               cArgs.maxRecordsToFix, groomOutFileName, ver, cArgs.singleCommits,
+                                               cArgs.maxRecordsToFix, groomOutFileName, ver, 
                                                cArgs.dupeCheckOff, cArgs.dupeFixOn, cArgs.ghost2CheckOff, cArgs.ghost2FixOn,
-                                               cArgs.finalShutdownFlag, cArgs.cacheDbOkFlag,
+                                               finalShutdownFlag, cacheDbOkFlag,
                                                cArgs.skipEdgeCheckFlag, cArgs.timeWindowMinutes,
                                                cArgs.singleNodeType, cArgs.skipIndexUpdateFix );
                        }
@@ -371,7 +278,7 @@ public class DataGrooming {
         *
         * @param args the arguments
         */
-       public static void main(String[] args) {
+       public static void main(String[] args) throws AAIException {
 
                // Set the logging file properties to be used by EELFManager
                System.setProperty("aai.service.name", DataGrooming.class.getSimpleName());
@@ -390,11 +297,24 @@ public class DataGrooming {
                props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS);
                props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
 
-               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
-                               "org.onap.aai.config",
-                               "org.onap.aai.setup"
-               );
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+               PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
+               initializer.initialize(ctx);
+
+               try {
+                       ctx.scan(
+                                       "org.onap.aai.config",
+                                       "org.onap.aai.setup"
+                       );
+                       ctx.refresh();
 
+               } catch (Exception e) {
+                       AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
+                       LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
+                       throw aai;
+               }
                LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
                SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
                DataGrooming dataGrooming = new DataGrooming(loaderFactory, schemaVersions);
@@ -410,7 +330,6 @@ public class DataGrooming {
         * @param maxRecordsToFix the max records to fix
         * @param groomOutFileName the groom out file name
         * @param version the version
-        * @param singleCommits the single commits
         * @param dupeCheckOff the dupe check off
         * @param dupeFixOn the dupe fix on
         * @param ghost2CheckOff the ghost 2 check off
@@ -422,7 +341,6 @@ public class DataGrooming {
        private int doTheGrooming( String fileNameForFixing,
                        Boolean edgesOnlyFlag, Boolean dontFixOrphansFlag,
                        int maxRecordsToFix, String groomOutFileName, String version,
-                       Boolean singleCommits, 
                        Boolean dupeCheckOff, Boolean dupeFixOn,
                        Boolean ghost2CheckOff, Boolean ghost2FixOn, 
                        Boolean finalShutdownFlag, Boolean cacheDbOkFlag,
@@ -532,9 +450,9 @@ public class DataGrooming {
                        ghostNodeHash = new HashMap<String, Vertex>();
                        dupeGroups = new ArrayList<>();
 
+                       LOGGER.debug(" Using default schemaVersion = [" + schemaVersions.getDefaultVersion().toString() + "]" );
                        Loader loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion());
 
-
                        // NOTE --- At one point, we tried explicitly searching for
                        // nodes that were missing their aai-node-type (which does
                        // happen sometimes), but the search takes too long and cannot
@@ -567,7 +485,7 @@ public class DataGrooming {
                                        int thisNtDeleteCount = 0;
                                        
                                        if( !singleNodeType.equals("") && !singleNodeType.equals(nType) ){
-                                               // We are only going to process this one node type
+                                               // We are only going to process this one node type and this isn't it
                                                continue;
                                        }
 
@@ -633,12 +551,19 @@ public class DataGrooming {
                                                                continue;
                                                        }
                                                        totalNodeCount++;
-                                                       List <Vertex> secondGetList = new ArrayList <> ();
+                                                       // Note - the "secondGetList" is used one node at a time - it is populated
+                                                       //   using either the node's defined unique key/keys (if it is not dependent on
+                                                       //   a "parent" node), or using the key/keys "under" its parent node.
+                                                       List <Vertex> secondGetList = new ArrayList <> ();
+                                                       
                                                        // -----------------------------------------------------------------------
                                                        // For each vertex of this nodeType, we want to:
-                                                       //              a) make sure that it can be retrieved using it's AAI defined key
-                                                       //      b) make sure that it is not a duplicate
+                                                       //      a) make sure it can be retrieved using its "aai-uri"
+                                                       //      b) make sure that it can be retrieved using its AAI defined key(s)
+                                                       //      c) make sure that it is not a duplicate
                                                        // -----------------------------------------------------------------------
+
+                                                       Boolean aaiUriOk = checkAaiUriOk(source1, thisVtx);
                                                        
                                                        // For this instance of this nodeType, get the key properties 
                                                        HashMap<String, Object> propHashWithKeys = new HashMap<>();
@@ -646,7 +571,6 @@ public class DataGrooming {
                                                        while (keyPropI.hasNext()) {
                                                                String propName = keyPropI.next();
                                                                String propVal = "";
-                                                               //delete an already deleted vertex
                                                                Object obj = thisVtx.<Object>property(propName).orElse(null);
                                                                if (obj != null) {
                                                                        propVal = obj.toString();
@@ -688,7 +612,7 @@ public class DataGrooming {
                                                                                                processedVertices.add(thisVtx.id().toString());
                                                                                                Object ob = thisVtx.<Object>property("aai-node-type").orElse(null);
                                                                                                if( ob == null && !skipIndexUpdateFix ){
-                                                                                                       updateIndexedProps(thisVtx, thisVid, nType, propTypeHash, indexedProps);
+                                                                                                       updateIndexedPropsForMissingNT(thisVtx, thisVid, nType, propTypeHash, indexedProps);
                                                                                                        updateOnlyFlag = true;
                                                                                                        dummyUpdCount++;
                                                                                                        // Since we are updating this delete candidate, not deleting it, we
@@ -767,16 +691,33 @@ public class DataGrooming {
                                                                        }
                                                                }// end of -- else this is a dependent node  -- piece
                                                                
-                                                               if( depNodeOk && (secondGetList == null || secondGetList.size() == 0) ){
-                                                                       // We could not get the node back using it's own key info. 
+                                                               Boolean aaiKeysOk = true;
+                                                               if( (secondGetList == null || secondGetList.size() == 0)
+                                                                               && depNodeOk){
+                                                                       aaiKeysOk = false;
+                                                               }
+                                                               
+                                                               if( (!aaiKeysOk || !aaiUriOk) 
+                                                                               && !deleteCandidateList.contains(thisVid) 
+                                                                               && !skipIndexUpdateFix ){
+                                                                       // Either the aaiKeys or aaiUri was bad.  This may
+                                                                       // be a problem with the indexes so we'll try to reset 
+                                                                       // them since this node is not on the delete list from
+                                                                       // a previous run.
+                                                                       tryToReSetIndexedProps(thisVtx, thisVid, indexedProps);
+                                                               }
+                                                               
+                                                               if( !aaiKeysOk || !aaiUriOk ){
+                                                                       // We could not get the node back using its own key info or aai-uri.
                                                                        // So, it's a PHANTOM
+                                                                       
                                                                        if (deleteCandidateList.contains(thisVid)) {
                                                                                boolean okFlag = true;
                                                                                boolean updateOnlyFlag = false;
                                                                                try {
                                                                                        Object ob = thisVtx.<Object>property("aai-node-type").orElse(null);
                                                                                        if( ob == null && !skipIndexUpdateFix ){
-                                                                                               updateIndexedProps(thisVtx, thisVid, nType, propTypeHash, indexedProps);
+                                                                                               updateIndexedPropsForMissingNT(thisVtx, thisVid, nType, propTypeHash, indexedProps);
                                                                                                dummyUpdCount++;
                                                                                                updateOnlyFlag = true;
                                                                                                // Since we are updating this delete candidate, not deleting it, we
@@ -813,7 +754,7 @@ public class DataGrooming {
                                                                        List<String> tmpDupeGroups = checkAndProcessDupes(
                                                                                                TRANSID, FROMAPPID, g, source1, version,
                                                                                                nType, secondGetList, dupeFixOn,
-                                                                                               deleteCandidateList, singleCommits,     dupeGroups, loader);
+                                                                                               deleteCandidateList, dupeGroups, loader);
                                                                        Iterator<String> dIter = tmpDupeGroups.iterator();
                                                                        while (dIter.hasNext()) {
                                                                                // Add in any newly found dupes to our running list
@@ -862,7 +803,7 @@ public class DataGrooming {
                                                        List<String> tmpDupeGroups = checkAndProcessDupes(
                                                                                TRANSID, FROMAPPID, g, source1, version,
                                                                                nType, dupeList, dupeFixOn,
-                                                                               deleteCandidateList, singleCommits,     dupeGroups, loader);
+                                                                               deleteCandidateList, dupeGroups, loader);
                                                        Iterator<String> dIter = tmpDupeGroups.iterator();
                                                        while (dIter.hasNext()) {
                                                                // Add in any newly found dupes to our running list
@@ -874,13 +815,7 @@ public class DataGrooming {
                                                
                                        }// end of extra dupe check for non-dependent nodes
                                        
-                                       if ( (thisNtDeleteCount > 0) && singleCommits ) {
-                                               // NOTE - the singleCommits option is not used in normal processing
-                                               g.tx().commit();
-                                               g = AAIGraph.getInstance().getGraph().newTransaction();
-                                               
-                                       }
-                                       thisNtDeleteCount = 0;
+                                       thisNtDeleteCount = 0;  // Reset for the next pass
                                        LOGGER.info( " Processed " + thisNtCount + " records for [" + nType + "], " + totalNodeCount + " total (in window) overall. " );
                                        
                                }// While-loop for each node type
@@ -889,17 +824,16 @@ public class DataGrooming {
 
 
                  if( !skipEdgeCheckFlag ){
-                       // --------------------------------------------------------------------------------------
-                       // Now, we're going to look for one-armed-edges. Ie. an edge that
-                       // should have
-                       // been deleted (because a vertex on one side was deleted) but
-                       // somehow was not deleted.
-                       // So the one end of it points to a vertexId -- but that vertex is
-                       // empty.
-                       // --------------------------------------------------------------------------------------
+                       // ---------------------------------------------------------------
+                       // Now, we're going to look for one-armed-edges. Ie. an 
+                       // edge that should have been deleted (because a vertex on 
+                       // one side was deleted) but somehow was not deleted.
+                       // So the one end of it points to a vertexId -- but that 
+                       // vertex is empty.
+                       // --------------------------------------------------------------
 
                        // To do some strange checking - we need a second graph object
-                       LOGGER.debug("    ---- DEBUG --- about to open a SECOND graph (takes a little while)--------\n");
+                       LOGGER.debug("    ---- NOTE --- about to open a SECOND graph (takes a little while)--------\n");
                        // Note - graph2 just reads - but we want it to use a fresh connection to 
                        //      the database, so we are NOT using the CACHED DB CONFIG here.
                        
@@ -1031,6 +965,7 @@ public class DataGrooming {
                                                                                // If we can NOT get this ghost with the SECOND graph-object, 
                                                                                // it is still a ghost since even though we can get data about it using the FIRST graph 
                                                                                // object.  
+                                                                               
                                                                                try {
                                                                                         ghost2 = g.traversal().V(vIdLong).next();
                                                                                }
@@ -1051,6 +986,7 @@ public class DataGrooming {
                                                                LOGGER.warn(">>> WARNING trying to get edge's In-vertex props ", err);
                                                        }
                                                }
+                                               
                                                if (keysMissing || vIn == null || vNtI.equals("")
                                                                || cantGetUsingVid) {
                                                        // this is a bad edge because it points to a vertex
@@ -1067,14 +1003,7 @@ public class DataGrooming {
                                                                                else {
                                                                                        vIn.remove();
                                                                                }
-                                                                               if (singleCommits) {
-                                                                                       // NOTE - the singleCommits option is not used in normal processing
-                                                                                       g.tx().commit();
-                                                                                       g = AAIGraph.getInstance().getGraph().newTransaction();
-                                                                               }
-                                                                               else {
-                                                                                       executeFinalCommit = true;
-                                                                               }
+                                                                               executeFinalCommit = true;
                                                                                deleteCount++;
                                                                        } catch (Exception e1) {
                                                                                okFlag = false;
@@ -1092,14 +1021,7 @@ public class DataGrooming {
                                                                        // vertex
                                                                        try {
                                                                                e.remove();
-                                                                               if (singleCommits) {
-                                                                                       // NOTE - the singleCommits option is not used in normal processing
-                                                                                       g.tx().commit();
-                                                                                       g = AAIGraph.getInstance().getGraph().newTransaction();
-                                                                               }
-                                                                               else {
-                                                                                       executeFinalCommit = true;
-                                                                               }
+                                                                               executeFinalCommit = true;
                                                                                deleteCount++;
                                                                        } catch (Exception ex) {
                                                                                // NOTE - often, the exception is just
@@ -1192,14 +1114,7 @@ public class DataGrooming {
                                                                                else if (vOut != null) {
                                                                                        vOut.remove();
                                                                                }
-                                                                               if (singleCommits) {
-                                                                                       // NOTE - the singleCommits option is not used in normal processing
-                                                                                       g.tx().commit();
-                                                                                       g = AAIGraph.getInstance().getGraph().newTransaction();
-                                                                               }
-                                                                               else {
-                                                                                       executeFinalCommit = true;
-                                                                               }
+                                                                               executeFinalCommit = true;
                                                                                deleteCount++;
                                                                        } catch (Exception e1) {
                                                                                okFlag = false;
@@ -1217,14 +1132,7 @@ public class DataGrooming {
                                                                        // vertex
                                                                        try {
                                                                                e.remove();
-                                                                               if (singleCommits) {
-                                                                                       // NOTE - the singleCommits option is not used in normal processing
-                                                                                       g.tx().commit();
-                                                                                       g = AAIGraph.getInstance().getGraph().newTransaction();
-                                                                               }
-                                                                               else {
-                                                                                       executeFinalCommit = true;
-                                                                               }
+                                                                               executeFinalCommit = true;
                                                                                deleteCount++;
                                                                        } catch (Exception ex) {
                                                                                // NOTE - often, the exception is just
@@ -1260,7 +1168,7 @@ public class DataGrooming {
                        
 
                        deleteCount = deleteCount + dupeGrpsDeleted;
-                       if (!singleCommits && (deleteCount > 0 || dummyUpdCount > 0) ){
+                       if (deleteCount > 0 || dummyUpdCount > 0){
                                executeFinalCommit = true;
                        }
 
@@ -1647,12 +1555,50 @@ public class DataGrooming {
        }// end of doTheGrooming()
        
        
-       public void updateIndexedProps(Vertex thisVtx, String thisVidStr, String nType,
+       public void tryToReSetIndexedProps(Vertex thisVtx, String thisVidStr, ArrayList <String> indexedProps) {
+               // Note - This is for when a node looks to be a phantom (i.e. an index/pointer problem).
+               // We will only deal with properties that are indexed and have a value - and for those,
+               // we will re-set them to the same value they already have, so that if their
+               // index entry was broken, re-writing the value may repair it.
+              
+               LOGGER.info(" We will try to re-set the indexed properties for this node without changing any property values.  VID = " + thisVidStr );
+               // These reserved-prop-names are all indexed for all nodes
+               
+               ArrayList <String> propList = new ArrayList <String> ();
+               propList.addAll(indexedProps);
+               // Add in the global props that we'd also like to reset
+               propList.add("aai-node-type");
+               propList.add("aai-uri");
+               propList.add("aai-uuid");
+               Iterator<String> propNameItr = propList.iterator();
+               while( propNameItr.hasNext() ){
+                       String propName = propNameItr.next();
+                       try {
+                               Object valObj = thisVtx.<Object>property(propName).orElse(null);
+                               if( valObj != null ){
+                                       LOGGER.info(" We will try resetting prop [" + propName 
+                                                       + "], to val = [" + valObj.toString() + "] for VID = " + thisVidStr);
+                                       thisVtx.property(propName, valObj);
+                               }
+                       } catch (Exception ex ){
+                               // log that we did not re-set this property
+                               LOGGER.debug("DEBUG - Exception while trying to re-set the indexed properties for this node: VID = " 
+                               + thisVidStr + ".  exception msg = [" + ex.getMessage() + "]" );
+                       }
+               }
+       }
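	// Hedged usage sketch, not part of this change: "grooming" stands in for an
	// existing DataGrooming instance, and the vertex id and property names are
	// made-up placeholders. It shows how a suspected phantom node's indexed
	// values get written back onto themselves so broken index entries can be
	// refreshed without changing any data.
	static void reindexSuspectedPhantomExample(DataGrooming grooming, Graph graph) {
		Vertex suspectVtx = graph.traversal().V(12345L).next();   // placeholder vertex id
		ArrayList<String> indexedProps = new ArrayList<>();
		indexedProps.add("vnf-id");      // placeholder indexed property names
		indexedProps.add("vnf-name");
		grooming.tryToReSetIndexedProps(suspectVtx, suspectVtx.id().toString(), indexedProps);
	}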
+                 
+                 
+        public void updateIndexedPropsForMissingNT(Vertex thisVtx, String thisVidStr, String nType,
                        HashMap <String,String>propTypeHash, ArrayList <String> indexedProps) {
-               // This is a "missing-aai-node-type" scenario.
+               // This is for the very specific "missing-aai-node-type" scenario.
+               // That is: a node that does not have the "aai-node-type" property, but still has
+               //     an aai-node-type index pointing to it and is an orphan node.  Nodes like this
+               //     are (probably) the result of a delete request that only partially completed.
                // Other indexes may also be messed up, so we will update all of them on
                //    this pass.  A future pass will just treat this node like a regular orphan
                //    and delete it (if appropriate).
+                
                LOGGER.info("  We will be updating the indexed properties for this node to dummy values.  VID = " + thisVidStr );
                String dummyPropValStr = thisVidStr + "dummy";
                // These reserved-prop-names are all indexed for all nodes
@@ -1752,9 +1698,20 @@ public class DataGrooming {
                                Object ob = v.<Object>property(propName).orElse(null);
                                if (ob == null || ob.toString().equals("")) {
                                        // It is missing a key property
+                                       String thisVertId = v.id().toString();
+                                       LOGGER.debug(" -- Vid = " + thisVertId 
+                                                       + ", nType = [" + nType + "], is missing keyPropName = [" + propName + "]");
                                        return true;
                                }
                        }
+                       Object ob = v.<Object>property("aai-uri").orElse(null);
+                       if (ob == null || ob.toString().equals("")) {
+                               // It is missing its required aai-uri property
+                               String thisVertId = v.id().toString();
+                               LOGGER.debug(" -- Vid = " + thisVertId 
+                                               + ", nType = [" + nType + "], is missing its [aai-uri] property");
+                               return true;
+                       }
                } catch (AAIException e) {
                        // Something was wrong -- but since we weren't able to check
                        // the keys, we will not declare that it is missing keys.
@@ -1829,11 +1786,11 @@ public class DataGrooming {
                        ArrayList<Vertex> dupeVertexList, String ver, Loader loader)
                        throws AAIException {
 
-               // This method assumes that it is being passed a List of vertex objects
-               // which
-               // violate our uniqueness constraints.
-
-               Vertex nullVtx = null;
+               // This method assumes that it is being passed a List of 
+               // vertex objects which violate our uniqueness constraints.
+               // Note - returning a null vertex means we could not 
+               //   safely pick one to keep (i.e. could not safely know which ones to delete).
+               Vertex nullVtx = null;
 
                if (dupeVertexList == null) {
                        return nullVtx;
@@ -1846,6 +1803,31 @@ public class DataGrooming {
                        return (dupeVertexList.get(0));
                }
 
+               // If they don't all have the same aai-uri, then we will not 
+               // choose between them - someone will need to check them manually 
+               // and pick which one makes sense to keep.
+               Object uriOb = dupeVertexList.get(0).<Object>property("aai-uri").orElse(null);
+               if( uriOb == null || uriOb.toString().equals("") ){
+                       // this is a bad node - hopefully it will be picked up by the phantom checker
+                       return nullVtx;
+               }
+               String thisUri = uriOb.toString();
+               for (int i = 1; i < listSize; i++) {
+                       uriOb = dupeVertexList.get(i).<Object>property("aai-uri").orElse(null);
+                       if( uriOb == null || uriOb.toString().equals("") ){
+                               // this is a bad node - hopefully it will be picked up by the phantom checker
+                               return nullVtx;
+                       }
+                       String nextUri = uriOb.toString();
+                       if( !thisUri.equals(nextUri)){
+                               // there are different URIs on these - so we can't pick 
+                               // a dupe to keep.  Someone will need to look at it.
+                               return nullVtx;
+                       }
+               }
+               
+               // Compare them two at a time to see if we can tell which out of 
+               // the batch to keep.
                Vertex vtxPreferred = null;
                Vertex currentFaveVtx = dupeVertexList.get(0);
                for (int i = 1; i < listSize; i++) {
@@ -1860,7 +1842,14 @@ public class DataGrooming {
                        }
                }
 
-               return (currentFaveVtx);
+               if( currentFaveVtx != null && checkAaiUriOk(g, currentFaveVtx) ){
+                       return (currentFaveVtx);
+               }
+               else {
+                       // We had a preferred vertex, but its aai-uri was bad, so
+                       // we will not recommend one to keep.
+                       return nullVtx;
+               }
 
        } // end of getPreferredDupe()
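	// Sketch of the aai-uri screening that getPreferredDupe() now performs,
	// pulled out as a hypothetical helper (this method does not exist in the
	// change): if any candidate is missing aai-uri, or the candidates disagree
	// on it, no keeper is chosen and someone has to decide manually.
	private boolean allShareSameAaiUri(ArrayList<Vertex> dupeVertexList) {
		String firstUri = null;
		for (Vertex v : dupeVertexList) {
			Object ob = v.<Object>property("aai-uri").orElse(null);
			if (ob == null || ob.toString().equals("")) {
				return false;   // bad node - leave it for the phantom checker
			}
			if (firstUri == null) {
				firstUri = ob.toString();
			} else if (!firstUri.equals(ob.toString())) {
				return false;   // conflicting aai-uri values - needs a manual decision
			}
		}
		return true;
	}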
 
@@ -2041,7 +2030,7 @@ public class DataGrooming {
                                // pointer to it, then save that one.  Otherwise, take the
                                // older one.
                                if( !onlyNodeThatIndexPointsToVidStr.equals("") ){
-                                       // only one is reachable via the index - choose that one.
+                                       // only one is reachable via the index - choose that one if its aai-uri is also good
                                        if( onlyNodeThatIndexPointsToVidStr.equals(vidA.toString()) ){
                                                preferredVtx = vtxA;
                                        }
@@ -2049,11 +2038,13 @@ public class DataGrooming {
                                                preferredVtx = vtxB;
                                        }
                                }
-                               else if (vidA < vidB) {
+                               else if ( checkAaiUriOk(g, vtxA) ) {
                                        preferredVtx = vtxA;
-                               } else {
+                               } 
+                               else if ( checkAaiUriOk(g, vtxB) ) {
                                        preferredVtx = vtxB;
                                }
+                               // else we're picking neither because neither one had a working aai-uri index property
                        }
                } else if (vtxIdsConn2A.size() > vtxIdsConn2B.size()) {
                        // 3 - VertexA is connected to more things than vtxB.
@@ -2112,14 +2103,13 @@ public class DataGrooming {
         * @param passedVertList the passed vert list
         * @param dupeFixOn the dupe fix on
         * @param deleteCandidateList the delete candidate list
-        * @param singleCommits the single commits
         * @param alreadyFoundDupeGroups the already found dupe groups
         * @return the array list
         */
        private List<String> checkAndProcessDupes(String transId,
                        String fromAppId, Graph g, GraphTraversalSource source, String version, String nType,
                        List<Vertex> passedVertList, Boolean dupeFixOn,
-                       Set<String> deleteCandidateList, Boolean singleCommits,
+                       Set<String> deleteCandidateList, 
                        ArrayList<String> alreadyFoundDupeGroups, Loader loader ) {
                
                ArrayList<String> returnList = new ArrayList<>();
@@ -2203,7 +2193,7 @@ public class DataGrooming {
                                                if (dupeFixOn) {
                                                        didRemove = deleteNonKeepersIfAppropriate(g,
                                                                        dupesStr, prefV.id().toString(),
-                                                                       deleteCandidateList, singleCommits);
+                                                                       deleteCandidateList);
                                                }
                                                if (didRemove) {
                                                        dupeGrpsDeleted++;
@@ -2255,7 +2245,7 @@ public class DataGrooming {
                                                                        didRemove = deleteNonKeepersIfAppropriate(
                                                                                        g, dupesStr, prefV.id()
                                                                                                        .toString(),
-                                                                                       deleteCandidateList, singleCommits);
+                                                                                       deleteCandidateList );
                                                                }
                                                                if (didRemove) {
                                                                        dupeGrpsDeleted++;
@@ -2359,12 +2349,11 @@ public class DataGrooming {
         * @param dupeInfoString the dupe info string
         * @param vidToKeep the vid to keep
         * @param deleteCandidateList the delete candidate list
-        * @param singleCommits the single commits
         * @return the boolean
         */
        private Boolean deleteNonKeepersIfAppropriate(Graph g,
                        String dupeInfoString, String vidToKeep,
-                       Set<String> deleteCandidateList, Boolean singleCommits) {
+                       Set<String> deleteCandidateList ) {
 
                Boolean deletedSomething = false;
                // This assumes that the dupeInfoString is in the format of
@@ -2421,11 +2410,6 @@ public class DataGrooming {
                                                                                                .traversal().V(longVertId).next();
                                                                                vtx.remove();
 
-                                                                               if (singleCommits) {
-                                                                                       // NOTE - the singleCommits option is not used in normal processing
-                                                                                       g.tx().commit();
-                                                                                       g = AAIGraph.getInstance().getGraph().newTransaction();
-                                                                               }
                                                                        } catch (Exception e) {
                                                                                okFlag = false;
                                                                                LoggingContext.statusCode(StatusCode.ERROR);
@@ -2455,6 +2439,70 @@ public class DataGrooming {
        }// end of deleteNonKeepersIfAppropriate()
 
        
+       
+       /**
+        * Makes sure aai-uri exists and can be used to get this node back.
+        *
+        * @param graph the graph traversal source to query
+        * @param origVtx the vertex whose aai-uri is being checked
+        * @return true if aai-uri is populated and the aai-uri index points back to this vertex
+        * @throws AAIException the AAI exception
+        */
+       public Boolean checkAaiUriOk( GraphTraversalSource graph, Vertex origVtx )
+                       throws AAIException{
+               String aaiUriStr = "";
+               try { 
+                       Object ob = origVtx.<Object>property("aai-uri").orElse(null);
+                       String origVid = origVtx.id().toString();
+                       LOGGER.debug("DEBUG --- do checkAaiUriOk() for origVid = " + origVid);
+                       if (ob == null || ob.toString().equals("")) {
+                               // It is missing its aai-uri
+                               LOGGER.debug("DEBUG No [aai-uri] property found for vid = [" 
+                                               + origVid + "] " );
+                               return false;
+                       }
+                       else {
+                               aaiUriStr = ob.toString();
+                               Iterator <Vertex> verts = graph.V().has("aai-uri",aaiUriStr);
+                               int count = 0;
+                               while( verts.hasNext() ){
+                                       count++;
+                                       Vertex foundV = verts.next();
+                                       String foundVid = foundV.id().toString();
+                                       if( !origVid.equals(foundVid) ){
+                                               LOGGER.debug("DEBUG aai-uri key property ["  
+                                                               + aaiUriStr + "] for vid = [" 
+                                                               + origVid + "] brought back different vertex with vid = [" 
+                                                               + foundVid + "]." );
+                                               return false;
+                                       }
+                               }
+                               if( count == 0 ){
+                                       LOGGER.debug("DEBUG aai-uri key property ["  
+                                                       + aaiUriStr + "] for vid = [" 
+                                                       + origVid + "] could not be used to query for that vertex. ");
+                                       return false;   
+                               }
+                               else if( count > 1 ){
+                                       LOGGER.debug("DEBUG aai-uri key property ["  
+                                                       + aaiUriStr + "] for vid = [" 
+                                                       + origVid + "] brought back multiple (" 
+                                                       + count + ") vertices instead of just one. ");
+                                       return false;   
+                               }
+                       }
+               }
+               catch( Exception ex ){
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       LOGGER.error(" ERROR trying to get node with aai-uri: [" + aaiUriStr + "]" + LogFormatTools.getStackTop(ex));
+               }
+               return true;
+               
+       }// End of checkAaiUriOk() 
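	// Hedged usage sketch (not part of this change; "grooming" and the vertex id
	// are placeholders): this is the check the dupe-handling above relies on to
	// confirm that a vertex's stored aai-uri still resolves, through the index,
	// to exactly that one vertex.
	static boolean verifyAaiUriExample(DataGrooming grooming, Graph graph) throws AAIException {
		GraphTraversalSource source = graph.traversal();
		Vertex candidateVtx = source.V(67890L).next();   // placeholder vertex id
		return grooming.checkAaiUriOk(source, candidateVtx);
	}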
+       
        /**
         * Gets the node just using key params.
         *
@@ -2876,47 +2924,44 @@ class CommandLineArgs {
                @Parameter(names = "--help", help = true)
                public boolean help;
 
-               @Parameter(names = "-edgesOnly", description = "Check grooming on edges only", arity = 1)
+               @Parameter(names = "-edgesOnly", description = "Check grooming on edges only")
                public Boolean edgesOnlyFlag = false;
 
-               @Parameter(names = "-autoFix", description = "doautofix", arity = 1)
+               @Parameter(names = "-autoFix", description = "doautofix")
                public Boolean doAutoFix = false;
 
-               @Parameter(names = "-skipHostCheck", description = "skipHostCheck", arity = 1)
+               @Parameter(names = "-skipHostCheck", description = "skipHostCheck")
                public Boolean skipHostCheck = false;
 
-               @Parameter(names = "-dontFixOrphans", description = "dontFixOrphans", arity = 1)
+               @Parameter(names = "-dontFixOrphans", description = "dontFixOrphans")
                public Boolean dontFixOrphansFlag = false;
 
-               @Parameter(names = "-singleCommits", description = "singleCommits", arity = 1)
-               public Boolean singleCommits = false;
-
-               @Parameter(names = "-dupeCheckOff", description = "dupeCheckOff", arity = 1)
+               @Parameter(names = "-dupeCheckOff", description = "dupeCheckOff")
                public Boolean dupeCheckOff = false;
 
-               @Parameter(names = "-dupeFixOn", description = "dupeFixOn", arity = 1)
+               @Parameter(names = "-dupeFixOn", description = "dupeFixOn")
                public Boolean dupeFixOn = false;
 
-               @Parameter(names = "-ghost2CheckOff", description = "ghost2CheckOff", arity = 1)
+               @Parameter(names = "-ghost2CheckOff", description = "ghost2CheckOff")
                public Boolean ghost2CheckOff = false;
 
-               @Parameter(names = "-ghost2FixOn", description = "ghost2FixOn", arity = 1)
+               @Parameter(names = "-ghost2FixOn", description = "ghost2FixOn")
                public Boolean ghost2FixOn = false;
                
-               @Parameter(names = "-neverUseCache", description = "neverUseCache", arity = 1)
+               @Parameter(names = "-neverUseCache", description = "neverUseCache")
                public Boolean neverUseCache = false;
                
-               @Parameter(names = "-skipEdgeChecks", description = "skipEdgeChecks", arity = 1)
+               @Parameter(names = "-skipEdgeChecks", description = "skipEdgeChecks")
                public Boolean skipEdgeCheckFlag = false;
                
-               @Parameter(names = "-skipIndexUpdateFix", description = "skipIndexUpdateFix", arity = 1)
+               @Parameter(names = "-skipIndexUpdateFix", description = "skipIndexUpdateFix")
                public Boolean skipIndexUpdateFix = false;
                
                @Parameter(names = "-maxFix", description = "maxFix")
-               public int maxRecordsToFix = AAIConstants.AAI_GROOMING_DEFAULT_MAX_FIX;
+               public int maxRecordsToFix = GraphAdminConstants.AAI_GROOMING_DEFAULT_MAX_FIX;
                
                @Parameter(names = "-sleepMinutes", description = "sleepMinutes")
-               public int sleepMinutes = AAIConstants.AAI_GROOMING_DEFAULT_SLEEP_MINUTES;
+               public int sleepMinutes = GraphAdminConstants.AAI_GROOMING_DEFAULT_SLEEP_MINUTES;
                
                // A value of 0 means that we will not have a time-window -- we will look
                                // at all nodes of the passed-in nodeType.
@@ -2926,11 +2971,9 @@ class CommandLineArgs {
                @Parameter(names = "-f", description = "file")
                public String prevFileName = "";
                
-               @Parameter(names = "-singleNodeType", description = "sleepMinutes")
+               @Parameter(names = "-singleNodeType", description = "singleNodeType")
                public String singleNodeType = "";
-
-               Boolean finalShutdownFlag = true;
-               Boolean cacheDbOkFlag = true;
+               
        }
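	// Hedged example (argument values are made up): with "arity = 1" removed
	// above, the Boolean options become plain switches - their presence turns
	// them on, instead of each one needing a trailing true/false value.
	//
	//   CommandLineArgs cArgs = new CommandLineArgs();
	//   new JCommander(cArgs, new String[]{ "-autoFix", "-dupeFixOn", "-maxFix", "30", "-sleepMinutes", "7" });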
 
        public HashMap<String, Vertex> getGhostNodeHash() {
index 85a127f..ff3a6d7 100644 (file)
@@ -85,7 +85,6 @@ public class DataGroomingTasks {
                append("enableedgesonly" , AAIConfig.get("aai.datagrooming.enableedgesonly"), dataGroomingFlagMap);
                append("enableskipedgechecks" , AAIConfig.get("aai.datagrooming.enableskipedgechecks"), dataGroomingFlagMap);
                append("enablemaxfix" , AAIConfig.get("aai.datagrooming.enablemaxfix"), dataGroomingFlagMap);
-               append("enablesinglecommits" , AAIConfig.get("aai.datagrooming.enablesinglecommits"), dataGroomingFlagMap);
                append("enabledupecheckoff" , AAIConfig.get("aai.datagrooming.enabledupecheckoff"), dataGroomingFlagMap);
                append("enableghost2checkoff" , AAIConfig.get("aai.datagrooming.enableghost2checkoff"), dataGroomingFlagMap);
                append("enableghost2fixon" , AAIConfig.get("aai.datagrooming.enableghost2fixon"), dataGroomingFlagMap);
@@ -94,6 +93,7 @@ public class DataGroomingTasks {
                append("timewindowminutesvalue" , AAIConfig.get("aai.datagrooming.timewindowminutesvalue"), dataGroomingFlagMap);
                append("sleepminutesvalue" , AAIConfig.get("aai.datagrooming.sleepminutesvalue"), dataGroomingFlagMap);
                append("maxfixvalue" , AAIConfig.get("aai.datagrooming.maxfixvalue"), dataGroomingFlagMap);
+               // Note: singleNodeType parameter is not used when running from the cron
 
                if(LOGGER.isDebugEnabled()){
                        LOGGER.debug("DataGrooming Flag Values : ");
@@ -113,7 +113,7 @@ public class DataGroomingTasks {
                        }
                        if("true".equals(dataGroomingFlagMap.get("enabletimewindowminutes"))){
                                paramsArray.add("-timeWindowMinutes");                  
-                               paramsArray.add(dataGroomingFlagMap.get("enabletimewindowminutesvalue"));
+                               paramsArray.add(dataGroomingFlagMap.get("timewindowminutesvalue"));
                        }
                        if("true".equals(dataGroomingFlagMap.get("enableskiphostcheck"))){
                                paramsArray.add("-skipHostCheck");
@@ -135,9 +135,6 @@ public class DataGroomingTasks {
                                paramsArray.add("-maxFix"); 
                                paramsArray.add(dataGroomingFlagMap.get("maxfixvalue"));
                        }
-                       if("true".equals(dataGroomingFlagMap.get("enablesinglecommits"))){
-                               paramsArray.add("-singleCommits");
-                       }
                        if("true".equals(dataGroomingFlagMap.get("enabledupecheckoff"))){
                                paramsArray.add("-dupeCheckOff");
                        }
index 12815ee..e7ae5ec 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
-/**
- * ============LICENSE_START=======================================================
- * org.onap.aai
- * ================================================================================
- * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- *
- * ECOMP is a trademark and service mark of AT&T Intellectual Property.
- */
 package org.onap.aai.datasnapshot;
 
 import java.io.ByteArrayOutputStream;
@@ -48,32 +26,34 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.SequenceInputStream;
 import java.util.*;
-
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
-
 import java.util.concurrent.TimeUnit;
-import org.apache.tinkerpop.gremlin.structure.Vertex;
 
+import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.apache.commons.configuration.PropertiesConfiguration;
-
 import org.apache.tinkerpop.gremlin.structure.io.IoCore;
-import org.apache.tinkerpop.gremlin.structure.io.graphson.LegacyGraphSONReader;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.dbmap.AAIGraphConfig;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.logging.ErrorLogHelper;
 import org.onap.aai.util.AAIConfig;
 import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.GraphAdminConstants;
 import org.onap.aai.util.AAISystemExitUtil;
 import org.onap.aai.util.FormatDate;
+import org.onap.aai.util.GraphAdminDBUtils;
 
 import com.att.eelf.configuration.Configuration;
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+import com.beust.jcommander.ParameterException;
+
 import org.janusgraph.core.JanusGraph;
 import org.janusgraph.core.JanusGraphFactory;
 import org.janusgraph.core.util.JanusGraphCleanup;
@@ -87,12 +67,15 @@ public class DataSnapshot {
 
        private static final Set<String> SNAPSHOT_RELOAD_COMMANDS = new HashSet<>();
 
+       private static final String MIGRATION_PROCESS_NAME = "migration";
+
        static {
-           SNAPSHOT_RELOAD_COMMANDS.add("RELOAD_LEGACY_DATA");
                SNAPSHOT_RELOAD_COMMANDS.add("RELOAD_DATA");
                SNAPSHOT_RELOAD_COMMANDS.add("RELOAD_DATA_MULTI");
        }
        
+       private CommandLineArgs cArgs;
+       
        
        /**
         * The main method.
@@ -104,113 +87,195 @@ public class DataSnapshot {
 
            boolean success = true;
 
+               Boolean dbClearFlag = false;
+               JanusGraph graph = null;
+               String command = "JUST_TAKE_SNAPSHOT"; // This is the default
+               String oldSnapshotFileName = "";
+
+               DataSnapshot dataSnapshot = new DataSnapshot();
+               success = dataSnapshot.executeCommand(args, success, dbClearFlag, graph, command,
+                               oldSnapshotFileName);
+               
+               if(success){
+                       AAISystemExitUtil.systemExitCloseAAIGraph(0);
+               } else {
+                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+               }
+
+       }// End of main()
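	// Hedged usage sketch (not part of this change; the argument values are
	// placeholders): main() above simply delegates to executeCommand() with
	// pass-through defaults, so a snapshot can also be driven programmatically.
	static boolean takeDefaultSnapshotExample(String[] args) {
		return new DataSnapshot().executeCommand(args, true, false, null, "JUST_TAKE_SNAPSHOT", "");
	}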
+
+
+       public boolean executeCommand(String[] args, boolean success,
+                       Boolean dbClearFlag, JanusGraph graph, String command,
+                       String oldSnapshotFileName) {
+               
                // Set the logging file properties to be used by EELFManager
                System.setProperty("aai.service.name", DataSnapshot.class.getSimpleName());
                Properties props = System.getProperties();
                props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS);
                props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
                LOGGER = EELFManager.getInstance().getLogger(DataSnapshot.class);
-               Boolean dbClearFlag = false;
-               JanusGraph graph = null;
-               String command = "JUST_TAKE_SNAPSHOT"; // This is the default
-               String oldSnapshotFileName = "";
+               cArgs = new CommandLineArgs();
                
-               Long vertAddDelayMs = 1L;   // Default value
-               Long edgeAddDelayMs = 1L;   // Default value
+               String itemName = "aai.datasnapshot.threads.for.create";
                
-               Long failureDelayMs = 50L;  // Default value
-               Long retryDelayMs = 1500L;  // Default value
-               int maxErrorsPerThread = 25; // Default value
-               Long vertToEdgeProcDelay = 9000L; // Default value 
-               Long staggerThreadDelay = 5000L;  // Default value
-
-               int threadCount = 0;
-               Boolean debugFlag = false;
-               int debugAddDelayTime = 1;  // Default to 1 millisecond
-
-               boolean isExistingTitan = false;
+               try {
+                       String val = AAIConfig.get(itemName);
+                       if( val != null &&  !val.equals("") ){
+                               cArgs.threadCount = Integer.parseInt(val);
+                       }
+               }catch ( Exception e ){
+                       LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage());
+               }
+               int threadCount4Create = cArgs.threadCount;
+               
+               cArgs.snapshotType = "graphson";
+               
+               Long vertAddDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_VERTEX_ADD_DELAY_MS;
+               itemName = "aai.datasnapshot.vertex.add.delay.ms";
+               try {
+                       String val = AAIConfig.get(itemName);
+                       if( val != null &&  !val.equals("") ){
+                               cArgs.vertAddDelayMs = Long.parseLong(val);
+                       }
+               }catch ( Exception e ){
+                       LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage());
+               }
+               
+               Long edgeAddDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_EDGE_ADD_DELAY_MS;
+               itemName = "aai.datasnapshot.edge.add.delay.ms";
+               try {
+                       String val = AAIConfig.get(itemName);
+                       if( val != null &&  !val.equals("") ){
+                               cArgs.edgeAddDelayMs = Long.parseLong(val);
+                       }
+               }catch ( Exception e ){
+                       LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage());
+               }
+               
+               Long failureDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_FAILURE_DELAY_MS;
+               itemName = "aai.datasnapshot.failure.delay.ms";
+               try {
+                       String val = AAIConfig.get(itemName);
+                       if( val != null &&  !val.equals("") ){
+                               cArgs.failureDelayMs = Long.parseLong(val);
+                       }
+               }catch ( Exception e ){
+                       LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage());
+               }
+               
+               Long retryDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_RETRY_DELAY_MS;
+               itemName = "aai.datasnapshot.retry.delay.ms";
+               try {
+                       String val = AAIConfig.get(itemName);
+                       if( val != null &&  !val.equals("") ){
+                               cArgs.retryDelayMs = Long.parseLong(val);
+                       }
+               }catch ( Exception e ){
+                       LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage());
+               }
+               
+               int maxErrorsPerThread = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_MAX_ERRORS_PER_THREAD;
+               itemName = "aai.datasnapshot.max.errors.per.thread";
+               try {
+                       String val = AAIConfig.get(itemName);
+                       if( val != null &&  !val.equals("") ){
+                               cArgs.maxErrorsPerThread = Integer.parseInt(val);
+                       }
+               }catch ( Exception e ){
+                       LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage());
+               }
+               
+               Long vertToEdgeProcDelay = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_VERTEX_TO_EDGE_PROC_DELAY_MS;
+               itemName = "aai.datasnapshot.vertex.to.edge.proc.delay.ms";
+               try {
+                       String val = AAIConfig.get(itemName);
+                       if( val != null &&  !val.equals("") ){
+                               cArgs.vertToEdgeProcDelay = Long.parseLong(val);
+                       }
+               }catch ( Exception e ){
+                       LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage());
+               }
+               
+               itemName = "aai.datasnapshot.stagger.thread.delay.ms";
+               try {
+                       String val = AAIConfig.get(itemName);
+                       if( val != null &&  !val.equals("") ){
+                               cArgs.staggerThreadDelay = Long.parseLong(val);
+                       }
+               }catch ( Exception e ){
+                       LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage());
+               }               
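	// Hypothetical helper, not part of this change (name and placement are
	// assumptions): each of the "read an optional override from
	// aaiconfig.properties" blocks above follows this same pattern.
	private long configLongOrDefault(String itemName, long defaultVal) {
		try {
			String val = AAIConfig.get(itemName);
			if (val != null && !val.equals("")) {
				return Long.parseLong(val);
			}
		} catch (Exception e) {
			LOGGER.warn("WARNING - could not get [" + itemName + "] value from aaiconfig.properties file. " + e.getMessage());
		}
		return defaultVal;
	}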
+       
+               long debugAddDelayTime = 1;  // Default to 1 millisecond
+               Boolean debug4Create = false;  // By default we do not use debugging for snapshot creation
                
+               JCommander jCommander;
+               try {
+                       jCommander = new JCommander(cArgs, args);
+                       jCommander.setProgramName(DataSnapshot.class.getSimpleName());
+               } catch (ParameterException e1) {
+                       LOGGER.error("Error - invalid value passed to list of args - " + Arrays.toString(args));
+                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+               }
+               
+                               
                if (args.length >= 1) {
-                       command = args[0];
+                       command = cArgs.command;
                }
-                       
-               if( SNAPSHOT_RELOAD_COMMANDS.contains(command)){
-                       if (args.length == 2) {
+               
+               String source = cArgs.caller;
+
+        String snapshotType = "graphson";
+               if( SNAPSHOT_RELOAD_COMMANDS.contains(cArgs.command)){
+                       if (args.length >= 2) {
                                // If re-loading, they need to also pass the snapshot file name to use.
                                // We expected the file to be found in our snapshot directory.
-                               oldSnapshotFileName = args[1];
+                               oldSnapshotFileName = cArgs.oldFileName;
+                               snapshotType = cArgs.snapshotType;
                        }
                }
                else if( command.equals("THREADED_SNAPSHOT") ){
-                       if (args.length == 2) {
+                       if (args.length >= 2) {
                                // If doing a "threaded" snapshot, they need to specify how many threads to use
                                try {
-                                       threadCount = Integer.parseInt(args[1]);
+                                       threadCount4Create = cArgs.threadCount;
                                }
                                catch ( NumberFormatException nfe ){
-                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
-                                       LOGGER.debug("Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount passed to DataSnapshot [" + cArgs.threadCount + "]");
+                                       LOGGER.debug("Bad (non-integer) threadCount passed to DataSnapshot [" + cArgs.threadCount + "]");
                                        AAISystemExitUtil.systemExitCloseAAIGraph(1);
                                }
-                               if( threadCount < 1 || threadCount > 100 ){
-                                       ErrorLogHelper.logError("AAI_6128", "Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
-                                       LOGGER.debug("Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
+                               if( threadCount4Create < 1 || threadCount4Create > 100 ){
+                                       ErrorLogHelper.logError("AAI_6128", "Out of range (1-100) threadCount passed to DataSnapshot [" + cArgs.threadCount + "]");
+                                       LOGGER.debug("Out of range (1-100) threadCount passed to DataSnapshot [" + cArgs.threadCount + "]");
                                        AAISystemExitUtil.systemExitCloseAAIGraph(1);
                                }
-                               LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount );
-                       }
-                       else if (args.length == 3) {
+                               LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount4Create );
+                               
                                // If doing a "threaded" snapshot, they need to specify how many threads to use
                                // They can also use debug mode if they pass the word "DEBUG" to do the nodes one at a time to see where it breaks.
-                               try {
-                                       threadCount = Integer.parseInt(args[1]);
-                               }
-                               catch ( NumberFormatException nfe ){
-                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
-                                       LOGGER.debug("Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
-                               }
-                               if( threadCount < 1 || threadCount > 100 ){
-                                       ErrorLogHelper.logError("AAI_6128", "Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
-                                       LOGGER.debug("Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
-                               }
-                               if( args[2].equals("DEBUG") ){
-                                       debugFlag = true;
-                               }
-                               LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount + 
-                                               ", and DEBUG mode set ON. ");
-                       }
-                       else if (args.length == 4) {
-                               // If doing a "threaded" snapshot, they need to specify how many threads to use (param 1)
-                               // They can also use debug mode if they pass the word "DEBUG" to do the nodes one (param 2)
-                               // They can also pass a delayTimer - how many milliseconds to put between each node's ADD (param 3)
-                               try {
-                                       threadCount = Integer.parseInt(args[1]);
+                               if( cArgs.debugFlag.equals("DEBUG") ){
+                                       debug4Create = true;
                                }
-                               catch ( NumberFormatException nfe ){
-                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
-                                       LOGGER.debug("Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
-                               }
-                               if( threadCount < 1 || threadCount > 100 ){
-                                       ErrorLogHelper.logError("AAI_6128", "Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
-                                       LOGGER.debug("Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
-                               }
-                               if( args[2].equals("DEBUG") ){
-                                       debugFlag = true;
-                               }
-                               try {
-                                       debugAddDelayTime = Integer.parseInt(args[3]);
-                               }
-                               catch ( NumberFormatException nfe ){
-                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) debugAddDelayTime passed to DataSnapshot [" + args[3] + "]");
-                                       LOGGER.debug("Bad (non-integer) debugAddDelayTime passed to DataSnapshot [" + args[3] + "]");
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount4Create +
+                                               ", and DEBUG-flag set to: " + debug4Create );
+                               
+                               if (debug4Create) {
+                                       // If doing a "threaded" snapshot, they need to specify how many threads to use (param 1)
+                                       // They can also use debug mode if they pass the word "DEBUG" to do the nodes one (param 2)
+                                       // They can also pass a delayTimer - how many milliseconds to put between each node's ADD (param 3)
+                                       try {
+                                               debugAddDelayTime = cArgs.debugAddDelayTime;
+                                       } catch (NumberFormatException nfe) {
+                                               ErrorLogHelper.logError("AAI_6128",     "Bad (non-integer) debugAddDelayTime passed to DataSnapshot ["
+                                                                               + cArgs.debugAddDelayTime + "]");
+                                               LOGGER.debug("Bad (non-integer) debugAddDelayTime passed to DataSnapshot ["+ cArgs.debugAddDelayTime + "]");
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                                       }
+                                       LOGGER.debug(" Will do Threaded Snapshot with threadCount = "+ threadCount4Create + ", DEBUG-flag set to: "
+                                                       + debug4Create + ", and addDelayTimer = " + debugAddDelayTime + " mSec. ");
                                }
-                               LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount + 
-                                               ", DEBUG mode ON and addDelayTimer = " + debugAddDelayTime + " mSec. ");
                        }
                        else {
                                ErrorLogHelper.logError("AAI_6128", "Wrong param count (should be 2,3 or 4) when using THREADED_SNAPSHOT.");
@@ -219,37 +284,32 @@ public class DataSnapshot {
                        }
                }
                else if( command.equals("MULTITHREAD_RELOAD") ){
-                       // Note - this will use as many threads as the snapshot file is 
+                       // Note - this will use as many threads as the snapshot file is
                        //   broken up into.  (up to a limit)
-                       if (args.length == 2) {
+                       if (args.length >= 2) {
                                // Since they are re-loading, they need to pass the snapshot file name to use.
                                // We expected the file to be found in our snapshot directory.  Note - if
                                // it is a multi-part snapshot, then this should be the root of the name.
                                // We will be using the default delay timers.
-                               oldSnapshotFileName = args[1];
-                       }
-                       else if (args.length == 7) {
-                               // Since they are re-loading, they need to pass the snapshot file name to use.
-                               // We expected the file to be found in our snapshot directory.  Note - if
-                               // it is a multi-part snapshot, then this should be the root of the name.
-                               oldSnapshotFileName = args[1];
+                               oldSnapshotFileName = cArgs.oldFileName;
+                               
                                // They should be passing the timers in in this order:
                                //    vertDelay, edgeDelay, failureDelay, retryDelay
-                               vertAddDelayMs = Long.parseLong(args[2]);
-                               edgeAddDelayMs = Long.parseLong(args[3]);
-                               failureDelayMs = Long.parseLong(args[4]);
-                               retryDelayMs = Long.parseLong(args[5]);
+                               vertAddDelayMs = cArgs.vertAddDelayMs;
+                               edgeAddDelayMs = cArgs.edgeAddDelayMs;
+                               failureDelayMs = cArgs.failureDelayMs;
+                               retryDelayMs = cArgs.retryDelayMs;
                                try {
-                                       maxErrorsPerThread = Integer.parseInt(args[6]);
+                                       maxErrorsPerThread = cArgs.maxErrorsPerThread;
                                }
                                catch ( NumberFormatException nfe ){
-                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]");
-                                       LOGGER.debug("Bad (non-integer) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]");
+                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) maxErrorsPerThread passed to DataSnapshot [" + cArgs.maxErrorsPerThread + "]");
+                                       LOGGER.debug("Bad (non-integer) maxErrorsPerThread passed to DataSnapshot [" + cArgs.maxErrorsPerThread + "]");
                                        AAISystemExitUtil.systemExitCloseAAIGraph(1);
                                }
                                if( maxErrorsPerThread < 1  ){
-                                       ErrorLogHelper.logError("AAI_6128", "Out of range (>0) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]");
-                                       LOGGER.debug("Out of range (>0) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]");
+                                       ErrorLogHelper.logError("AAI_6128", "Out of range (>0) maxErrorsPerThread passed to DataSnapshot [" + cArgs.maxErrorsPerThread + "]");
+                                       LOGGER.debug("Out of range (>0) maxErrorsPerThread passed to DataSnapshot [" + cArgs.maxErrorsPerThread + "]");
                                        AAISystemExitUtil.systemExitCloseAAIGraph(1);
                                }
                        }
@@ -261,16 +321,37 @@ public class DataSnapshot {
                }
                else if (command.equals("CLEAR_ENTIRE_DATABASE")) {
                        if (args.length >= 2) {
-                               oldSnapshotFileName = args[1];
-                       }
-                       if (args.length == 3) {
-                               String titanFlag = args[2];
-                               if ("titan".equalsIgnoreCase(titanFlag)) {
-                                       isExistingTitan = true;
-                               }
+                               oldSnapshotFileName = cArgs.oldFileName;
                        }
                }
 
+               
+               //Print Defaults
+               LOGGER.info("DataSnapshot command is [" + cArgs.command + "]");
+               LOGGER.info("File name to reload snapshot [" + cArgs.oldFileName + "]");
+               LOGGER.info("snapshotType is [" + cArgs.snapshotType + "]");
+               LOGGER.info("Thread count is [" + cArgs.threadCount + "]");
+               LOGGER.info("Debug Flag is [" + cArgs.debugFlag + "]");
+               LOGGER.info("DebugAddDelayTimer is [" + cArgs.debugAddDelayTime + "]");
+               LOGGER.info("VertAddDelayMs is [" + cArgs.vertAddDelayMs + "]");
+               LOGGER.info("FailureDelayMs is [" + cArgs.failureDelayMs + "]");
+               LOGGER.info("RetryDelayMs is [" + cArgs.retryDelayMs + "]");
+               LOGGER.info("MaxErrorsPerThread is [" + cArgs.maxErrorsPerThread + "]");
+               LOGGER.info("VertToEdgeProcDelay is [" + cArgs.vertToEdgeProcDelay + "]");
+               LOGGER.info("StaggerThreadDelay is [" + cArgs.staggerThreadDelay + "]");
+               LOGGER.info("Caller process is ["+ cArgs.caller + "]");
+               
+               //Print non-default values
+               if (!AAIConfig.isEmpty(cArgs.fileName)){
+                       LOGGER.info("Snapshot file name (if not default) to use is [" + cArgs.fileName + "]");
+               }
+               if (!AAIConfig.isEmpty(cArgs.snapshotDir)){
+                       LOGGER.info("Snapshot file Directory path (if not default) to use is [" + cArgs.snapshotDir + "]");
+               }
+               if (!AAIConfig.isEmpty(cArgs.oldFileDir)){
+                       LOGGER.info("Directory path (if not default) to load the old snapshot file from is [" + cArgs.oldFileDir + "]");
+               }
+               
                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                try {
                        
@@ -278,43 +359,60 @@ public class DataSnapshot {
                        ErrorLogHelper.loadProperties();
                        LOGGER.debug("Command = " + command + ", oldSnapshotFileName = [" + oldSnapshotFileName + "].");
                        String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataSnapshots";
-
                        // Make sure the dataSnapshots directory is there
                        new File(targetDir).mkdirs();
 
                        LOGGER.debug("    ---- NOTE --- about to open graph (takes a little while) ");
                        
-                       if (command.equals("JUST_TAKE_SNAPSHOT")) {
-                               // ------------------------------------------
-                               // They just want to take a snapshot.
-                               // ------------------------------------------
+                       if ( (command.equals("THREADED_SNAPSHOT") || command.equals("JUST_TAKE_SNAPSHOT"))
+                                       && threadCount4Create == 1 ){
+                               // -------------------------------------------------------------------------------
+                               // They want to take a snapshot on a single thread and have it go in a single file
+                               //   NOTE - they can't use the DEBUG option in this case.
+                               // -------------------------------------------------------------------------------
+                               LOGGER.debug("\n>>> Command = " + command );
                                verifyGraph(AAIGraph.getInstance().getGraph());
                                FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
                                String dteStr = fd.getDateTime();
-                               String newSnapshotOutFname = targetDir + AAIConstants.AAI_FILESEP + "dataSnapshot.graphSON." + dteStr;
                                graph = AAIGraph.getInstance().getGraph();
-
+                               GraphAdminDBUtils.logConfigs(graph.configuration());
+                               String newSnapshotOutFname = null;
+                               long timeA = System.nanoTime();
+                               newSnapshotOutFname = targetDir + AAIConstants.AAI_FILESEP + "dataSnapshot.graphSON." + dteStr;
                                graph.io(IoCore.graphson()).writeGraph(newSnapshotOutFname);
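+                               // NOTE: this single-file path always writes graphson, regardless of the snapshotType option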
-
                                LOGGER.debug("Snapshot written to " + newSnapshotOutFname);
+                               long timeB = System.nanoTime();
+                               long diffTime =  timeB - timeA;
+                               long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
+                               long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
+                               LOGGER.debug("    -- Single-Thread dataSnapshot took: " +
+                                               minCount + " minutes, " + secCount + " seconds " );
        
                        }       
-                       else if (command.equals("THREADED_SNAPSHOT")) {
-                                       // ---------------------------------------------------------------------
-                                       // They want the creation of the snapshot to be spread out via threads
-                                       // ---------------------------------------------------------------------
-                                       
+                       else if ( (command.equals("THREADED_SNAPSHOT") || command.equals("JUST_TAKE_SNAPSHOT")) 
+                                       && threadCount4Create > 1 ){
+                                       // ------------------------------------------------------------
+                                       // They want the creation of the snapshot to be spread out via 
+                                       //    threads and go to multiple files
+                                       // ------------------------------------------------------------
+                                       LOGGER.debug("\n>>> Command = " + command );
+                                       String newSnapshotOutFname;
+                                       if (!AAIConfig.isEmpty(cArgs.fileName)){
+                                               newSnapshotOutFname = cArgs.fileName;
+                                       } else {
                                        FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
                                        String dteStr = fd.getDateTime();
-                                       String newSnapshotOutFname = targetDir + AAIConstants.AAI_FILESEP + "dataSnapshot.graphSON." + dteStr;
+                                       newSnapshotOutFname = targetDir + AAIConstants.AAI_FILESEP + "dataSnapshot.graphSON." + dteStr;
+                                       }
                                        verifyGraph(AAIGraph.getInstance().getGraph());
                                        graph = AAIGraph.getInstance().getGraph();
                                        LOGGER.debug(" Successfully got the Graph instance. ");
+                                       GraphAdminDBUtils.logConfigs(graph.configuration());
                                        long timeA = System.nanoTime();
 
-                                       LOGGER.debug(" Need to divide vertexIds across this many threads: " + threadCount );
+                                       LOGGER.debug(" Need to divide vertexIds across this many threads: " + threadCount4Create );
                                        HashMap <String,ArrayList> vertListHash = new HashMap <String,ArrayList> ();
-                                       for( int t = 0; t < threadCount; t++ ){
+                                       for( int t = 0; t < threadCount4Create; t++ ){
                                                ArrayList <Vertex> vList = new ArrayList <Vertex> ();
                                                String tk = "" + t;
                                                vertListHash.put( tk, vList);
@@ -322,8 +420,8 @@ public class DataSnapshot {
                                        LOGGER.debug("Count how many nodes are in the db. ");
                                        long totalVertCount = graph.traversal().V().count().next();
                                        LOGGER.debug(" Total Count of Nodes in DB = " + totalVertCount + ".");
-                                       long nodesPerFile = totalVertCount / threadCount;
-                                       LOGGER.debug(" Thread count = " + threadCount + ", each file will get (roughly): " + nodesPerFile + " nodes.");
+                                       long nodesPerFile = totalVertCount / threadCount4Create;
+                                       LOGGER.debug(" Thread count = " + threadCount4Create + ", each file will get (roughly): " + nodesPerFile + " nodes.");
                                        long timeA2 = System.nanoTime();
                                        long diffTime =  timeA2 - timeA;
                                        long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
@@ -340,7 +438,7 @@ public class DataSnapshot {
                                                // Divide up all the vertices so we can process them on different threads
                                                vtxIndex++;
                                                thisThrIndex++;
-                                               if( (thisThrIndex > nodesPerFile) && (currentTNum < threadCount -1) ){
+                                               if( (thisThrIndex > nodesPerFile) && (currentTNum < threadCount4Create -1) ){
                                                        // We will need to start adding to the Hash for the next thread
                                                        currentTNum++;
                                                        currentTKey = "" + currentTNum;
@@ -355,20 +453,20 @@ public class DataSnapshot {
                                        secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
                                        LOGGER.debug("    -- To Loop over all vertices, and put them into sub-Arrays it took: " +
                                                        minCount + " minutes, " + secCount + " seconds " );
-                                       
+
                                        // Need to print out each set of vertices using its own thread
                                        ArrayList <Thread> threadArr = new ArrayList <Thread> ();
-                                       for( int thNum = 0; thNum < threadCount; thNum++ ){
+                                       for( int thNum = 0; thNum < threadCount4Create; thNum++ ){
                                                String thNumStr = "" + thNum;
                                                String subFName = newSnapshotOutFname + ".P" + thNumStr;
                                                Thread thr = new Thread(new PrintVertexDetails(graph, subFName, vertListHash.get(thNumStr),
-                                                               debugFlag, debugAddDelayTime) );
+                                                               debug4Create, debugAddDelayTime, snapshotType) );
                                                thr.start();
                                                threadArr.add(thr);
                                        }
                                        
                                        // Make sure all the threads finish before moving on.
-                                       for( int thNum = 0; thNum < threadCount; thNum++ ){
+                                       for( int thNum = 0; thNum < threadCount4Create; thNum++ ){
                                                if( null != threadArr.get(thNum) ){
                                                        (threadArr.get(thNum)).join();
                                                }
@@ -380,170 +478,173 @@ public class DataSnapshot {
                                        secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
                                        LOGGER.debug("   -- To write all the data out to snapshot files, it took: " +
                                                        minCount + " minutes, " + secCount + " seconds " );
-                       
+
                                        
-                       } else if( command.equals("MULTITHREAD_RELOAD") ){              
+                       } else if( command.equals("MULTITHREAD_RELOAD") ){
                                // ---------------------------------------------------------------------
                                // They want the RELOAD of the snapshot to be spread out via threads
                                // NOTE - it will only use as many threads as the number of files the
                                //    snapshot is  written to.  Ie. if you have a single-file snapshot,
                                //    then this will be single-threaded.
-                               //      
+                               //
+                               LOGGER.debug("\n>>> Command = " + command );
+                               
+                               if (!AAIConfig.isEmpty(cArgs.oldFileDir)){
+                                       targetDir = cArgs.oldFileDir;
+                               }
                                ArrayList <File> snapFilesArr = getFilesToProcess(targetDir, oldSnapshotFileName, false);
                                int fCount = snapFilesArr.size();
-                               Iterator <File> fItr = snapFilesArr.iterator();
-                               
                                JanusGraph graph1 = AAIGraph.getInstance().getGraph();
                                long timeStart = System.nanoTime();
-                               
+
                                HashMap <String,String> old2NewVertIdMap = new <String,String> HashMap ();
-                               
+
                                        // We're going to try loading in the vertices - without edges or properties
                                        //    using Separate threads
-                                       
+
                                        ExecutorService executor = Executors.newFixedThreadPool(fCount);
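+                                       // Pool size equals the number of snapshot files (fCount) - one vertex-loader thread per file, per the NOTE above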
                                        List<Future<HashMap<String,String>>> list = new ArrayList<Future<HashMap<String,String>>>();
-                                       
+
                                        for( int i=0; i < fCount; i++ ){
                                                File f = snapFilesArr.get(i);
                                                String fname = f.getName();
                                                String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname;
-                                               Thread.sleep(staggerThreadDelay);  // Stagger the threads a bit
+                                               Thread.sleep(cArgs.staggerThreadDelay);  // Stagger the threads a bit
                                                LOGGER.debug(" -- Read file: [" + fullSnapName + "]");
                                                LOGGER.debug(" -- Call the PartialVertexLoader to just load vertices  ----");
-                                               LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs 
-                                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs 
+                                               LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs
+                                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs
                                                                + ", maxErrorsPerThread = " + maxErrorsPerThread );
-                                               Callable <HashMap<String,String>> vLoader = new PartialVertexLoader(graph1, fullSnapName, 
+                                               Callable <HashMap<String,String>> vLoader = new PartialVertexLoader(graph1, fullSnapName,
                                                                vertAddDelayMs, failureDelayMs, retryDelayMs, maxErrorsPerThread, LOGGER);
                                                Future <HashMap<String,String>> future = (Future<HashMap<String, String>>) executor.submit(vLoader);
-                                               
+
                                                // add Future to the list, we can get return value using Future
                                                list.add(future);
                                                LOGGER.debug(" --  Starting PartialDbLoad VERT_ONLY thread # "+ i );
                                        }
-                                       
-                                       threadCount = 0;
+
+                                       int threadCount4Reload = 0;
                                        int threadFailCount = 0;
                                        for(Future<HashMap<String,String>> fut : list){
-                               threadCount++;
+                               threadCount4Reload++;
                                try {
                                        old2NewVertIdMap.putAll(fut.get());
-                                       LOGGER.debug(" -- back from PartialVertexLoader.  returned thread # " + threadCount +
+                                       LOGGER.debug(" -- back from PartialVertexLoader.  returned thread # " + threadCount4Reload +
                                                        ", current size of old2NewVertMap is: " + old2NewVertIdMap.size() );
-                               } 
-                               catch (InterruptedException e) {  
+                               }
+                               catch (InterruptedException e) {
                                        threadFailCount++;
                                        e.printStackTrace();
-                               } 
+                               }
                                catch (ExecutionException e) {
                                        threadFailCount++;
                                        e.printStackTrace();
                                }
-                           }                       
-                                       
+                           }
+
                                        executor.shutdown();
-                                       
+
                                        if( threadFailCount > 0 ) {
                                                String emsg = " FAILURE >> " + threadFailCount + " Vertex-loader thread(s) failed to complete successfully.  ";
                                                LOGGER.debug(emsg);
                                                throw new Exception( emsg );
                                        }
-                                       
+
                                        long timeX = System.nanoTime();
                                        long diffTime =  timeX - timeStart;
                                        long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
                                        long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
                                        LOGGER.debug("   -- To reload just the vertex ids from the snapshot files, it took: " +
                                                        minCount + " minutes, " + secCount + " seconds " );
-                                                       
+
                                        // Give the DB a little time to chew on all those vertices
                                        Thread.sleep(vertToEdgeProcDelay);
-                                       
+
                                        // ----------------------------------------------------------------------------------------
                                        LOGGER.debug("\n\n\n  -- Now do the edges/props ----------------------");
                                        // ----------------------------------------------------------------------------------------
-                                       
-                                                               
+
+
                                        // We're going to try loading in the edges and missing properties
                                        // Note - we're passing the whole oldVid2newVid mapping to the PartialPropAndEdgeLoader
                                        //     so that the String-updates to the GraphSON will happen in the threads instead of
                                        //     here in the un-threaded calling method.
-                                       executor = Executors.newFixedThreadPool(fCount);        
+                                       executor = Executors.newFixedThreadPool(fCount);
                                        ArrayList<Future<ArrayList<String>>> listEdg = new ArrayList<Future<ArrayList<String>>>();
                                        for( int i=0; i < fCount; i++ ){
                                                File f = snapFilesArr.get(i);
                                                String fname = f.getName();
                                                String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname;
-                                               Thread.sleep(staggerThreadDelay);  // Stagger the threads a bit
+                                               Thread.sleep(cArgs.staggerThreadDelay);  // Stagger the threads a bit
                                                LOGGER.debug(" -- Read file: [" + fullSnapName + "]");
                                                LOGGER.debug(" -- Call the PartialPropAndEdgeLoader for Properties and EDGEs  ----");
-                                               LOGGER.debug(" -- edgeAddDelayMs = " + vertAddDelayMs 
-                                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs 
+                                               LOGGER.debug(" -- edgeAddDelayMs = " + vertAddDelayMs
+                                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs
                                                                + ", maxErrorsPerThread = " + maxErrorsPerThread );
-                                               
-                                               Callable  eLoader = new PartialPropAndEdgeLoader(graph1, fullSnapName, 
-                                                               edgeAddDelayMs, failureDelayMs, retryDelayMs, 
+
+                                               Callable  eLoader = new PartialPropAndEdgeLoader(graph1, fullSnapName,
+                                                               edgeAddDelayMs, failureDelayMs, retryDelayMs,
                                                                old2NewVertIdMap, maxErrorsPerThread, LOGGER);
                                                Future <ArrayList<String>> future = (Future<ArrayList<String>>) executor.submit(eLoader);
-                                               
+
                                                //add Future to the list, we can get return value using Future
                                                listEdg.add(future);
                                                LOGGER.debug(" --  Starting PartialPropAndEdge thread # "+ i );
                                        }
-                                               
-                                       threadCount = 0;
+
+                                       threadCount4Reload = 0;
                                        for(Future<ArrayList<String>> fut : listEdg){
-                                   threadCount++;
+                                   threadCount4Reload++;
                                    try{
                                        fut.get();  // DEBUG -- should be doing something with the return value if it's not empty - ie. errors
-                                       LOGGER.debug(" -- back from PartialPropAndEdgeLoader.  thread # " + threadCount  );
-                                   } 
-                                               catch (InterruptedException e) {  
+                                       LOGGER.debug(" -- back from PartialPropAndEdgeLoader.  thread # " + threadCount4Reload  );
+                                   }
+                                               catch (InterruptedException e) {
                                                        threadFailCount++;
                                                        e.printStackTrace();
-                                               } 
+                                               }
                                                catch (ExecutionException e) {
                                                        threadFailCount++;
                                                        e.printStackTrace();
                                                }
-                                       }   
-                                       
+                                       }
+
                                        executor.shutdown();
-                                                                       
+
                                        if( threadFailCount > 0 ) {
                                                String emsg = " FAILURE >> " + threadFailCount + " Property/Edge-loader thread(s) failed to complete successfully.  ";
                                                LOGGER.debug(emsg);
                                                throw new Exception( emsg );
                                        }
-                                       
+
                                        // This is needed so we can see the data committed by the called threads
                                        graph1.tx().commit();
-                                        
+
                                        long timeEnd = System.nanoTime();
                                        diffTime =  timeEnd - timeX;
                                        minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
                                        secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
                                        LOGGER.debug("   -- To reload the edges and properties from snapshot files, it took: " +
                                                        minCount + " minutes, " + secCount + " seconds " );
-                                       
+
                                        long totalDiffTime =  timeEnd - timeStart;
                                        long totalMinCount = TimeUnit.NANOSECONDS.toMinutes(totalDiffTime);
                                        long totalSecCount = TimeUnit.NANOSECONDS.toSeconds(totalDiffTime) - (60 * totalMinCount);
                                        LOGGER.debug("   -- TOTAL multi-threaded reload time: " +
                                                        totalMinCount + " minutes, " + totalSecCount + " seconds " );
-                                       
+
                        } else if (command.equals("CLEAR_ENTIRE_DATABASE")) {
                                // ------------------------------------------------------------------
                                // They are calling this to clear the db before re-loading it
                                // later
                                // ------------------------------------------------------------------
-
+                               LOGGER.debug("\n>>> Command = " + command );
                                // First - make sure the backup file(s) they will be using can be
                                // found and has(have) data.
                                // getFilesToProcess makes sure the file(s) exist and have some data.
                                getFilesToProcess(targetDir, oldSnapshotFileName, true);
-                               
+
                                LOGGER.debug("\n>>> WARNING <<<< ");
                                LOGGER.debug(">>> All data and schema in this database will be removed at this point. <<<");
                                LOGGER.debug(">>> Processing will begin in 5 seconds. <<<");
@@ -562,118 +663,64 @@ public class DataSnapshot {
                                String serviceName = System.getProperty("aai.service.name", "NA");
                        LOGGER.debug("Getting new configs for clearing");
                                PropertiesConfiguration propertiesConfiguration = new AAIGraphConfig.Builder(rtConfig).forService(serviceName).withGraphType(REALTIME_DB).buildConfiguration();
-                               if(isExistingTitan){
-                                       LOGGER.debug("Existing DB is Titan");
-                                       propertiesConfiguration.setProperty("graph.titan-version","1.0.0");
-                               }
                                LOGGER.debug("Open New Janus Graph");
                                JanusGraph janusGraph = JanusGraphFactory.open(propertiesConfiguration);
                                verifyGraph(janusGraph);
-
-                               if(isExistingTitan){
-                                       JanusGraphFactory.drop(janusGraph);
-                               } else {
-                                       janusGraph.close();
-                                       JanusGraphCleanup.clear(janusGraph);
-                               }
+                               GraphAdminDBUtils.logConfigs(janusGraph.configuration());
+                               janusGraph.close();
+                               JanusGraphCleanup.clear(janusGraph);
                                LOGGER.debug(" Done clearing data. ");
                                LOGGER.debug(">>> IMPORTANT - NOTE >>> you need to run the SchemaGenerator (use GenTester) before ");
                                LOGGER.debug("     reloading data or the data will be put in without indexes. ");
                                dbClearFlag = true;
                                LOGGER.debug("All done clearing DB");
                                
-                       } else if (command.equals("RELOAD_LEGACY_DATA")) {
-                               // -------------------------------------------------------------------
-                               // They want to restore the database from an old snapshot file
-                               // -------------------------------------------------------------------
+                       } else if (command.equals("RELOAD_DATA")) {
+                               // ---------------------------------------------------------------------------
+                               // They want to restore the database from either a single file, or a group
+                               // of snapshot files.  Either way, this command will restore via single
+                               // threaded processing.
+                               // ---------------------------------------------------------------------------
+                               LOGGER.debug("\n>>> Command = " + command );
                                verifyGraph(AAIGraph.getInstance().getGraph());
                                graph = AAIGraph.getInstance().getGraph();
+                               GraphAdminDBUtils.logConfigs(graph.configuration());
                                if (oldSnapshotFileName.equals("")) {
-                                       String emsg = "No oldSnapshotFileName passed to DataSnapshot when RELOAD_LEGACY_DATA used.";
-                                       LOGGER.debug(emsg);
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
-                               }
-                               String oldSnapshotFullFname = targetDir + AAIConstants.AAI_FILESEP + oldSnapshotFileName;
-                               File f = new File(oldSnapshotFullFname);
-                               if (!f.exists()) {
-                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be found.";
-                                       LOGGER.debug(emsg);
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
-                               } else if (!f.canRead()) {
-                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be read.";
-                                       LOGGER.debug(emsg);
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
-                               } else if (f.length() == 0) {
-                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " had no data.";
+                                       String emsg = "No oldSnapshotFileName passed to DataSnapshot when RELOAD_DATA used.";
                                        LOGGER.debug(emsg);
                                        AAISystemExitUtil.systemExitCloseAAIGraph(1);
                                }
-
-                               LOGGER.debug("We will load data IN from the file = " + oldSnapshotFullFname);
-                               LOGGER.debug(" Begin reloading JanusGraph 0.5 data. ");
-                               
-                               LegacyGraphSONReader lgr = LegacyGraphSONReader.build().create();
-                               InputStream is = new FileInputStream(oldSnapshotFullFname);
-                               lgr.readGraph(is, graph);
                                
-                               LOGGER.debug("Completed the inputGraph command, now try to commit()... ");
-                               graph.tx().commit();
-                               LOGGER.debug("Completed reloading JanusGraph 0.5 data.");
+                               long timeA = System.nanoTime();
 
-                               long vCount = graph.traversal().V().count().next();
-                               LOGGER.debug("A little after repopulating from an old snapshot, we see: " + vCount + " vertices in the db.");
-                       } else if (command.equals("RELOAD_DATA")) {
-                               // -------------------------------------------------------------------
-                               // They want to restore the database from an old snapshot file
-                               // -------------------------------------------------------------------
-                               verifyGraph(AAIGraph.getInstance().getGraph());
-                               graph = AAIGraph.getInstance().getGraph();
-                               if (oldSnapshotFileName.equals("")) {
-                                       String emsg = "No oldSnapshotFileName passed to DataSnapshot when RELOAD_DATA used.";
-                                       LOGGER.debug(emsg);
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               ArrayList <File> snapFilesArr = new ArrayList <File> ();
+
+                               // First, see if this is a single file (ie. no ".P#" suffix)
+                               String onePieceSnapshotFname = targetDir + AAIConstants.AAI_FILESEP + oldSnapshotFileName;
+                               File sf = new File(onePieceSnapshotFname);
+                               if( sf.exists() ){
+                                       snapFilesArr.add(sf);
                                }
-                               String oldSnapshotFullFname = targetDir + AAIConstants.AAI_FILESEP + oldSnapshotFileName;
-                               File f = new File(oldSnapshotFullFname);
-                               if (!f.exists()) {
-                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be found.";
-                                       LOGGER.debug(emsg);
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
-                               } else if (!f.canRead()) {
-                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be read.";
-                                       LOGGER.debug(emsg);
-                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
-                               } else if (f.length() == 0) {
-                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " had no data.";
+                               else {
+                                       // If it's a multi-part snapshot, then collect all the files for it
+                                       String thisSnapPrefix = oldSnapshotFileName + ".P";
+                                       File fDir = new File(targetDir); // Snapshot directory
+                                       File[] allFilesArr = fDir.listFiles();
+                                       for (File snapFile : allFilesArr) {
+                                               String snapFName = snapFile.getName();
+                                               if( snapFName.startsWith(thisSnapPrefix)){
+                                                       snapFilesArr.add(snapFile);
+                                               }
+                                       }
+                               }
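+                               // Multi-file snapshots are written by the threaded snapshot code as <name>.P0, <name>.P1, etc.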
+                               
+                               if( snapFilesArr.isEmpty() ){
+                                       String emsg = "oldSnapshotFile " + onePieceSnapshotFname + " (with or without .P0) could not be found.";
                                        LOGGER.debug(emsg);
                                        AAISystemExitUtil.systemExitCloseAAIGraph(1);
                                }
-
-                               LOGGER.debug("We will load data IN from the file = " + oldSnapshotFullFname);
-                               LOGGER.debug(" Begin reloading data. ");
-                               graph.io(IoCore.graphson()).readGraph(oldSnapshotFullFname);
-                               LOGGER.debug("Completed the inputGraph command, now try to commit()... ");
-                               graph.tx().commit();
-                               LOGGER.debug("Completed reloading data.");
-
-                               long vCount = graph.traversal().V().count().next();
-                               
-                               LOGGER.debug("A little after repopulating from an old snapshot, we see: " + vCount + " vertices in the db.");
-                               
-                       } else if (command.equals("RELOAD_DATA_MULTI")) {
-                               // -------------------------------------------------------------------
-                               // They want to restore the database from a group of snapshot files
-                               // Note - this uses multiple snapshot files, but runs single-threaded.
-                               // -------------------------------------------------------------------
-                               verifyGraph(AAIGraph.getInstance().getGraph());
-                               graph = AAIGraph.getInstance().getGraph();
-                               
-                               ArrayList <File> snapFilesArr = getFilesToProcess(targetDir, oldSnapshotFileName, false);
-                               
-                               long timeA = System.nanoTime();
                                
                                int fCount = snapFilesArr.size();
-                               Iterator <File> fItr = snapFilesArr.iterator();
                                Vector<InputStream> inputStreamsV = new Vector<>();                  
                                for( int i = 0; i < fCount; i++ ){
                                        File f = snapFilesArr.get(i);
@@ -691,15 +738,20 @@ public class DataSnapshot {
                                        InputStream fis = new FileInputStream(fullFName);
                                        inputStreamsV.add(fis);
                                }
+
                                // Chain the per-file streams together - inputStreamsV.elements() returns an Enumeration
                            //    that the SequenceInputStream reads from in order
                            InputStream sis = new SequenceInputStream(inputStreamsV.elements());
                            LOGGER.debug("Begin loading data from " + fCount + " files  -----");
-                               graph.io(IoCore.graphson()).reader().create().readGraph(sis, graph);  
+                           if("gryo".equalsIgnoreCase(snapshotType)){
+                                       graph.io(IoCore.gryo()).reader().create().readGraph(sis, graph);
+                               } else {
+                                       graph.io(IoCore.graphson()).reader().create().readGraph(sis, graph);
+                               }
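+                           // NOTE: snapshotType must match the format the snapshot was written with - a gryo file cannot be read back as graphson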
                                LOGGER.debug("Completed the inputGraph command, now try to commit()... ");
                                graph.tx().commit();
-                               LOGGER.debug(" >> Completed reloading data.");
-                               
+                               LOGGER.debug("Completed reloading data.");
+
                                long vCount = graph.traversal().V().count().next();
                                LOGGER.debug("A little after repopulating from an old snapshot, we see: " + vCount + " vertices in the db.");
                                
@@ -710,7 +762,8 @@ public class DataSnapshot {
                                LOGGER.debug("    -- To Reload this snapshot, it took: " +
                                                minCount + " minutes, " + secCount + " seconds " );
                                
-                               
+                               LOGGER.debug("A little after repopulating from an old snapshot, we see: " + vCount + " vertices in the db.");
+
                        } else {
                                String emsg = "Bad command passed to DataSnapshot: [" + command + "]";
                                LOGGER.debug(emsg);
@@ -728,7 +781,7 @@ public class DataSnapshot {
                        ex.printStackTrace();
                        success = false;
                } finally {
-                       if (!dbClearFlag && graph != null) {
+                       if (!dbClearFlag && graph != null && !MIGRATION_PROCESS_NAME.equalsIgnoreCase(source)) {
                                // Any changes that worked correctly should have already done
                                // their commits.
                                if(!"true".equals(System.getProperty("org.onap.aai.graphadmin.started"))) {
@@ -744,18 +797,13 @@ public class DataSnapshot {
                        }
                }
 
-               if(success){
-                       AAISystemExitUtil.systemExitCloseAAIGraph(0);
-               } else {
-                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
-               }
-
-       }// End of main()
-       
+               return success;
+       }
        
+
        private static ArrayList <File> getFilesToProcess(String targetDir, String oldSnapshotFileName, boolean doingClearDb)
                throws Exception {
-       
+
                if( oldSnapshotFileName == null || oldSnapshotFileName.equals("") ){
                        String emsg = "No oldSnapshotFileName passed to DataSnapshot for Reload.  ";
                        if( doingClearDb ) {
@@ -764,9 +812,9 @@ public class DataSnapshot {
                        LOGGER.debug(emsg);
                        throw new Exception( emsg );
                }
-       
+
                ArrayList <File> snapFilesArrList = new ArrayList <File> ();
-               
+
                // First, we'll assume that this is a multi-file snapshot and
                //    look for names based on that.
                String thisSnapPrefix = oldSnapshotFileName + ".P";
@@ -787,9 +835,9 @@ public class DataSnapshot {
                                snapFilesArrList.add(snapFile);
                        }
                }
-       
+
                if( snapFilesArrList.isEmpty() ){
-                       // Multi-file snapshot check did not find files, so this may 
+                       // Multi-file snapshot check did not find files, so this may
                        //   be a single-file snapshot.
                        String oldSnapshotFullFname = targetDir + AAIConstants.AAI_FILESEP + oldSnapshotFileName;
                        File f = new File(oldSnapshotFullFname);
@@ -808,7 +856,7 @@ public class DataSnapshot {
                        }
                        snapFilesArrList.add(f);
                }
-               
+
                if( snapFilesArrList.isEmpty() ){
                        // Still haven't found anything..  that was not a good file name.
                        String fullFName = targetDir + AAIConstants.AAI_FILESEP + thisSnapPrefix;
@@ -816,11 +864,11 @@ public class DataSnapshot {
                        LOGGER.debug(emsg);
                        throw new Exception(emsg);
                }
-               
+
                return snapFilesArrList;
        }
-       
-       
+
+
        public static void verifyGraph(JanusGraph graph) {
 
                if (graph == null) {
@@ -831,5 +879,64 @@ public class DataSnapshot {
 
        }
 
+       class CommandLineArgs {
+
+               
+
+               @Parameter(names = "--help", help = true)
+               public boolean help;
+
+               @Parameter(names = "-c", description = "command for taking data snapshot")
+               public String command = "JUST_TAKE_SNAPSHOT";
 
+               @Parameter(names = "-f", description = "previous snapshot file to reload")
+               public String oldFileName = "";
+
+               @Parameter(names = "-snapshotType", description = "snapshot type of gryo or graphson")
+               public String snapshotType = "graphson";
+
+               @Parameter(names = "-threadCount", description = "thread count for create")
+               public int threadCount = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_THREADS_FOR_CREATE;
+
+               @Parameter(names = "-debugFlag", description = "DEBUG flag")
+               public String debugFlag = "";
+
+               @Parameter(names = "-debugAddDelayTime", description = "delay in ms between each Add for debug mode")
+               public long debugAddDelayTime = 1L;
+               
+               @Parameter(names = "-vertAddDelayMs", description = "delay in ms while adding each vertex")
+               public long vertAddDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_VERTEX_ADD_DELAY_MS.longValue();
+               
+               @Parameter(names = "-edgeAddDelayMs", description = "delay in ms while adding each edge")
+               public long edgeAddDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_EDGE_ADD_DELAY_MS.longValue();
+               
+               @Parameter(names = "-failureDelayMs", description = "delay in ms when failure to load vertex or edge in snapshot")
+               public long failureDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_FAILURE_DELAY_MS.longValue();
+
+               @Parameter(names = "-retryDelayMs", description = "time in ms after which load snapshot is retried")
+               public long retryDelayMs = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_FAILURE_DELAY_MS.longValue();
+
+               @Parameter(names = "-maxErrorsPerThread", description = "max errors allowed per thread")
+               public int maxErrorsPerThread = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_MAX_ERRORS_PER_THREAD;
+               
+               @Parameter(names = "-vertToEdgeProcDelay", description = "vertex to edge processing delay in ms")
+               public long vertToEdgeProcDelay = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_VERTEX_TO_EDGE_PROC_DELAY_MS.longValue();
+               
+               @Parameter(names = "-staggerThreadDelay", description = "thread delay stagger time in ms")
+               public long staggerThreadDelay = GraphAdminConstants.AAI_SNAPSHOT_DEFAULT_STAGGER_THREAD_DELAY_MS;
+               
+               @Parameter(names = "-fileName", description = "file name for the generated snapshot")
+               public String fileName = "";
+               
+               @Parameter(names = "-snapshotDir", description = "directory path for the generated snapshot")
+               public String snapshotDir = "";
+               
+               @Parameter(names = "-oldFileDir", description = "directory containing the old snapshot file for reloading")
+               public String oldFileDir = "";
+               
+               @Parameter(names = "-caller", description = "process invoking the dataSnapshot")
+               public String caller = "";
+               
+       }
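+       // Illustrative command lines only (file names and paths below are made up):
+       //     -c THREADED_SNAPSHOT -threadCount 4
+       //     -c MULTITHREAD_RELOAD -f dataSnapshot.graphSON.201901221015 -oldFileDir /opt/app/snapshots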
+       
 }
\ No newline at end of file
index 404e243..7092aa8 100644 (file)
@@ -67,7 +67,7 @@ public class DataSnapshotTasks {
                LOGGER.info("Started cron job dataSnapshot @ " + dateFormat.format(new Date()));
                try {
                        if (AAIConfig.get("aai.cron.enable.dataSnapshot").equals("true")) {
-                               String [] dataSnapshotParms = AAIConfig.get("aai.datasnapshot.params",  "JUST_TAKE_SNAPSHOT").split("\\s+");
+                               String [] dataSnapshotParms = {"-c", AAIConfig.get("aai.datasnapshot.params", "JUST_TAKE_SNAPSHOT")};
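+                               // The configured value is now passed as the value of the "-c" flag instead of being whitespace-split into separate args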
                                LOGGER.info("DataSnapshot Params {}", Arrays.toString(dataSnapshotParms));
                                DataSnapshot.main(dataSnapshotParms);
                        }
index af858ae..0f03ee0 100644 (file)
@@ -223,9 +223,8 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{
                LOGGER.debug(" -- Could not convert line to JsonObject [ " + graphSonLine + "]" );
                LOGGER.debug(" -- ErrorMsg = [" +e.getMessage() + "]");
                        
-               return(" DEBUG -a- JSON translation exception when processing this line ---");
-               //xxxxxDEBUGxxxxx I think we put some info on the return String and then return?
-       }
+               return(" JSON translation or getVid exception when processing this line [" + graphSonLine + "]");
+       }
                        
                // -----------------------------------------------------------------------------------------
                // Note - this assumes that any vertices referred to by an edge will already be in the DB.
@@ -242,11 +241,9 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{
                LOGGER.debug(" -- "  + passInfo + " translate VertexId before adding edges failed for this: vtxId = " 
                                + originalVid + ".  ErrorMsg = [" +e.getMessage() + "]");
                        
-               return(" DEBUG -b- there VID-translation error when processing this line ---");
-               //xxxxxDEBUGxxxxx I think we put some info on the return String and then return?
+               return(" VID-translation error when processing this line ---");
        }
-               
-               
+
                try {
                        dbVtx = getVertexFromDbForVid(newVidStr);
                }
@@ -254,8 +251,7 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{
                LOGGER.debug(" -- "  + passInfo + " READ Vertex from DB before adding edges failed for this: vtxId = " + originalVid
                                + ", newVidId = " + newVidL + ".  ErrorMsg = [" +e.getMessage() + "]");
                        
-               return("  --  there was an error processing this line --- Line = [" + graphSonLine + "]");
-               //xxxxxxDEBUGxxxx I think we put some info on the return String and then return?
+               return(" ERROR getting Vertex based on VID = [" + newVidStr + "]");
        }
                        
                
@@ -268,8 +264,7 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{
                        catch ( Exception e ){
                                LOGGER.debug(" -- " + passInfo + " COMMIT FAILED adding EDGES for this vertex: vtxId = " 
                                                + originalVid + ".  ErrorMsg = [" +e.getMessage() + "]");
-                               //xxxxxxxxxx I think we put some info on the return String and then return?
-                   return(" DEBUG -d- there was an error doing the commit while processing edges for this line ---");
+                               return(" ERROR with committing edges for vertexId = " + originalVid );
                        }
                }
                
@@ -283,15 +278,13 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{
                        catch ( Exception e ){
                                LOGGER.debug(" -- " + passInfo + " COMMIT FAILED adding Properties for this vertex: vtxId = " 
                                                + originalVid + ".  ErrorMsg = [" +e.getMessage() + "]");
-                               //xxxxxxxxxx I think we put some info on the return String and then return?
-                   return(" DEBUG -e- there was an error doing the commit while processing Properties for this line ---");
+                               return(" ERROR with committing properties for vertexId = " + originalVid );
                        }
                }
                else {
-                       LOGGER.debug("DEBUG " + passInfo + " Error processing Properties for this vertex: vtxId = " + originalVid );
-                       
-                       //xxxxxxxxxx I think we put some info on the return String and then return?
-                   return(" DEBUG -f- there was an error while processing Properties for this line ---");
+                       LOGGER.debug("DEBUG " + passInfo + " Error processing Properties for this vertex: vtxId = "
+                                       + originalVid + ", [" + pResStr + "]");
+                       return(" ERROR processing properties for vertexId = " + originalVid + ", [" + pResStr + "]");
                }
        }
        
@@ -306,18 +299,15 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{
                                JSONArray propsDetArr = propsOb.getJSONArray(pKey);
                                for( int i=0; i< propsDetArr.length(); i++ ){
                                        JSONObject prop = propsDetArr.getJSONObject(i);
-                                       String val = prop.getString("value");
-                                       dbVtx.property(pKey, val);  //DEBUGjojo -- val is always String here.. which is not right -------------------DEBUG
+                                       Object val = prop.get("value");
+                                       dbVtx.property(pKey, val);  // DEBUG - not sure if this would handle String[] properties
                                }
                        }
-       
                }
                catch ( Exception e ){
                        LOGGER.debug(" -- " + passInfo + " failure getting/setting properties for: vtxId = " 
                                        + originalVid + ".  ErrorMsg = [" + e.getMessage() + "]");
-                       //xxxDEBUGxxxxxxx I think we put some info on the return String and then return?
-                   return(" DEBUG -g- there was an error adding properties while processing this line ---");
-                       
+                       return(" error processing properties for vtxId = " + originalVid);
                }
                        
                return "";
@@ -371,7 +361,6 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{
                        Iterator <String> edItr = edOb.keys();
                        while( edItr.hasNext() ){
                                String eLabel = edItr.next();
-                               String inVid = "";   // Note - this should really be a Long?
                                JSONArray edArr = edOb.getJSONArray(eLabel);
                                for( int i=0; i< edArr.length(); i++ ){
                                        JSONObject eObj = edArr.getJSONObject(i);
@@ -395,22 +384,19 @@ public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{
                                                Iterator <String> ePropsItr = ePropsOb.keys();
                                                while( ePropsItr.hasNext() ){
                                                        String pKey = ePropsItr.next();
-                                                       tmpE.property(pKey, ePropsOb.getString(pKey));
+                                                       tmpE.property(pKey, ePropsOb.get(pKey));
                                                }
                                        }
                                }
                        }
-
                }
                catch ( Exception e ){
                        String msg =  " -- " + passInfo + " failure adding edge for: original vtxId = " 
                                        + originalVid + ".  ErrorMsg = [" +e.getMessage() + "]";
                        LOGGER.debug( " -- " + msg );
-                       //xxxxxxDEBUGxxxx I think we might need some better info on the return String to return?
                        LOGGER.debug(" -- now going to return/bail out of processEdgesForVtx" );
                        return(" >> " + msg );
-               
-               }
+               }
                        
                return "";
        }
index 387f45e..3afd295 100644 (file)
@@ -66,8 +66,6 @@ public class PartialVertexLoader implements Callable<HashMap<String,String>>{
                int retryFailureCount = 0;
                HashMap <String,String> failedAttemptHash = new HashMap <String,String> ();
                HashMap <String,String> old2NewVtxIdHash = new HashMap <String,String> ();
-               GraphSONReader gsr = GraphSONReader.build().create();
-               
        
                // Read this file into a JSON object
                JsonParser parser = new JsonParser();
@@ -180,13 +178,17 @@ public class PartialVertexLoader implements Callable<HashMap<String,String>>{
                        }
                        try { 
                                jg.tx().commit();
-                       // If this worked, we can take it off of the failed list
-                       failedAttemptHash.remove(failedVidStr);
+                               LOGGER.debug(" -- addVertex Successful RETRY for vtxId = " +
+                                               failedVidStr + ", label = [" + failedLabel + "]");
                }
                        catch ( Exception e ){
                                retryFailureCount++;
-                               LOGGER.debug(" -- COMMIT FAILED for RETRY for vtxId = " + failedVidStr 
-                                               + ", label = [" + failedLabel + "].  ErrorMsg = [" + e.getMessage() + "]" );
+                               // Note - this is a "POSSIBLE" error because the reason the commit fails may be that
+                               //    the node is a dupe or has some other legit reason that it should not be in the DB.
+                               LOGGER.debug(" --POSSIBLE ERROR-- COMMIT FAILED for RETRY for vtxId = " + failedVidStr 
+                                               + ", label = [" + failedLabel + "].  ErrorMsg = [" + e.getMessage() 
+                                               + "].  This vertex will not be tried again. ");
+
                                        e.printStackTrace();
                                if( retryFailureCount > maxAllowedErrors ) {
                                        LOGGER.debug(">>> Abandoning PartialVertexLoader() because " +
index 791ae15..493678b 100644 (file)
@@ -25,25 +25,28 @@ import java.util.Iterator;
 import org.apache.tinkerpop.gremlin.structure.Direction;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.apache.tinkerpop.gremlin.structure.io.IoCore;
+import org.apache.tinkerpop.gremlin.structure.io.GraphWriter;
 import org.janusgraph.core.JanusGraph;
 
 
 public class PrintVertexDetails implements Runnable{
-       
-       //private static EELFLogger LOGGER;
 
        private JanusGraph jg;
        private String fname;
        private ArrayList<Vertex> vtxList;
        private Boolean debugOn;
-       private int debugDelayMs;
-               
-       public PrintVertexDetails (JanusGraph graph, String fn, ArrayList<Vertex> vL, Boolean debugFlag, int debugDelay){
+       private long debugDelayMs;
+       private String snapshotType;
+
+       static final byte[] newLineBytes = "\n".getBytes();
+
+       public PrintVertexDetails (JanusGraph graph, String fn, ArrayList<Vertex> vL, Boolean debugFlag, long debugAddDelayTime, String snapshotType){
                jg = graph;
                fname = fn;
                vtxList = vL;
                debugOn = debugFlag;
-               debugDelayMs = debugDelay;
+               debugDelayMs = debugAddDelayTime;
+               this.snapshotType = snapshotType;
        }
                
        public void run(){  
@@ -55,6 +58,12 @@ public class PrintVertexDetails implements Runnable{
                                Long debugDelayMsL = new Long(debugDelayMs);
                                FileOutputStream subFileStr = new FileOutputStream(fname);
                                Iterator <Vertex> vSubItr = vtxList.iterator();
+                               GraphWriter graphWriter = null;
+                               if("gryo".equalsIgnoreCase(snapshotType)){
+                                       graphWriter = jg.io(IoCore.gryo()).writer().create();
+                               } else {
+                                       graphWriter = jg.io(IoCore.graphson()).writer().create();
+                               }
                                while( vSubItr.hasNext() ){
                                        Long vertexIdL = 0L;
                                        String aaiNodeType = "";
@@ -68,7 +77,8 @@ public class PrintVertexDetails implements Runnable{
                                                aaiUuid = (String) tmpV.property("aai-uuid").orElse(null);
                                                
                                                Thread.sleep(debugDelayMsL); // Make sure it doesn't bump into itself
-                                               jg.io(IoCore.graphson()).writer().create().writeVertex(subFileStr, tmpV, Direction.BOTH); 
+                                               graphWriter.writeVertex(subFileStr, tmpV, Direction.BOTH);
+                                               subFileStr.write(newLineBytes);
                                                okCount++;
                                        }
                                        catch(Exception e) {
@@ -94,7 +104,11 @@ public class PrintVertexDetails implements Runnable{
                                int count = vtxList.size();
                                Iterator <Vertex> vSubItr = vtxList.iterator();
                                FileOutputStream subFileStr = new FileOutputStream(fname);
-                               jg.io(IoCore.graphson()).writer().create().writeVertices(subFileStr, vSubItr, Direction.BOTH);
+                               if ("gryo".equalsIgnoreCase(snapshotType)) {
+                                       jg.io(IoCore.gryo()).writer().create().writeVertices(subFileStr, vSubItr, Direction.BOTH);
+                               } else {
+                                       jg.io(IoCore.graphson()).writer().create().writeVertices(subFileStr, vSubItr, Direction.BOTH);
+                               }
                                subFileStr.close();
                                System.out.println(" -- Printed " + count + " vertexes out to " + fname);
                        }
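Note on the writer changes above: the new snapshotType argument selects between gryo and GraphSON output, the GraphWriter is built once per thread instead of once per vertex, and a newline is written after each vertex so the output stays one vertex per line (presumably so the partial loaders can read it line by line). A small self-contained sketch of that pattern, written against TinkerGraph rather than JanusGraph so it can run without a database (requires the tinkergraph-gremlin dependency; the output file name is arbitrary):

    import java.io.FileOutputStream;
    import java.io.OutputStream;
    import java.util.Iterator;

    import org.apache.tinkerpop.gremlin.structure.Direction;
    import org.apache.tinkerpop.gremlin.structure.Graph;
    import org.apache.tinkerpop.gremlin.structure.Vertex;
    import org.apache.tinkerpop.gremlin.structure.io.GraphWriter;
    import org.apache.tinkerpop.gremlin.structure.io.IoCore;
    import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerFactory;

    public class SnapshotWriterSketch {
        public static void main(String[] args) throws Exception {
            Graph graph = TinkerFactory.createModern();
            String snapshotType = "graphson";   // or "gryo"

            // Build the writer once, based on the requested snapshot format.
            GraphWriter writer;
            if ("gryo".equalsIgnoreCase(snapshotType)) {
                writer = graph.io(IoCore.gryo()).writer().create();
            } else {
                writer = graph.io(IoCore.graphson()).writer().create();
            }

            byte[] newLine = "\n".getBytes();
            try (OutputStream out = new FileOutputStream("vertex-subset.out")) {
                Iterator<Vertex> it = graph.vertices();
                while (it.hasNext()) {
                    writer.writeVertex(out, it.next(), Direction.BOTH);
                    out.write(newLine);   // keep one vertex per line for line-oriented readers
                }
            }
        }
    }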
index ebef01d..c45ad28 100644 (file)
-/**\r
- * ============LICENSE_START=======================================================\r
- * org.onap.aai\r
- * ================================================================================\r
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
- * ================================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- *\r
- *    http://www.apache.org/licenses/LICENSE-2.0\r
- *\r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END=========================================================\r
- */\r
-package org.onap.aai.db.schema;\r
-\r
-import java.io.IOException;\r
-import java.util.UUID;\r
-\r
-import com.fasterxml.jackson.databind.ObjectMapper;\r
-import org.apache.commons.configuration.ConfigurationException;\r
-import org.apache.commons.configuration.PropertiesConfiguration;\r
-import org.codehaus.jackson.JsonGenerationException;\r
-import org.onap.aai.dbmap.AAIGraphConfig;\r
-import org.onap.aai.edges.EdgeIngestor;\r
-import org.onap.aai.exceptions.AAIException;\r
-import org.onap.aai.setup.SchemaVersions;\r
-import org.onap.aai.logging.LoggingContext;\r
-import org.onap.aai.logging.LoggingContext.StatusCode;\r
-import org.onap.aai.util.AAIConfig;\r
-import com.beust.jcommander.JCommander;\r
-import com.beust.jcommander.Parameter;\r
-import org.janusgraph.core.JanusGraphFactory;\r
-import org.janusgraph.core.JanusGraph;\r
-import org.springframework.context.annotation.AnnotationConfigApplicationContext;\r
-\r
-public class ScriptDriver {\r
-\r
-       /**\r
-        * The main method.\r
-        *\r
-        * @param args the arguments\r
-        * @throws AAIException the AAI exception\r
-        * @throws JsonGenerationException the json generation exception\r
-        * @throws IOException Signals that an I/O exception has occurred.\r
-        */\r
-       public static void main (String[] args) throws AAIException, IOException, ConfigurationException {\r
-               CommandLineArgs cArgs = new CommandLineArgs();\r
-               \r
-               LoggingContext.init();\r
-               LoggingContext.component("DBSchemaScriptDriver");\r
-               LoggingContext.partnerName("NA");\r
-               LoggingContext.targetEntity("AAI");\r
-               LoggingContext.requestId(UUID.randomUUID().toString());\r
-               LoggingContext.serviceName("AAI");\r
-               LoggingContext.targetServiceName("main");\r
-               LoggingContext.statusCode(StatusCode.COMPLETE);\r
-               LoggingContext.responseCode(LoggingContext.SUCCESS);\r
-               \r
-               new JCommander(cArgs, args);\r
-               \r
-               if (cArgs.help) {\r
-                       System.out.println("-c [path to graph configuration] -type [what you want to audit - oxm or graph]");\r
-               }\r
-\r
-               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(\r
-                               "org.onap.aai.config",\r
-                               "org.onap.aai.setup"\r
-               );\r
-\r
-               AuditorFactory auditorFactory = ctx.getBean(AuditorFactory.class);\r
-               SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);\r
-               EdgeIngestor edgeIngestor     = ctx.getBean(EdgeIngestor.class);\r
-\r
-               String config = cArgs.config;\r
-               AAIConfig.init();\r
-\r
-               PropertiesConfiguration graphConfiguration = new AAIGraphConfig\r
-                       .Builder(config)\r
-                       .forService(ScriptDriver.class.getSimpleName())\r
-                       .withGraphType("NA")\r
-                       .buildConfiguration();\r
-\r
-               try (JanusGraph graph = JanusGraphFactory.open(graphConfiguration)) {\r
-                       if (!("oxm".equals(cArgs.type) || "graph".equals(cArgs.type))) {\r
-                               System.out.println("type: " + cArgs.type + " not recognized.");\r
-                               System.exit(1);\r
-                       }\r
-\r
-                       AuditDoc doc = null;\r
-                       if ("oxm".equals(cArgs.type)) {\r
-                               doc = auditorFactory.getOXMAuditor(schemaVersions.getDefaultVersion(), edgeIngestor).getAuditDoc();\r
-                       } else if ("graph".equals(cArgs.type)) {\r
-                               doc = auditorFactory.getGraphAuditor(graph).getAuditDoc();\r
-                       }\r
-\r
-                       ObjectMapper mapper = new ObjectMapper();\r
-\r
-                       String json = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(doc);\r
-                       System.out.println(json);\r
-               }\r
-       }\r
-       \r
-}\r
-\r
-class CommandLineArgs {\r
-       \r
-       @Parameter(names = "--help", description = "Help")\r
-       public boolean help = false;\r
-       \r
-       @Parameter(names = "-c", description = "Configuration", required=true)\r
-       public String config;\r
-       \r
-       @Parameter(names = "-type", description = "Type", required=true)\r
-       public String type = "graph";\r
-       \r
-\r
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.codehaus.jackson.JsonGenerationException;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.dbmap.AAIGraphConfig;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.ErrorObjectFormatException;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.ExceptionTranslator;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+import java.io.IOException;
+import java.util.UUID;
+
+public class ScriptDriver {
+
+       /**
+        * The main method.
+        *
+        * @param args the arguments
+        * @throws AAIException the AAI exception
+        * @throws JsonGenerationException the json generation exception
+        * @throws IOException Signals that an I/O exception has occurred.
+        */
+       public static void main (String[] args) throws AAIException, IOException, ConfigurationException, ErrorObjectFormatException {
+               CommandLineArgs cArgs = new CommandLineArgs();
+               
+               LoggingContext.init();
+               LoggingContext.component("DBSchemaScriptDriver");
+               LoggingContext.partnerName("NA");
+               LoggingContext.targetEntity("AAI");
+               LoggingContext.requestId(UUID.randomUUID().toString());
+               LoggingContext.serviceName("AAI");
+               LoggingContext.targetServiceName("main");
+               LoggingContext.statusCode(StatusCode.COMPLETE);
+               LoggingContext.responseCode(LoggingContext.SUCCESS);
+               ErrorLogHelper.loadProperties();
+               new JCommander(cArgs, args);
+               
+               if (cArgs.help) {
+                       System.out.println("-c [path to graph configuration] -type [what you want to audit - oxm or graph]");
+               }
+
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+               PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
+               initializer.initialize(ctx);
+               try {
+                       ctx.scan(
+                                       "org.onap.aai.config",
+                                       "org.onap.aai.setup"
+                       );
+                       ctx.refresh();
+
+               } catch (Exception e) {
+                       AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
+                       LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
+                       throw aai;
+               }
+               AuditorFactory auditorFactory = ctx.getBean(AuditorFactory.class);
+               SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
+               EdgeIngestor edgeIngestor     = ctx.getBean(EdgeIngestor.class);
+
+               String config = cArgs.config;
+               AAIConfig.init();
+
+               PropertiesConfiguration graphConfiguration = new AAIGraphConfig
+                       .Builder(config)
+                       .forService(ScriptDriver.class.getSimpleName())
+                       .withGraphType("NA")
+                       .buildConfiguration();
+
+               try (JanusGraph graph = JanusGraphFactory.open(graphConfiguration)) {
+                       if (!("oxm".equals(cArgs.type) || "graph".equals(cArgs.type))) {
+                               System.out.println("type: " + cArgs.type + " not recognized.");
+                               System.exit(1);
+                       }
+
+                       AuditDoc doc = null;
+                       if ("oxm".equals(cArgs.type)) {
+                               doc = auditorFactory.getOXMAuditor(schemaVersions.getDefaultVersion(), edgeIngestor).getAuditDoc();
+                       } else if ("graph".equals(cArgs.type)) {
+                               doc = auditorFactory.getGraphAuditor(graph).getAuditDoc();
+                       }
+
+                       ObjectMapper mapper = new ObjectMapper();
+
+                       String json = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(doc);
+                       System.out.println(json);
+               }
+       }
+       
+}
+
+class CommandLineArgs {
+       
+       @Parameter(names = "--help", description = "Help")
+       public boolean help = false;
+       
+       @Parameter(names = "-c", description = "Configuration", required=true)
+       public String config;
+       
+       @Parameter(names = "-type", description = "Type", required=true)
+       public String type = "graph";
+       
+
 }
\ No newline at end of file
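Note on the revised bootstrap in ScriptDriver (the same pattern appears in DupeTool and DynamicPayloadGenerator below): the Spring context is now created empty, a PropertyPasswordConfiguration initializer is applied to it, and scan()/refresh() run inside a try block so startup failures can be translated into an AAIException and logged before the tool exits. A minimal, generic sketch of that shape, with a placeholder exception standing in for the project's ExceptionTranslator/ErrorLogHelper handling:

    import org.springframework.context.annotation.AnnotationConfigApplicationContext;

    public class ContextBootstrapSketch {

        // Placeholder for the AAIException produced by ExceptionTranslator in the real tools.
        static class ToolInitException extends RuntimeException {
            ToolInitException(String msg, Throwable cause) { super(msg, cause); }
        }

        public static void main(String[] args) {
            AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
            // The real code also runs a PropertyPasswordConfiguration initializer against the
            // context at this point (its exact behavior is project-specific).
            try {
                ctx.scan("org.onap.aai.config", "org.onap.aai.setup");
                ctx.refresh();
            } catch (Exception e) {
                throw new ToolInitException("context startup failed, resolve and retry", e);
            }
            System.out.println("beans loaded: " + ctx.getBeanDefinitionCount());
            ctx.close();
        }
    }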
index 7b7ef99..fd5ae00 100644 (file)
  */
 package org.onap.aai.dbgen;
 
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.util.*;
-import java.util.Map.Entry;
-
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
-import org.apache.tinkerpop.gremlin.structure.Direction;
-import org.apache.tinkerpop.gremlin.structure.Edge;
-import org.apache.tinkerpop.gremlin.structure.Graph;
-import org.apache.tinkerpop.gremlin.structure.Vertex;
-import org.apache.tinkerpop.gremlin.structure.VertexProperty;
-import org.onap.aai.db.props.AAIProperties;
-import org.onap.aai.dbmap.AAIGraphConfig;
+import org.apache.tinkerpop.gremlin.structure.*;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.onap.aai.config.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.dbmap.AAIGraphConfig;
+import org.onap.aai.edges.enums.AAIDirection;
 import org.onap.aai.edges.enums.EdgeProperty;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.Introspector;
@@ -45,19 +42,19 @@ import org.onap.aai.logging.ErrorLogHelper;
 import org.onap.aai.logging.LogFormatTools;
 import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.logging.LoggingContext.StatusCode;
-import org.onap.aai.edges.enums.AAIDirection;
 import org.onap.aai.setup.SchemaVersions;
 import org.onap.aai.util.AAIConfig;
 import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.ExceptionTranslator;
+import org.onap.aai.util.GraphAdminConstants;
 import org.slf4j.MDC;
-
-import com.att.eelf.configuration.Configuration;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import org.janusgraph.core.JanusGraphFactory;
-import org.janusgraph.core.JanusGraph;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.util.*;
+import java.util.Map.Entry;
+
 public class DupeTool {
 
     private static final EELFLogger logger = EELFManager.getInstance().getLogger(DupeTool.class.getSimpleName());
@@ -76,6 +73,7 @@ public class DupeTool {
     }
 
     private LoaderFactory loaderFactory;
+    private int dupeGroupCount = 0;
 
     public DupeTool(LoaderFactory loaderFactory, SchemaVersions schemaVersions){
         this(loaderFactory, schemaVersions, true);
@@ -89,7 +87,7 @@ public class DupeTool {
 
     public void execute(String[] args){
 
-        String defVersion = "v12";
+        String defVersion = "v15";
         try {
             defVersion = AAIConfig.get(AAIConstants.AAI_DEFAULT_API_VERSION_PROP);
         } catch (AAIException ae) {
@@ -101,7 +99,7 @@ public class DupeTool {
             exit(0);
         }
 
-
+        dupeGroupCount = 0;
         Loader loader = null;
         try {
             loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion());
@@ -120,8 +118,8 @@ public class DupeTool {
 
         try {
             AAIConfig.init();
-            int maxRecordsToFix = AAIConstants.AAI_DUPETOOL_DEFAULT_MAX_FIX;
-            int sleepMinutes = AAIConstants.AAI_DUPETOOL_DEFAULT_SLEEP_MINUTES;
+            int maxRecordsToFix = GraphAdminConstants.AAI_DUPETOOL_DEFAULT_MAX_FIX;
+            int sleepMinutes = GraphAdminConstants.AAI_DUPETOOL_DEFAULT_SLEEP_MINUTES;
             int timeWindowMinutes = 0;   // A value of 0 means that we will not have a time-window -- we will look
             // at all nodes of the passed-in nodeType.
             long windowStartTime = 0;  // Translation of the window into a starting timestamp
@@ -137,7 +135,7 @@ public class DupeTool {
                 }
             } catch (Exception e) {
                 // Don't worry, we'll just use the defaults that we got from AAIConstants
-                logger.warn("WARNING - could not pick up aai.dupeTool values from aaiconfig.properties file.  Will use defaults. ");
+                logger.warn("WARNING - could not pick up aai.dupeTool values from aaiconfig.properties file.  Will use defaults. " + e.getMessage());
             }
 
             String nodeTypeVal = "";
@@ -364,7 +362,7 @@ public class DupeTool {
                     showNodeDetailsForADupeSet(gt1, firstPassDupeSets.get(x), logger);
                 }
             }
-
+            dupeGroupCount = firstPassDupeSets.size();
             boolean didSomeDeletesFlag = false;
             ArrayList<String> dupeSetsToFix = new ArrayList<String>();
             if (autoFix && firstPassDupeSets.size() == 0) {
@@ -405,6 +403,7 @@ public class DupeTool {
                         + " sets of duplicates that we think can be deleted. ";
                 logger.info(msg);
                 System.out.println(msg);
+               
                 if (dupeSetsToFix.size() > 0) {
                     msg = " Here is what the sets look like: ";
                     logger.info(msg);
@@ -492,7 +491,7 @@ public class DupeTool {
      *
      * @param args the arguments
      */
-    public static void main(String[] args) {
+    public static void main(String[] args) throws AAIException {
 
         System.setProperty("aai.service.name", DupeTool.class.getSimpleName());
         // Set the logging file properties to be used by EELFManager
@@ -511,11 +510,23 @@ public class DupeTool {
         LoggingContext.statusCode(StatusCode.COMPLETE);
         LoggingContext.responseCode(LoggingContext.SUCCESS);
 
-        AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
-                "org.onap.aai.config",
-                "org.onap.aai.setup"
-        );
-
+        AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+        PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
+        initializer.initialize(ctx);
+        try {
+            ctx.scan(
+                    "org.onap.aai.config",
+                    "org.onap.aai.setup"
+            );
+            ctx.refresh();
+        } catch (Exception e) {
+            AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
+            logger.error("Problems running DupeTool "+aai.getMessage());
+            LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+            LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+            ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
+            throw aai;
+        }
         LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
         SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
         DupeTool dupeTool = new DupeTool(loaderFactory, schemaVersions);
@@ -1080,10 +1091,12 @@ public class DupeTool {
                                           Boolean specialTenantRule, Loader loader, EELFLogger logger)
             throws AAIException {
 
-        // This method assumes that it is being passed a List of vertex objects
-        // which violate our uniqueness constraints.
-
+               // This method assumes that it is being passed a List of 
+               // vertex objects which violate our uniqueness constraints.
+               // Note - returning a null vertex means we could not 
+               //   safely pick one to keep (i.e., safely know which one to delete).
         Vertex nullVtx = null;
+        GraphTraversalSource gts = g.traversal();
 
         if (dupeVertexList == null) {
             return nullVtx;
@@ -1095,12 +1108,35 @@ public class DupeTool {
         if (listSize == 1) {
             return (dupeVertexList.get(0));
         }
+        
+               // If they don't all have the same aai-uri, then we will not 
+               // choose between them - we'll need someone to manually 
+               // check to pick which one makes sense to keep.
+               Object uriOb = dupeVertexList.get(0).<Object>property("aai-uri").orElse(null);
+               if( uriOb == null || uriOb.toString().equals("") ){
+                       // this is a bad node - hopefully will be picked up by phantom checker
+                       return nullVtx;
+               }
+               String thisUri = uriOb.toString();
+               for (int i = 1; i < listSize; i++) {
+                       uriOb = dupeVertexList.get(i).<Object>property("aai-uri").orElse(null);
+                       if( uriOb == null || uriOb.toString().equals("") ){
+                               // this is a bad node - hopefully will be picked up by phantom checker
+                               return nullVtx;
+                       }
+                       String nextUri = uriOb.toString();
+                       if( !thisUri.equals(nextUri)){
+                               // there are different URI's on these - so we can't pick 
+                               // a dupe to keep.  Someone will need to look at it.
+                               return nullVtx;
+                       }
+               }
 
         Vertex vtxPreferred = null;
         Vertex currentFaveVtx = dupeVertexList.get(0);
         for (int i = 1; i < listSize; i++) {
             Vertex vtxB = dupeVertexList.get(i);
-            vtxPreferred = pickOneOfTwoDupes(transId, fromAppId, g,
+            vtxPreferred = pickOneOfTwoDupes(transId, fromAppId, gts,
                     currentFaveVtx, vtxB, ver, specialTenantRule, loader, logger);
             if (vtxPreferred == null) {
                 // We couldn't choose one
@@ -1110,7 +1146,14 @@ public class DupeTool {
             }
         }
 
-        return (currentFaveVtx);
+        if( currentFaveVtx != null && checkAaiUriOk(gts, currentFaveVtx, logger) ){
+                       return (currentFaveVtx);
+               }
+               else {
+                       // We had a preferred vertex, but its aai-uri was bad, so
+                       // we will not recommend one to keep.
+                       return nullVtx;
+               }
 
     } // end of getPreferredDupe()
 
@@ -1120,7 +1163,7 @@ public class DupeTool {
      *
      * @param transId    the trans id
      * @param fromAppId  the from app id
-     * @param g          the g
+     * @param gts        the graph traversal source
      * @param vtxA       the vtx A
      * @param vtxB       the vtx B
      * @param ver        the ver
@@ -1130,7 +1173,7 @@ public class DupeTool {
      * @throws AAIException the AAI exception
      */
     public Vertex pickOneOfTwoDupes(String transId,
-                                           String fromAppId, Graph g, Vertex vtxA,
+                                           String fromAppId, GraphTraversalSource gts, Vertex vtxA,
                                            Vertex vtxB, String ver, Boolean specialTenantRule, Loader loader, EELFLogger logger) throws AAIException {
 
         Vertex nullVtx = null;
@@ -1289,11 +1332,13 @@ public class DupeTool {
             }
 
             if (allTheSame) {
-                if (vidA < vidB) {
-                    preferredVtx = vtxA;
-                } else {
-                    preferredVtx = vtxB;
-                }
+                if ( checkAaiUriOk(gts, vtxA, logger) ) {
+                       preferredVtx = vtxA;
+               } 
+               else if ( checkAaiUriOk(gts, vtxB, logger) ) {
+                       preferredVtx = vtxB;
+               }
+               // else we're picking neither because neither one had a working aai-uri index property
             } else if (specialTenantRule) {
                 // They asked us to apply a special rule if it applies
                 if (vtxIdsConn2A.size() == 2 && vtxANodeType.equals("tenant")) {
@@ -1575,6 +1620,71 @@ public class DupeTool {
 
     }// End of getNodeKeyVals()
 
+    
+       
+       /**
+        * makes sure aai-uri exists and can be used to get this node back
+        *
+        * @param graph the graph traversal source used to look the node up by aai-uri
+        * @param origVtx the vertex whose aai-uri is being checked
+        * @param eLogger the logger
+        * @return true if aai-uri is populated and the aai-uri-index points to this vtx
+        * @throws AAIException the AAI exception
+        */
+       private Boolean checkAaiUriOk( GraphTraversalSource graph, Vertex origVtx, EELFLogger eLogger )
+                       throws AAIException{
+               String aaiUriStr = "";
+               try { 
+                       Object ob = origVtx.<Object>property("aai-uri").orElse(null);
+                       String origVid = origVtx.id().toString();
+                       if (ob == null || ob.toString().equals("")) {
+                               // It is missing its aai-uri
+                               eLogger.debug("DEBUG No [aai-uri] property found for vid = [" 
+                                               + origVid + "] " );
+                               return false;
+                       }
+                       else {
+                               aaiUriStr = ob.toString();
+                               Iterator <Vertex> verts = graph.V().has("aai-uri",aaiUriStr);
+                               int count = 0;
+                               while( verts.hasNext() ){
+                                       count++;
+                                       Vertex foundV = verts.next();
+                                       String foundVid = foundV.id().toString();
+                                       if( !origVid.equals(foundVid) ){
+                                               eLogger.debug("DEBUG aai-uri key property ["  
+                                                               + aaiUriStr + "] for vid = [" 
+                                                               + origVid + "] brought back different vertex with vid = [" 
+                                                               + foundVid + "]." );
+                                               return false;
+                                       }
+                               }
+                               if( count == 0 ){
+                                       eLogger.debug("DEBUG aai-uri key property ["  
+                                                       + aaiUriStr + "] for vid = [" 
+                                                       + origVid + "] could not be used to query for that vertex. ");
+                                       return false;   
+                               }
+                               else if( count > 1 ){
+                                       eLogger.debug("DEBUG aai-uri key property ["  
+                                                       + aaiUriStr + "] for vid = [" 
+                                                       + origVid + "] brought back multiple (" 
+                                                       + count + ") vertices instead of just one. ");
+                                       return false;   
+                               }
+                       }
+               }
+               catch( Exception ex ){
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       eLogger.error(" ERROR trying to get node with aai-uri: [" + aaiUriStr + "]" + LogFormatTools.getStackTop(ex));
+               }
+               return true;
+               
+       }// End of checkAaiUriOk() 
+       
 
     /**
      * Get values of the key properties for a node as a single string
@@ -1850,5 +1960,14 @@ public class DupeTool {
             logger.warn("WARNING from final graph.shutdown()", ex);
         }
     }
+    
+       public int getDupeGroupCount() {
+               return dupeGroupCount;
+       }
+
+       public void setDupeGroupCount(int dgCount) {
+               this.dupeGroupCount = dgCount;
+       }
+       
 }
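Note on checkAaiUriOk(): a candidate "keeper" vertex is only trusted if its aai-uri property is populated and querying the graph by that aai-uri returns exactly this one vertex, which guards against stale or duplicated index entries. A self-contained sketch of the core check, using TinkerGraph in place of JanusGraph (requires the tinkergraph-gremlin dependency; the node properties are illustrative):

    import java.util.Iterator;

    import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
    import org.apache.tinkerpop.gremlin.structure.Vertex;
    import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph;

    public class AaiUriCheckSketch {

        // True if the vertex has a non-empty aai-uri and that uri resolves back to only this vertex.
        static boolean aaiUriOk(GraphTraversalSource g, Vertex v) {
            Object uri = v.property("aai-uri").orElse(null);
            if (uri == null || uri.toString().isEmpty()) {
                return false;                         // missing the index property
            }
            int count = 0;
            Iterator<Vertex> found = g.V().has("aai-uri", uri.toString());
            while (found.hasNext()) {
                count++;
                if (!found.next().id().equals(v.id())) {
                    return false;                     // the uri points at a different vertex
                }
            }
            return count == 1;                        // exactly one hit, and it is this vertex
        }

        public static void main(String[] args) {
            TinkerGraph graph = TinkerGraph.open();
            Vertex ok  = graph.addVertex("aai-node-type", "pserver",
                                         "aai-uri", "/cloud-infrastructure/pservers/pserver/host1");
            Vertex bad = graph.addVertex("aai-node-type", "pserver"); // no aai-uri
            GraphTraversalSource g = graph.traversal();
            System.out.println(aaiUriOk(g, ok));   // true
            System.out.println(aaiUriOk(g, bad));  // false
        }
    }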
 
diff --git a/src/main/java/org/onap/aai/dbgen/DynamicPayloadGenerator.java b/src/main/java/org/onap/aai/dbgen/DynamicPayloadGenerator.java
new file mode 100644 (file)
index 0000000..ecd95a7
--- /dev/null
@@ -0,0 +1,906 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.dbgen;\r
+\r
+import com.att.eelf.configuration.EELFLogger;\r
+import com.att.eelf.configuration.EELFManager;\r
+import com.beust.jcommander.JCommander;\r
+import com.beust.jcommander.Parameter;\r
+import org.apache.tinkerpop.gremlin.process.traversal.P;\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;\r
+import org.apache.tinkerpop.gremlin.process.traversal.step.util.Tree;\r
+import org.apache.tinkerpop.gremlin.structure.Element;\r
+import org.apache.tinkerpop.gremlin.structure.Vertex;\r
+import org.codehaus.jackson.JsonNode;\r
+import org.codehaus.jackson.map.ObjectMapper;\r
+import org.codehaus.jackson.node.ObjectNode;\r
+import org.codehaus.jackson.type.TypeReference;\r
+import org.onap.aai.config.PropertyPasswordConfiguration;\r
+import org.onap.aai.db.props.AAIProperties;\r
+import org.onap.aai.dbmap.DBConnectionType;\r
+import org.onap.aai.dbmap.InMemoryGraph;\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.edges.EdgeRule;\r
+import org.onap.aai.edges.EdgeRuleQuery;\r
+import org.onap.aai.edges.enums.AAIDirection;\r
+import org.onap.aai.edges.enums.EdgeType;\r
+import org.onap.aai.edges.exceptions.AmbiguousRuleChoiceException;\r
+import org.onap.aai.edges.exceptions.EdgeRuleNotFoundException;\r
+import org.onap.aai.exceptions.AAIException;\r
+import org.onap.aai.introspection.Introspector;\r
+import org.onap.aai.introspection.Loader;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.introspection.ModelType;\r
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;\r
+import org.onap.aai.logging.ErrorLogHelper;\r
+import org.onap.aai.logging.LogFormatTools;\r
+import org.onap.aai.logging.LoggingContext;\r
+import org.onap.aai.parsers.uri.URIToObject;\r
+import org.onap.aai.serialization.db.DBSerializer;\r
+import org.onap.aai.serialization.engines.InMemoryDBEngine;\r
+import org.onap.aai.serialization.engines.QueryStyle;\r
+import org.onap.aai.serialization.tinkerpop.TreeBackedVertex;\r
+import org.onap.aai.setup.SchemaVersion;\r
+import org.onap.aai.setup.SchemaVersions;\r
+import org.onap.aai.util.AAIConfig;\r
+import org.onap.aai.util.AAIConstants;\r
+import org.onap.aai.util.AAISystemExitUtil;\r
+import org.onap.aai.util.ExceptionTranslator;\r
+import org.slf4j.MDC;\r
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;\r
+\r
+import java.io.*;\r
+import java.net.URI;\r
+import java.nio.file.Files;\r
+import java.nio.file.InvalidPathException;\r
+import java.nio.file.Path;\r
+import java.nio.file.Paths;\r
+import java.text.DateFormat;\r
+import java.text.SimpleDateFormat;\r
+import java.util.*;\r
+import java.util.Map.Entry;\r
+import java.util.regex.Matcher;\r
+import java.util.regex.Pattern;\r
+\r
+/*\r
+ * The Class DynamicPayloadGenerator.\r
+ */\r
+public class DynamicPayloadGenerator {\r
+\r
+       /*\r
+        * Create a Dynamic memory graph instance which should not affect the\r
+        * AAIGraph\r
+        */\r
+       private InMemoryGraph inMemGraph = null;\r
+\r
+       private InMemoryDBEngine dbEngine;\r
+       private InputStream sequenceInputStreams;\r
+       /*\r
+        * Loader, QueryStyle, ConnectionType for the Serializer\r
+        */\r
+       private Loader loader;\r
+       private String urlBase;\r
+       private BufferedWriter bw = null;\r
+       private boolean exitFlag = true;\r
+       private CommandLineArgs cArgs;\r
+\r
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DynamicPayloadGenerator.class);\r
+\r
+       private static final QueryStyle queryStyle = QueryStyle.TRAVERSAL;\r
+       private static final DBConnectionType type = DBConnectionType.CACHED;\r
+       private static final ModelType introspectorFactoryType = ModelType.MOXY;\r
+       private final LoaderFactory loaderFactory;\r
+       private final EdgeIngestor edgeRules;\r
+       private final SchemaVersions schemaVersions;\r
+       private final SchemaVersion version;\r
+\r
+       public DynamicPayloadGenerator(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, SchemaVersions schemaVersions){\r
+           this.loaderFactory = loaderFactory;\r
+               this.edgeRules = edgeIngestor;\r
+               this.schemaVersions = schemaVersions;\r
+               this.version = schemaVersions.getDefaultVersion();\r
+       }\r
+\r
+       /**\r
+        * The run method.\r
+        *\r
+        * @param args\r
+        *            the arguments\r
+        * @param isSystemExit true if running from a shell script (the JVM should call System.exit), false if running from a scheduled task\r
+        * @throws AAIException\r
+        * @throws Exception\r
+        */\r
+       \r
+       public static void run (LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, SchemaVersions schemaVersions, String[] args, boolean isSystemExit) {\r
+               //\r
+               MDC.put("logFilenameAppender", DynamicPayloadGenerator.class.getSimpleName());\r
+               DynamicPayloadGenerator payloadgen = new DynamicPayloadGenerator(loaderFactory, edgeIngestor, schemaVersions);\r
+               payloadgen.exitFlag = isSystemExit;\r
+               try {\r
+                       payloadgen.init(args);\r
+\r
+                       payloadgen.generatePayloads();\r
+               } catch (AAIException e) {\r
+                       LOGGER.error("Exception " + LogFormatTools.getStackTop(e));\r
+               } catch (IOException e) {\r
+                       LOGGER.error("Exception " + LogFormatTools.getStackTop(e));\r
+               }\r
+               if ( isSystemExit ) {\r
+                       AAISystemExitUtil.systemExitCloseAAIGraph(1);\r
+               }\r
+               else {\r
+                       AAISystemExitUtil.systemExitCloseAAIGraph(0);\r
+               }\r
+       \r
+       }\r
+       public static void main(String[] args) throws AAIException {\r
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();\r
+               PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();\r
+               initializer.initialize(ctx);\r
+               try {\r
+                       ctx.scan(\r
+                                       "org.onap.aai.config",\r
+                                       "org.onap.aai.setup"\r
+                       );\r
+                       ctx.refresh();\r
+               } catch (Exception e) {\r
+                       AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);\r
+                       LOGGER.error("Problems running tool "+aai.getMessage());\r
+                       LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);\r
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);\r
+                       ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");\r
+                       throw aai;\r
+\r
+               }\r
+               LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);\r
+               EdgeIngestor  edgeIngestor  = ctx.getBean(EdgeIngestor.class);\r
+               SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);\r
+               run (loaderFactory, edgeIngestor, schemaVersions, args, true);\r
+       }\r
+       \r
+       \r
+       public void taskExit() {\r
+               if ( this.exitFlag ) {\r
+                       AAISystemExitUtil.systemExitCloseAAIGraph(1);\r
+               }\r
+               else {\r
+                       AAISystemExitUtil.systemExitCloseAAIGraph(0);\r
+               }\r
+       }\r
+       public void init(String[] args) throws AAIException {\r
+               cArgs = new CommandLineArgs();\r
+               JCommander jCommander = new JCommander(cArgs, args);\r
+               jCommander.setProgramName(DynamicPayloadGenerator.class.getSimpleName());\r
+               LOGGER.info("Snapshot file " + cArgs.dataSnapshot);\r
+\r
+\r
+               // TODO- How to add dynamic.properties\r
+\r
+               LOGGER.info("output file " + cArgs.output);\r
+               LOGGER.info("format file " + cArgs.format);\r
+               LOGGER.info("schema enabled " + cArgs.schemaEnabled);\r
+               LOGGER.info("Multiple snapshots " + cArgs.isMultipleSnapshot);\r
+               LOGGER.info("Is Partial Graph " + cArgs.isPartialGraph);\r
+               \r
+               if (cArgs.config.isEmpty())\r
+                       cArgs.config = AAIConstants.AAI_HOME_ETC_APP_PROPERTIES + "dynamic.properties";\r
+\r
+               LOGGER.info("config file " + cArgs.config);\r
+               if (cArgs.nodePropertyFile.isEmpty())\r
+                       cArgs.nodePropertyFile = AAIConstants.AAI_HOME_ETC_SCRIPT + "/tenant_isolation/nodes.json";\r
+               LOGGER.info("nodePropertyFile file " + cArgs.nodePropertyFile);\r
+\r
+               if (cArgs.inputFilterPropertyFile.isEmpty())\r
+                       cArgs.inputFilterPropertyFile = AAIConstants.AAI_HOME_ETC_SCRIPT + "/tenant_isolation/inputFilters.json";\r
+               LOGGER.info("inputFilterPropertyFile file " + cArgs.inputFilterPropertyFile);\r
+\r
+               if (cArgs.isPartialGraph)\r
+                       cArgs.dataSnapshot = cArgs.dataSnapshot+".partial";\r
+\r
+               if (!cArgs.isMultipleSnapshot) {\r
+                       validateFile(cArgs.dataSnapshot);\r
+               } else {\r
+                       // for multiple snapshots dataSnapshot + ".P" is the prefix of the\r
+                       // files\r
+                       sequenceInputStreams = validateMultipleSnapshots(cArgs.dataSnapshot);\r
+               }\r
+\r
+               LOGGER.info("Datasnapshot file " + cArgs.dataSnapshot);\r
+               AAIConfig.init();\r
+\r
+               urlBase = AAIConfig.get("aai.server.url.base", "");\r
+\r
+       }\r
+\r
+       public void generatePayloads() throws AAIException, IOException {\r
+\r
+               List<Map<String, List<String>>> nodeFilters = readFile(cArgs.nodePropertyFile);\r
+               /*\r
+                * Read the inputFilters which will include for each node-type the regex that needs to be \r
+                * applied and the filtered-node-type\r
+                * For eg: complex --> apply regex on cloud-region and then traverse to complex\r
+                * complex --> filtered-node-type: cloud-region, filters: include regex on cloud-region\r
+                */\r
+               /*\r
+                * Example: \r
+                * { "cloud-region" : \r
+                *               {"filtered-node-type":"cloud-region",\r
+                *                "filters": [ { "property": "cloud-owner", "regex": "att-aic" }, \r
+                *                                       { "property": "cloud-region-id", "regex": "M*" },\r
+                *                   { "property":"cloud-region-version", "regex": "aic2.5|aic3.0" }\r
+                *                 ] }, \r
+                *  "complex" : {\r
+                *              "filtered-node-type":"cloud-region", \r
+                *       "filters": [ { "property": "cloud-owner", "regex": "att-aic" }, \r
+                *                                       { "property": "cloud-region-id", "regex": "M*" },\r
+                *                   { "property":"cloud-region-version", "regex": "aic2.5|aic3.0" }\r
+                *                 ] }, \r
+                * \r
+                * } }\r
+                */\r
+               Map<String, Map<String, String>> inputFilters = readInputFilterPropertyFile(cArgs.inputFilterPropertyFile);\r
+               Map<String, String> filteredNodeTypes = findFilteredNodeTypes(cArgs.inputFilterPropertyFile);\r
+               // Read the input filter criteria\r
+               LOGGER.info("Load the Graph");\r
+\r
+               this.loadGraph();\r
+               LOGGER.info("Generate payload");\r
+               this.generatePayload(nodeFilters, inputFilters, filteredNodeTypes);\r
+               LOGGER.info("Close graph");\r
+               this.closeGraph();\r
+\r
+       }\r
+\r
+       private List<Map<String, List<String>>> readFile(String inputFile) throws IOException {\r
+\r
+               // validate that we can read the inputFile\r
+               validateFile(inputFile);\r
+\r
+               InputStream is = new FileInputStream(inputFile);\r
+               Scanner scanner = new Scanner(is);\r
+               String jsonFile = scanner.useDelimiter("\\Z").next();\r
+               scanner.close();\r
+\r
+               List<Map<String, List<String>>> allNodes = new ArrayList<>();\r
+               Map<String, List<String>> filterCousins = new HashMap<>();\r
+               Map<String, List<String>> filterParents = new HashMap<>();\r
+\r
+               ObjectMapper mapper = new ObjectMapper();\r
+\r
+               JsonNode rootNode = mapper.readTree(jsonFile);\r
+\r
+               Iterator<Entry<String, JsonNode>> nodeFields = rootNode.getFields();\r
+\r
+               while (nodeFields.hasNext()) {\r
+                       Entry<String, JsonNode> entry = nodeFields.next();\r
+                       String nodeType = entry.getKey();\r
+                       JsonNode nodeProperty = entry.getValue();\r
+\r
+                       JsonNode cousinFilter = nodeProperty.path("cousins");\r
+                       JsonNode parentFilter = nodeProperty.path("parents");\r
+                       List<String> cousins = new ObjectMapper().readValue(cousinFilter.traverse(),\r
+                                       new TypeReference<ArrayList<String>>() {\r
+                                       });\r
+\r
+                       List<String> parents = new ObjectMapper().readValue(parentFilter.traverse(),\r
+                                       new TypeReference<ArrayList<String>>() {\r
+                                       });\r
+                       for (String cousin : cousins) {\r
+                               LOGGER.info("Cousins-Filtered " + cousin);\r
+                       }\r
+                       for (String parent : parents) {\r
+                               LOGGER.info("Parents-Filtered " + parent);\r
+                       }\r
+                       filterCousins.put(nodeType, cousins);\r
+                       filterParents.put(nodeType, parents);\r
+\r
+               }\r
+\r
+               allNodes.add(filterCousins);\r
+               allNodes.add(filterParents);\r
+               return allNodes;\r
+\r
+       }\r
+\r
+ /* Example:\r
+{\r
+  "cloud-region" : {\r
+      "filtered-node-type" :"cloud-region",\r
+      "filters": [\r
+             {\r
+                 "property": "cloud-owner",\r
+                "regex": "att-aic"\r
+             },\r
+             {\r
+                 "property": "cloud-region-id",\r
+                "regex": "M*"\r
+             },\r
+             {\r
+                 "property": "cloud-region-version",\r
+                "regex": "aic2.5|aic3.0"\r
+             }\r
+    ]\r
+  },\r
+  "complex" : {\r
+           "filters":[\r
+           ]\r
+           \r
+  }\r
+}\r
+*/\r
+       private Map<String, Map<String, String>> readInputFilterPropertyFile(String inputFile) throws IOException {\r
+\r
+               validateFile(inputFile);\r
+\r
+               InputStream is = new FileInputStream(inputFile);\r
+               Scanner scanner = new Scanner(is);\r
+               String jsonFile = scanner.useDelimiter("\\Z").next();\r
+               scanner.close();\r
+\r
+               Map<String, Map<String, String>> propToRegex = new HashMap<String, Map<String, String>>();\r
+\r
+               ObjectMapper mapper = new ObjectMapper();\r
+\r
+               JsonNode rootNode = mapper.readTree(jsonFile);\r
+\r
+               Iterator<Entry<String, JsonNode>> nodeFields = rootNode.getFields();\r
+\r
+               while (nodeFields.hasNext()) {\r
+                       Entry<String, JsonNode> entry = nodeFields.next();\r
+                       String nodeType = entry.getKey();\r
+                       JsonNode nodeProperty = entry.getValue();\r
+\r
+                       JsonNode filter = nodeProperty.path("filters");\r
+                       List<JsonNode> filterMap = new ObjectMapper().readValue(filter.traverse(),\r
+                                       new TypeReference<ArrayList<JsonNode>>() {\r
+                                       });\r
+                       HashMap<String, String> filterMaps = new HashMap<String, String>();\r
+                       for (JsonNode n : filterMap) {\r
+                               filterMaps.put(n.get("property").asText(), n.get("regex").asText());\r
+                       }\r
+\r
+                       propToRegex.put(nodeType, filterMaps);\r
+               }\r
+               return (propToRegex);\r
+       }\r
+\r
+       private Map<String, String> findFilteredNodeTypes(String inputFile) throws IOException {\r
+\r
+               validateFile(inputFile);\r
+\r
+               InputStream is = new FileInputStream(inputFile);\r
+               Scanner scanner = new Scanner(is);\r
+               String jsonFile = scanner.useDelimiter("\\Z").next();\r
+               scanner.close();\r
+\r
+               Map<String, String> filteredNodeTypes = new HashMap<String, String>();\r
+\r
+               ObjectMapper mapper = new ObjectMapper();\r
+\r
+               JsonNode rootNode = mapper.readTree(jsonFile);\r
+\r
+               Iterator<Entry<String, JsonNode>> nodeFields = rootNode.getFields();\r
+\r
+               while (nodeFields.hasNext()) {\r
+                       Entry<String, JsonNode> entry = nodeFields.next();\r
+                       String nodeType = entry.getKey();\r
+                       JsonNode nodeProperty = entry.getValue();\r
+\r
+                       JsonNode filter = nodeProperty.path("filtered-node-type");\r
+\r
+                       filteredNodeTypes.put(nodeType, filter.asText());\r
+               }\r
+               return (filteredNodeTypes);\r
+       }\r
+\r
+       public void loadGraph() throws IOException {\r
+\r
+               loadGraphIntoMemory();\r
+               buildDbEngine();\r
+\r
+       }\r
+\r
+       private void loadGraphIntoMemory() throws IOException {\r
+               if (!(cArgs.isMultipleSnapshot)) {\r
+                       inMemGraph = new InMemoryGraph.Builder().build(cArgs.dataSnapshot, cArgs.config, cArgs.schemaEnabled,\r
+                                       cArgs.isPartialGraph);\r
+               } else {\r
+                       inMemGraph = new InMemoryGraph.Builder().build(sequenceInputStreams, cArgs.config, cArgs.schemaEnabled,\r
+                                       cArgs.isPartialGraph);\r
+               }\r
+       }\r
+\r
+       private void buildDbEngine() {\r
+               // TODO : parametrise version\r
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);\r
+\r
+               dbEngine = new InMemoryDBEngine(queryStyle, type, loader, inMemGraph.getGraph());\r
+               dbEngine.startTransaction();\r
+       }\r
+\r
+       private void generatePayload(List<Map<String, List<String>>> nodeFilters,\r
+                       Map<String, Map<String, String>> inputFilters, Map<String, String> filteredNodeTypes)\r
+                       throws AAIException, IOException {\r
+\r
+               Map<String, List<String>> filterCousinsMap = nodeFilters.get(0);\r
+               Map<String, List<String>> filterParentsMap = nodeFilters.get(1);\r
+       Set<String> nodeTypes = filterCousinsMap.keySet();\r
+\r
+               for (String nodeType : nodeTypes) {\r
+                       if ("DMAAP-MR".equals(cArgs.format)) {\r
+                               bw = createFile(nodeType + ".json");\r
+                       }\r
+                       List<String> filterCousins = filterCousinsMap.get(nodeType);\r
+                       List<String> filterParents = filterParentsMap.get(nodeType);\r
+                       Map<String, String> nodeInputFilterMap = inputFilters.get(nodeType);\r
+                       String filteredNodeType = nodeType;\r
+                       if(filteredNodeTypes.get(nodeType) != null && !filteredNodeTypes.get(nodeType).isEmpty())\r
+                               filteredNodeType = filteredNodeTypes.get(nodeType);\r
+                       readVertices(nodeType, filterCousins, filterParents, nodeInputFilterMap, filteredNodeType);\r
+                       if(bw != null)\r
+                               bw.close();\r
+                       LOGGER.info("All Done-" + nodeType);\r
+               }\r
+\r
+       }\r
+\r
+       private BufferedWriter createFile(String outfileName) throws IOException {\r
+               // FileLocation\r
+               String fileName = outfileName;\r
+               File outFile = new File(fileName);\r
+               FileWriter fw = null;\r
+               LOGGER.info(" Will write to " + fileName);\r
+               try {\r
+                       fw = new FileWriter(outFile.getAbsoluteFile());\r
+               } catch (IOException i) {\r
+                       String emsg = "Unable to write to " + fileName + " Exception = " + i.getMessage();\r
+                       LOGGER.error(emsg);\r
+                       System.out.println(emsg);\r
+                       throw i;\r
+               }\r
+               return new BufferedWriter(fw);\r
+       }\r
+\r
+       private void createDirectory(String dirName) throws IOException {\r
+               // FileLocation\r
+               Path pathDir = null;\r
+               try {\r
+                       pathDir = Paths.get(dirName);\r
+               } catch (InvalidPathException i) {\r
+                       String emsg = "Directory " + dirName + " could not be found.";\r
+                       LOGGER.error(emsg);\r
+                       System.out.println(emsg);\r
+                       taskExit();\r
+               }\r
+               try {\r
+                       Files.createDirectories(pathDir);\r
+               } catch (Exception e) {\r
+                       String emsg = "Directory " + dirName + " could not be created. Exception = " + e.getMessage();\r
+                       LOGGER.error(emsg);\r
+                       System.out.println(emsg);\r
+                       taskExit();\r
+               }\r
+       }\r
+\r
+       public void readVertices(String nodeType, List<String> filterCousins, List<String> filterParents,\r
+                       Map<String, String> nodeInputFilters, String filteredNodeType) throws AAIException, IOException {\r
+\r
+               DBSerializer serializer = new DBSerializer(version, dbEngine, introspectorFactoryType, "sourceOfTruth");\r
+\r
+               /*\r
+                * Start with nodeType you need to filter and then traverse to the actual nodeType\r
+                */\r
+               GraphTraversal<Vertex, Vertex> gtraversal = inMemGraph.getGraph().traversal().V().has("aai-node-type",\r
+                               filteredNodeType);\r
+\r
+               \r
+               // input regex\r
+               if (nodeInputFilters != null && (!nodeInputFilters.isEmpty())) {\r
+                       for (Map.Entry<String, String> entry : nodeInputFilters.entrySet()) {\r
+                               String property = entry.getKey();\r
+                               String regex = entry.getValue();\r
+                               Pattern pa = Pattern.compile(regex);\r
+\r
+                               gtraversal = gtraversal.has(property, P.test((t, p) -> {\r
+                                       Matcher m = ((Pattern) p).matcher((CharSequence) t);\r
+                                       boolean b = m.matches();\r
+                                       return b;\r
+                               }, pa));\r
+                       }\r
+               }\r
+\r
+               /*\r
+                * Node types reached through a filtered parent node type (for example tenant, availability-zone, complex, zone, pserver) are handled here
+                */\r
+               if (!filteredNodeType.equals(nodeType)) {\r
+\r
+                       EdgeRuleQuery treeEdgeRuleQuery = new EdgeRuleQuery\r
+                                       .Builder(filteredNodeType, nodeType)\r
+                                       .edgeType(EdgeType.TREE)\r
+                                       .build();\r
+\r
+                       EdgeRuleQuery cousinEdgeQuery = new EdgeRuleQuery\r
+                                       .Builder(filteredNodeType, nodeType)\r
+                                       .edgeType(EdgeType.COUSIN)\r
+                                       .build();\r
+\r
+                       EdgeRule rule = null;\r
+                       boolean hasTreeEdgeRule = true;\r
+\r
+                       try {\r
+                               rule = edgeRules.getRule(treeEdgeRuleQuery);\r
+                       } catch (EdgeRuleNotFoundException | AmbiguousRuleChoiceException e) {\r
+                               hasTreeEdgeRule = false;\r
+                       }\r
+\r
+                       if(!hasTreeEdgeRule) {\r
+                               try {\r
+                                       rule = edgeRules.getRule(cousinEdgeQuery);\r
+                               } catch (EdgeRuleNotFoundException | AmbiguousRuleChoiceException e) {\r
+                                   LOGGER.error("Unable to get a tree or cousin edge between {} and {}", filteredNodeType, nodeType);\r
+                                   return;\r
+                               }\r
+                       }\r
+\r
+                       if (rule.getDirection().toString().equals(AAIDirection.OUT.toString())) {\r
+                               gtraversal.out(rule.getLabel()).has("aai-node-type", nodeType);\r
+                       } else {\r
+                               gtraversal.in(rule.getLabel()).has("aai-node-type", nodeType);\r
+                       }\r
+\r
+               }\r
+\r
+               String dirName = cArgs.output + AAIConstants.AAI_FILESEP + nodeType + AAIConstants.AAI_FILESEP;\r
+               createDirectory(dirName);\r
+               // TODO: Formatter\r
+\r
+               if ("DMAAP-MR".equals(cArgs.format)) {\r
+                       while (gtraversal.hasNext()) {\r
+                               // Lazily create the per-node-type output file if it has not been opened yet
+                               if (bw == null)
+                                       bw = createFile(nodeType + ".json");
+                               Vertex node = gtraversal.next();\r
+                               Introspector nodeObj = serializer.getLatestVersionView(node);\r
+                               createPayloadForDmaap(node, nodeObj);\r
+                       }\r
+               } else {\r
+                       if ("PAYLOAD".equals(cArgs.format)) {\r
+                               int counter = 0;\r
+                               while (gtraversal.hasNext()) {\r
+                                       Vertex node = gtraversal.next();\r
+                                       try {\r
+                                               counter++;\r
+                                               String filename = dirName + counter + "-" + nodeType + ".json";\r
+                                               bw = createFile(filename);\r
+                                               Introspector obj = loader.introspectorFromName(nodeType);\r
+                                               Set<Vertex> seen = new HashSet<>();\r
+                                               int depth = AAIProperties.MAXIMUM_DEPTH;\r
+                                               boolean nodeOnly = false;\r
+\r
+                                               Tree<Element> tree = dbEngine.getQueryEngine().findSubGraph(node, depth, nodeOnly);\r
+                                               TreeBackedVertex treeVertex = new TreeBackedVertex(node, tree);\r
+                                               serializer.dbToObjectWithFilters(obj, treeVertex, seen, depth, nodeOnly, filterCousins,\r
+                                                               filterParents);\r
+                                               createPayloadForPut(obj);\r
+                                               if(bw != null)\r
+                                                       bw.close();\r
+\r
+                                               URI uri = serializer.getURIForVertex(node);\r
+                                               String filenameWithUri = dirName + counter + "-" + nodeType + ".txt";\r
+                                               bw = createFile(filenameWithUri);\r
+                                               bw.write(uri.toString());\r
+                                               bw.newLine();\r
+                                               bw.close();\r
+                                       } catch (Exception e) {
+                                               String emsg = "Caught exception while processing [" + counter + "-" + nodeType + "] continuing";
+                                               System.out.println(emsg);
+                                               LOGGER.error(emsg + " " + LogFormatTools.getStackTop(e));
+                                       }
+                               }\r
+                       }\r
+               }\r
+\r
+       }\r
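As a side note on the input-filter handling above: each configured property/regex pair is applied to the traversal as a P.test predicate. A minimal, hypothetical sketch of what a single entry amounts to (the node type "pserver", the property "hostname" and the pattern are illustrative only, not taken from this change):

// Hypothetical single-entry equivalent of the nodeInputFilters loop in readVertices,
// e.g. the entry ("hostname", "lab-.*") applied to pserver vertices.
Pattern pa = Pattern.compile("lab-.*");
GraphTraversal<Vertex, Vertex> g = inMemGraph.getGraph().traversal()
        .V().has("aai-node-type", "pserver")
        .has("hostname", P.test((t, p) -> ((Pattern) p).matcher((CharSequence) t).matches(), pa));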
+\r
+       public void createPayloadForPut(Introspector nodeObj) throws IOException {\r
+\r
+               String entityJson = nodeObj.marshal(false);\r
+               ObjectMapper mapper = new ObjectMapper();\r
+\r
+               ObjectNode rootNode = (ObjectNode) mapper.readTree(entityJson);\r
+               rootNode.remove("resource-version");\r
+\r
+               bw.newLine();\r
+               bw.write(rootNode.toString());\r
+               bw.newLine();\r
+       }\r
+\r
+       public void createPayloadForDmaap(Vertex node, Introspector nodeObj)\r
+                       throws AAIException, UnsupportedEncodingException {\r
+\r
+               DBSerializer serializer = new DBSerializer(version, dbEngine, introspectorFactoryType, "sourceOfTruth");\r
+\r
+               URI uri = serializer.getURIForVertex(node);\r
+\r
+               String sourceOfTruth = "";\r
+               HashMap<String, Introspector> relatedVertices = new HashMap<>();\r
+               List<Vertex> vertexChain = dbEngine.getQueryEngine().findParents(node);\r
+\r
+               for (Vertex vertex : vertexChain) {\r
+                       try {\r
+\r
+                               Introspector vertexObj = serializer.getVertexProperties(vertex);\r
+\r
+                               relatedVertices.put(vertexObj.getObjectId(), vertexObj);\r
+                       } catch (AAIUnknownObjectException e) {\r
+                               LOGGER.warn("Unable to get vertex properties, partial list of related vertices returned");\r
+                       }\r
+\r
+               }\r
+\r
+               String transactionId = "TXID";\r
+               createNotificationEvent(transactionId, sourceOfTruth, uri, nodeObj, relatedVertices);\r
+\r
+       }\r
+\r
+       public void createNotificationEvent(String transactionId, String sourceOfTruth, URI uri, Introspector obj,\r
+                       Map<String, Introspector> relatedObjects) throws AAIException, UnsupportedEncodingException {\r
+\r
+               String action = "CREATE";\r
+               final Introspector notificationEvent = loader.introspectorFromName("notification-event");\r
+\r
+               try {\r
+                       Introspector eventHeader = loader.introspectorFromName("notification-event-header");\r
+                       URIToObject parser = new URIToObject(loader, uri, (HashMap) relatedObjects);\r
+\r
+                       String entityLink = urlBase + version + uri;\r
+\r
+                       notificationEvent.setValue("cambria-partition", "AAI");\r
+\r
+                       eventHeader.setValue("entity-link", entityLink);\r
+                       eventHeader.setValue("action", action);\r
+                       eventHeader.setValue("entity-type", obj.getDbName());\r
+                       eventHeader.setValue("top-entity-type", parser.getTopEntityName());\r
+                       eventHeader.setValue("source-name", sourceOfTruth);\r
+                       eventHeader.setValue("version", version.toString());\r
+                       eventHeader.setValue("id", transactionId);\r
+                       eventHeader.setValue("event-type", "AAI-BASELINE");\r
+                       if (eventHeader.getValue("domain") == null) {\r
+                               eventHeader.setValue("domain", AAIConfig.get("aai.notificationEvent.default.domain", "UNK"));\r
+                       }\r
+\r
+                       if (eventHeader.getValue("sequence-number") == null) {\r
+                               eventHeader.setValue("sequence-number",\r
+                                               AAIConfig.get("aai.notificationEvent.default.sequenceNumber", "UNK"));\r
+                       }\r
+\r
+                       if (eventHeader.getValue("severity") == null) {\r
+                               eventHeader.setValue("severity", AAIConfig.get("aai.notificationEvent.default.severity", "UNK"));\r
+                       }\r
+\r
+                       if (eventHeader.getValue("id") == null) {\r
+                               eventHeader.setValue("id", genDate2() + "-" + UUID.randomUUID().toString());\r
+\r
+                       }\r
+\r
+                       if (eventHeader.getValue("timestamp") == null) {\r
+                               eventHeader.setValue("timestamp", genDate());\r
+                       }\r
+\r
+                       List<Object> parentList = parser.getParentList();\r
+                       parentList.clear();\r
+\r
+                       if (!parser.getTopEntity().equals(parser.getEntity())) {\r
+                               Introspector child;\r
+                               String json = obj.marshal(false);\r
+                               child = parser.getLoader().unmarshal(parser.getEntity().getName(), json);\r
+                               parentList.add(child.getUnderlyingObject());\r
+                       }\r
+\r
+                       final Introspector eventObject;\r
+\r
+                       String json = "";\r
+                       if (parser.getTopEntity().equals(parser.getEntity())) {\r
+                               json = obj.marshal(false);\r
+                               eventObject = loader.unmarshal(obj.getName(), json);\r
+                       } else {\r
+                               json = parser.getTopEntity().marshal(false);\r
+\r
+                               eventObject = loader.unmarshal(parser.getTopEntity().getName(), json);\r
+                       }\r
+                       notificationEvent.setValue("event-header", eventHeader.getUnderlyingObject());\r
+                       notificationEvent.setValue("entity", eventObject.getUnderlyingObject());\r
+\r
+                       String entityJson = notificationEvent.marshal(false);\r
+\r
+                       bw.newLine();\r
+                       bw.write(entityJson);\r
+\r
+               } catch (AAIUnknownObjectException e) {\r
+                       LOGGER.error("Fatal error - notification-event-header object not found!");\r
+               } catch (Exception e) {\r
+                       LOGGER.error("Unmarshalling error occurred while generating Notification " + LogFormatTools.getStackTop(e));\r
+               }\r
+       }\r
+\r
+       private void closeGraph() {\r
+               inMemGraph.getGraph().tx().rollback();\r
+               inMemGraph.getGraph().close();\r
+       }\r
+\r
+       public static String genDate() {\r
+               Date date = new Date();\r
+               DateFormat formatter = new SimpleDateFormat("yyyyMMdd-HH:mm:ss:SSS");\r
+               return formatter.format(date);\r
+       }\r
+\r
+       public static String genDate2() {\r
+               Date date = new Date();\r
+               DateFormat formatter = new SimpleDateFormat("yyyyMMddHHmmss");\r
+               return formatter.format(date);\r
+       }\r
+\r
+       private void validateFile(String filename) {\r
+               File f = new File(filename);\r
+               if (!f.exists()) {\r
+                       String emsg = "File " + filename + " could not be found.";\r
+                       LOGGER.error(emsg);\r
+                       System.out.println(emsg);\r
+                       taskExit();\r
+               } else if (!f.canRead()) {\r
+                       String emsg = "File " + filename + " could not be read.";\r
+                       LOGGER.error(emsg);\r
+                       System.out.println(emsg);\r
+                       taskExit();\r
+               } else if (f.length() == 0) {\r
+                       String emsg = "File " + filename + " had no data.";\r
+                       LOGGER.error(emsg);\r
+                       System.out.println(emsg);\r
+                       taskExit();\r
+               }\r
+       }\r
+\r
+       private InputStream validateMultipleSnapshots(String filenamePrefix) {\r
+               if (filenamePrefix == null || filenamePrefix.length() == 0) {\r
+                       String emsg = "No snapshot path was provided.";\r
+                       LOGGER.error(emsg);\r
+                       System.out.println(emsg);\r
+                       taskExit();\r
+               }\r
+               String targetDir = ".";\r
+               int lastSeparator = filenamePrefix.lastIndexOf(File.separator);\r
+\r
+               LOGGER.info("File separator=[" + File.separator + "] lastSeparator=" + lastSeparator + " filenamePrefix="\r
+                               + filenamePrefix);\r
+               if (lastSeparator >= 0) {\r
+                       targetDir = filenamePrefix.substring(0, lastSeparator);\r
+                       LOGGER.info("targetDir=" + targetDir);\r
+               }\r
+               if (targetDir.length() == 0) {\r
+                       String emsg = "No snapshot directory was found in path:" + filenamePrefix;\r
+                       LOGGER.error(emsg);\r
+                       System.out.println(emsg);\r
+                       taskExit();\r
+               }\r
+               String prefix = filenamePrefix.substring(lastSeparator + 1);\r
+               if (prefix == null || prefix.length() == 0) {\r
+                       String emsg = "No snapshot file prefix was provided.";\r
+                       LOGGER.error(emsg);\r
+                       System.out.println(emsg);\r
+                       taskExit();\r
+               }\r
+\r
+               ArrayList<File> snapFilesArr = new ArrayList<File>();\r
+               String thisSnapPrefix = prefix + ".P";\r
+               File fDir = new File(targetDir); // Snapshot directory\r
+               File[] allFilesArr = fDir.listFiles();
+               if (allFilesArr == null) {
+                       String emsg = "Snapshot directory " + targetDir + " could not be read.";
+                       LOGGER.error(emsg);
+                       System.out.println(emsg);
+                       taskExit();
+               }
+               for (File snapFile : allFilesArr) {
+                       String snapFName = snapFile.getName();\r
+                       if (snapFName.startsWith(thisSnapPrefix)) {\r
+                               snapFilesArr.add(snapFile);\r
+                       }\r
+               }\r
+\r
+               if (snapFilesArr.isEmpty()) {\r
+                       String fullFName = targetDir + AAIConstants.AAI_FILESEP + thisSnapPrefix;\r
+                       String emsg = "Snapshot files " + fullFName + "* could not be found.";\r
+                       LOGGER.error(emsg);\r
+                       System.out.println(emsg);\r
+                       taskExit();\r
+               }\r
+\r
+               int fCount = snapFilesArr.size();\r
+               Vector<InputStream> inputStreamsV = new Vector<>();\r
+               for (int i = 0; i < fCount; i++) {\r
+                       File f = snapFilesArr.get(i);\r
+                       String fname = f.getName();\r
+                       if (!f.canRead()) {\r
+                               String emsg = "Snapshot file " + fname + " could not be read.";\r
+                               LOGGER.error(emsg);\r
+                               System.out.println(emsg);\r
+                               taskExit();\r
+                       } else if (f.length() == 0) {\r
+                               String emsg = "Snapshot file " + fname + " had no data.";\r
+                               LOGGER.error(emsg);\r
+                               System.out.println(emsg);\r
+                               taskExit();\r
+                       }\r
+                       String fullFName = targetDir + AAIConstants.AAI_FILESEP + fname;\r
+                       InputStream fis = null;\r
+                       try {\r
+                               fis = new FileInputStream(fullFName);\r
+                       } catch (FileNotFoundException e) {\r
+                               // should not happen at this point\r
+                               String emsg = "Snapshot file " + fullFName + " could not be found";\r
+                               LOGGER.error(emsg);\r
+                               System.out.println(emsg);\r
+                               taskExit();\r
+                       }\r
+                       inputStreamsV.add(fis);\r
+               }\r
+               // Chain the collected snapshot streams into a single SequenceInputStream
+               return new SequenceInputStream(inputStreamsV.elements());
+       }\r
+\r
+       public InMemoryGraph getInMemGraph() {\r
+               return inMemGraph;\r
+       }\r
+\r
+       public void setInMemGraph(InMemoryGraph inMemGraph) {\r
+               this.inMemGraph = inMemGraph;\r
+       }\r
+}\r
+\r
+class CommandLineArgs {\r
+\r
+       @Parameter(names = "--help", help = true)\r
+       public boolean help;\r
+\r
+       @Parameter(names = "-d", description = "snapshot file to be loaded", required = true)\r
+       public String dataSnapshot;\r
+\r
+       @Parameter(names = "-s", description = "whether schema validation is enabled", arity = 1)
+       public boolean schemaEnabled = true;\r
+\r
+       @Parameter(names = "-c", description = "location of configuration file")\r
+       public String config = "";\r
+\r
+       @Parameter(names = "-o", description = "output location")\r
+       public String output = "";\r
+\r
+       @Parameter(names = "-f", description = "format of output")\r
+       public String format = "PAYLOAD";\r
+\r
+       @Parameter(names = "-n", description = "Node input file")\r
+       public String nodePropertyFile = "";\r
+\r
+       @Parameter(names = "-m", description = "whether the snapshot is split across multiple files", arity = 1)
+       public boolean isMultipleSnapshot = false;\r
+\r
+       @Parameter(names = "-i", description = "input filter configuration file")\r
+       public String inputFilterPropertyFile = "";\r
+\r
+       @Parameter(names = "-p", description = "Use the partial graph", arity = 1)\r
+       public boolean isPartialGraph = true;\r
+\r
+}\r
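For orientation, a hypothetical sketch of how the options declared above would be parsed with JCommander; the argument values, file paths and the direct JCommander call are illustrative assumptions, not part of this change:

// Sketch: parse the DynamicPayloadGenerator options with JCommander (placeholder values).
CommandLineArgs cArgs = new CommandLineArgs();
String[] argv = {
        "-d", "/opt/app/snapshots/dataSnapshot.graphSON",
        "-n", "/opt/app/etc/nodes.json",
        "-i", "/opt/app/etc/inputFilters.json",
        "-o", "/opt/app/payloads",
        "-f", "PAYLOAD",
        "-m", "true",
        "-p", "true"
};
new com.beust.jcommander.JCommander(cArgs, argv);
// cArgs.format is now "PAYLOAD"; cArgs.isMultipleSnapshot and cArgs.isPartialGraph are true.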
diff --git a/src/main/java/org/onap/aai/dbgen/GraphSONPartialIO.java b/src/main/java/org/onap/aai/dbgen/GraphSONPartialIO.java
new file mode 100644 (file)
index 0000000..915db69
--- /dev/null
@@ -0,0 +1,158 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dbgen;
+
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.io.Io;
+import org.apache.tinkerpop.gremlin.structure.io.IoRegistry;
+import org.apache.tinkerpop.gremlin.structure.io.Mapper;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONMapper;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONReader;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONVersion;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONWriter;
+
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.Optional;
+import java.util.function.Consumer;
+
+/**
+ * Constructs GraphSON IO implementations given a {@link Graph} and {@link IoRegistry}. Implementers of the {@link Graph}
+ * interfaces should see the {@link GraphSONMapper} for information on the expectations for the {@link IoRegistry}.
+ *
+ * @author Stephen Mallette (http://stephen.genoprime.com)
+ */
+public final class GraphSONPartialIO implements Io<GraphSONPartialReader.Builder, GraphSONWriter.Builder, GraphSONMapper.Builder> {
+    private final IoRegistry registry;
+    private final Graph graph;
+    private final Optional<Consumer<Mapper.Builder>> onMapper;
+    private final GraphSONVersion version;
+
+    private GraphSONPartialIO(final Builder builder) {
+        this.registry = builder.registry;
+        this.graph = builder.graph;
+        this.onMapper = Optional.ofNullable(builder.onMapper);
+        this.version = builder.version;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public GraphSONPartialReader.Builder reader() {
+        return GraphSONPartialReader.build().mapper(mapper().create());
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public GraphSONWriter.Builder writer() {
+        return GraphSONWriter.build().mapper(mapper().create());
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public GraphSONMapper.Builder mapper() {
+        final GraphSONMapper.Builder builder = (null == this.registry) ?
+                GraphSONMapper.build().version(version) : GraphSONMapper.build().version(version).addRegistry(this.registry);
+        onMapper.ifPresent(c -> c.accept(builder));
+        return builder;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void writeGraph(final String file) throws IOException {
+        try (final OutputStream out = new FileOutputStream(file)) {
+            writer().create().writeGraph(out, graph);
+        }
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void readGraph(final String file) throws IOException {
+        try (final InputStream in = new FileInputStream(file)) {
+            reader().create().readGraph(in, graph);
+        }
+    }
+
+    /**
+     * Create a new builder using the default version of GraphSON.
+     */
+    public static Io.Builder<GraphSONPartialIO> build() {
+        return build(GraphSONVersion.V1_0);
+    }
+
+    /**
+     * Create a new builder using the specified version of GraphSON.
+     */
+    public static Io.Builder<GraphSONPartialIO> build(final GraphSONVersion version) {
+        return new Builder(version);
+    }
+
+    public final static class Builder implements Io.Builder<GraphSONPartialIO> {
+
+        private IoRegistry registry = null;
+        private Graph graph;
+        private Consumer<Mapper.Builder> onMapper = null;
+        private final GraphSONVersion version;
+
+        Builder(final GraphSONVersion version) {
+            this.version = version;
+        }
+
+        /**
+         * @deprecated As of release 3.2.2, replaced by {@link #onMapper(Consumer)}.
+         */
+        @Deprecated
+        @Override
+        public Io.Builder<GraphSONPartialIO> registry(final IoRegistry registry) {
+            this.registry = registry;
+            return this;
+        }
+
+        @Override
+        public Io.Builder<? extends Io> onMapper(final Consumer<Mapper.Builder> onMapper) {
+            this.onMapper = onMapper;
+            return this;
+        }
+
+        @Override
+        public Io.Builder<GraphSONPartialIO> graph(final Graph g) {
+            this.graph = g;
+            return this;
+        }
+
+        @Override
+        public GraphSONPartialIO create() {
+            if (null == graph) throw new IllegalArgumentException("The graph argument was not specified");
+            return new GraphSONPartialIO(this);
+        }
+    }
+}
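A minimal usage sketch for the Io implementation above; the TinkerGraph instance and the snapshot file name are placeholders, and nothing in the sketch is part of this change:

import java.io.FileInputStream;
import java.io.InputStream;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONVersion;
import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph;
import org.onap.aai.dbgen.GraphSONPartialIO;
import org.onap.aai.dbgen.GraphSONPartialReader;

public class GraphSONPartialIOExample {
    public static void main(String[] args) throws Exception {
        // Any TinkerPop Graph works as the target; TinkerGraph is only a stand-in here.
        Graph graph = TinkerGraph.open();
        // Graph.io(...) wires the graph into the builder and returns the typed Io instance.
        GraphSONPartialIO io = graph.io(GraphSONPartialIO.build(GraphSONVersion.V1_0));
        GraphSONPartialReader reader = io.reader().create();
        try (InputStream in = new FileInputStream("dataSnapshot.graphSON")) {
            reader.readGraph(in, graph);
        }
    }
}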
diff --git a/src/main/java/org/onap/aai/dbgen/GraphSONPartialReader.java b/src/main/java/org/onap/aai/dbgen/GraphSONPartialReader.java
new file mode 100644 (file)
index 0000000..ebe2180
--- /dev/null
@@ -0,0 +1,354 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dbgen;
+
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.Property;
+import org.apache.tinkerpop.gremlin.structure.T;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+import org.apache.tinkerpop.gremlin.structure.io.GraphReader;
+import org.apache.tinkerpop.gremlin.structure.io.GraphWriter;
+import org.apache.tinkerpop.gremlin.structure.io.Mapper;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONMapper;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONReader;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONTokens;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONVersion;
+import org.apache.tinkerpop.gremlin.structure.io.gryo.GryoWriter;
+import org.apache.tinkerpop.gremlin.structure.util.Attachable;
+import org.apache.tinkerpop.gremlin.structure.util.Host;
+import org.apache.tinkerpop.gremlin.structure.util.star.StarGraph;
+import org.apache.tinkerpop.gremlin.util.function.FunctionUtils;
+import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils;
+import org.apache.tinkerpop.shaded.jackson.core.type.TypeReference;
+import org.apache.tinkerpop.shaded.jackson.databind.JsonNode;
+import org.apache.tinkerpop.shaded.jackson.databind.ObjectMapper;
+import org.apache.tinkerpop.shaded.jackson.databind.node.JsonNodeType;
+import org.onap.aai.dbmap.InMemoryGraph;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.Function;
+import java.util.stream.Stream;
+
+/**
+ * This is a wrapper around the GraphSONReader class.
+ * The idea is to rewrite the methods that need A&AI-specific customization.
+ * GraphSONReader is a final class, hence the use of a wrapper
+ * instead of inheriting and overriding.
+ *
+ */
+public final class GraphSONPartialReader implements GraphReader {
+    private final ObjectMapper mapper;
+    private final long batchSize;
+    private final GraphSONVersion version;
+    private boolean unwrapAdjacencyList = false;
+    private final GraphSONReader reader;
+    
+    private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(InMemoryGraph.class);
+
+    final TypeReference<Map<String, Object>> mapTypeReference = new TypeReference<Map<String, Object>>() {
+    };
+
+    private GraphSONPartialReader(final Builder builder) {
+        mapper = builder.mapper.createMapper();
+        batchSize = builder.batchSize;
+        unwrapAdjacencyList = builder.unwrapAdjacencyList;
+        version = ((GraphSONMapper)builder.mapper).getVersion();
+        reader = GraphSONReader.build().create();
+    }
+
+    /**
+     * Read data into a {@link Graph} from output generated by any of the {@link GraphSONWriter} {@code writeVertex} or
+     * {@code writeVertices} methods or by {@link GryoWriter#writeGraph(OutputStream, Graph)}.
+     *
+     * @param inputStream a stream containing an entire graph of vertices and edges as defined by the accompanying
+     *                    {@link GraphSONWriter#writeGraph(OutputStream, Graph)}.
+     * @param graphToWriteTo the graph to write to when reading from the stream.
+     */
+    @Override
+    public void readGraph(final InputStream inputStream, final Graph graphToWriteTo) throws IOException {
+        // Dual pass - create all vertices and cache their ids, then create the edges. As long as the
+        // output does not include vertex labels, this cannot be done in a single pass.
+        LOGGER.info("Read the Partial Graph");
+        final Map<StarGraph.StarVertex, Vertex> cache = new HashMap<>();
+        final AtomicLong counter = new AtomicLong(0);
+        
+        final boolean supportsTx = graphToWriteTo.features().graph().supportsTransactions();
+        final Graph.Features.EdgeFeatures edgeFeatures = graphToWriteTo.features().edge();
+        
+        readVertexStrings(inputStream).<Vertex>map(FunctionUtils.wrapFunction(line -> readVertex(new ByteArrayInputStream(line.getBytes()), null, null, Direction.IN))).forEach(vertex -> {
+               try{
+                       final Attachable<Vertex> attachable = (Attachable<Vertex>) vertex;
+                   cache.put((StarGraph.StarVertex) attachable.get(), attachable.attach(Attachable.Method.create(graphToWriteTo)));
+                   if (supportsTx && counter.incrementAndGet() % batchSize == 0)
+                       graphToWriteTo.tx().commit();
+               }
+               catch (Exception ex) {
+                       LOGGER.error("Error in reading vertex from graphson " + vertex.toString() + ": " + ex.getMessage());
+               }
+        });
+        
+        cache.entrySet().forEach(kv -> kv.getKey().edges(Direction.IN).forEachRemaining(e -> {
+               try{
+                       // can't use a standard Attachable attach method here because we have to use the cache for those
+                   // graphs that don't support userSuppliedIds on edges.  note that outVertex/inVertex methods return
+                   // StarAdjacentVertex whose equality should match StarVertex.
+                   final Vertex cachedOutV = cache.get(e.outVertex());
+                   final Vertex cachedInV = cache.get(e.inVertex());
+                   
+                   if(cachedOutV != null  && cachedInV != null){
+                        
+                           final Edge newEdge = edgeFeatures.willAllowId(e.id()) ? cachedOutV.addEdge(e.label(), cachedInV, T.id, e.id()) : cachedOutV.addEdge(e.label(), cachedInV);
+                           e.properties().forEachRemaining(p -> newEdge.property(p.key(), p.value()));
+                       }
+                   else {
+                       // One endpoint is missing from the cache, so this edge cannot be re-created
+                       LOGGER.debug("Ghost edge from " + cachedOutV + " to " + cachedInV + " skipped");
+                   }
+                   if (supportsTx && counter.incrementAndGet() % batchSize == 0)
+                       graphToWriteTo.tx().commit();
+               }
+               catch (Exception ex) {
+                       LOGGER.error("Error in writing edge into graph " + e.toString() + ": " + ex.getMessage());
+               }
+        }));
+
+        if (supportsTx) graphToWriteTo.tx().commit();
+    }
+
+    /**
+     * Read {@link Vertex} objects from output generated by any of the {@link GraphSONWriter} {@code writeVertex} or
+     * {@code writeVertices} methods or by {@link GraphSONWriter#writeGraph(OutputStream, Graph)}.
+     *
+     * @param inputStream a stream containing at least one {@link Vertex} as defined by the accompanying
+     *                    {@link GraphWriter#writeVertices(OutputStream, Iterator, Direction)} or
+     *                    {@link GraphWriter#writeVertices(OutputStream, Iterator)} methods.
+     * @param vertexAttachMethod a function that re-attaches a {@link Vertex} to a {@link Host} object.
+     * @param edgeAttachMethod a function that re-attaches an {@link Edge} to a {@link Host} object.
+     * @param attachEdgesOfThisDirection only edges of this direction are passed to the {@code edgeMaker}.
+     */
+    @Override
+    public Iterator<Vertex> readVertices(final InputStream inputStream,
+                                         final Function<Attachable<Vertex>, Vertex> vertexAttachMethod,
+                                         final Function<Attachable<Edge>, Edge> edgeAttachMethod,
+                                         final Direction attachEdgesOfThisDirection) throws IOException {
+       // return readVertexStrings(inputStream).<Vertex>map(FunctionUtils.wrapFunction(line -> readVertex(new ByteArrayInputStream(line.getBytes()), vertexAttachMethod, edgeAttachMethod, attachEdgesOfThisDirection))).iterator();
+        return reader.readVertices(inputStream, vertexAttachMethod, edgeAttachMethod, attachEdgesOfThisDirection);
+                       
+    }
+
+    /**
+     * Read a {@link Vertex}  from output generated by any of the {@link GraphSONWriter} {@code writeVertex} or
+     * {@code writeVertices} methods or by {@link GraphSONWriter#writeGraph(OutputStream, Graph)}.
+     *
+     * @param inputStream a stream containing at least a single vertex as defined by the accompanying
+     *                    {@link GraphWriter#writeVertex(OutputStream, Vertex)}.
+     * @param vertexAttachMethod a function that re-attaches a {@link Vertex} to a {@link Host} object.
+     */
+    @Override
+    public Vertex readVertex(final InputStream inputStream, final Function<Attachable<Vertex>, Vertex> vertexAttachMethod) throws IOException {
+        return reader.readVertex(inputStream, vertexAttachMethod);
+    }
+
+    /**
+     * Read a {@link Vertex} from output generated by any of the {@link GraphSONWriter} {@code writeVertex} or
+     * {@code writeVertices} methods or by {@link GraphSONWriter#writeGraph(OutputStream, Graph)}.
+     *
+     * @param inputStream a stream containing at least one {@link Vertex} as defined by the accompanying
+     *                    {@link GraphWriter#writeVertices(OutputStream, Iterator, Direction)} method.
+     * @param vertexAttachMethod a function that re-attaches a {@link Vertex} to a {@link Host} object.
+     * @param edgeAttachMethod a function that re-attaches an {@link Edge} to a {@link Host} object.
+     * @param attachEdgesOfThisDirection only edges of this direction are passed to the {@code edgeMaker}.
+     */
+    @Override
+    public Vertex readVertex(final InputStream inputStream,
+                             final Function<Attachable<Vertex>, Vertex> vertexAttachMethod,
+                             final Function<Attachable<Edge>, Edge> edgeAttachMethod,
+                             final Direction attachEdgesOfThisDirection) throws IOException {
+       
+       return reader.readVertex(inputStream, vertexAttachMethod, edgeAttachMethod, attachEdgesOfThisDirection);
+    }
+
+    /**
+     * Read an {@link Edge} from output generated by {@link GraphSONWriter#writeEdge(OutputStream, Edge)} or via
+     * an {@link Edge} passed to {@link GraphSONWriter#writeObject(OutputStream, Object)}.
+     *
+     * @param inputStream a stream containing at least one {@link Edge} as defined by the accompanying
+     *                    {@link GraphWriter#writeEdge(OutputStream, Edge)} method.
+     * @param edgeAttachMethod a function that re-attaches an {@link Edge} to a {@link Host} object.
+     */
+    @Override
+    public Edge readEdge(final InputStream inputStream, final Function<Attachable<Edge>, Edge> edgeAttachMethod) throws IOException {
+        /*if (version == GraphSONVersion.V1_0) {
+            final Map<String, Object> edgeData = mapper.readValue(inputStream, mapTypeReference);
+
+            final Map<String, Object> edgeProperties = edgeData.containsKey(GraphSONTokens.PROPERTIES) ?
+                    (Map<String, Object>) edgeData.get(GraphSONTokens.PROPERTIES) : Collections.EMPTY_MAP;
+            final DetachedEdge edge = new DetachedEdge(edgeData.get(GraphSONTokens.ID),
+                    edgeData.get(GraphSONTokens.LABEL).toString(),
+                    edgeProperties,
+                    Pair.with(edgeData.get(GraphSONTokens.OUT), edgeData.get(GraphSONTokens.OUT_LABEL).toString()),
+                    Pair.with(edgeData.get(GraphSONTokens.IN), edgeData.get(GraphSONTokens.IN_LABEL).toString()));
+
+            return edgeAttachMethod.apply(edge);
+        } else {
+            return edgeAttachMethod.apply((DetachedEdge) mapper.readValue(inputStream, Edge.class));
+        }*/
+       return reader.readEdge(inputStream, edgeAttachMethod);
+    }
+
+    /**
+     * Read a {@link VertexProperty} from output generated by
+     * {@link GraphSONWriter#writeVertexProperty(OutputStream, VertexProperty)} or via an {@link VertexProperty} passed
+     * to {@link GraphSONWriter#writeObject(OutputStream, Object)}.
+     *
+     * @param inputStream a stream containing at least one {@link VertexProperty} as written by the accompanying
+     *                    {@link GraphWriter#writeVertexProperty(OutputStream, VertexProperty)} method.
+     * @param vertexPropertyAttachMethod a function that re-attaches a {@link VertexProperty} to a
+     *                                   {@link Host} object.
+     */
+    @Override
+    public VertexProperty readVertexProperty(final InputStream inputStream,
+                                             final Function<Attachable<VertexProperty>, VertexProperty> vertexPropertyAttachMethod) throws IOException {
+        /*if (version == GraphSONVersion.V1_0) {
+            final Map<String, Object> vpData = mapper.readValue(inputStream, mapTypeReference);
+            final Map<String, Object> metaProperties = (Map<String, Object>) vpData.get(GraphSONTokens.PROPERTIES);
+            final DetachedVertexProperty vp = new DetachedVertexProperty(vpData.get(GraphSONTokens.ID),
+                    vpData.get(GraphSONTokens.LABEL).toString(),
+                    vpData.get(GraphSONTokens.VALUE), metaProperties);
+            return vertexPropertyAttachMethod.apply(vp);
+        } else {
+            return vertexPropertyAttachMethod.apply((DetachedVertexProperty) mapper.readValue(inputStream, VertexProperty.class));
+        }*/
+       return reader.readVertexProperty(inputStream, vertexPropertyAttachMethod);
+    }
+
+    /**
+     * Read a {@link Property} from output generated by  {@link GraphSONWriter#writeProperty(OutputStream, Property)} or
+     * via an {@link Property} passed to {@link GraphSONWriter#writeObject(OutputStream, Object)}.
+     *
+     * @param inputStream a stream containing at least one {@link Property} as written by the accompanying
+     *                    {@link GraphWriter#writeProperty(OutputStream, Property)} method.
+     * @param propertyAttachMethod a function that re-attaches a {@link Property} to a {@link Host} object.
+     */
+    @Override
+    public Property readProperty(final InputStream inputStream,
+                                 final Function<Attachable<Property>, Property> propertyAttachMethod) throws IOException {
+        /*if (version == GraphSONVersion.V1_0) {
+            final Map<String, Object> propertyData = mapper.readValue(inputStream, mapTypeReference);
+            final DetachedProperty p = new DetachedProperty(propertyData.get(GraphSONTokens.KEY).toString(), propertyData.get(GraphSONTokens.VALUE));
+            return propertyAttachMethod.apply(p);
+        } else {
+            return propertyAttachMethod.apply((DetachedProperty) mapper.readValue(inputStream, Property.class));
+        }*/
+       return reader.readProperty(inputStream, propertyAttachMethod);
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public <C> C readObject(final InputStream inputStream, final Class<? extends C> clazz) throws IOException {
+        return mapper.readValue(inputStream, clazz);
+    }
+
+    private Stream<String> readVertexStrings(final InputStream inputStream) throws IOException {
+        if (unwrapAdjacencyList) {
+               final JsonNode root = mapper.readTree(inputStream);
+            final JsonNode vertices = root.get(GraphSONTokens.VERTICES);
+            if (!vertices.getNodeType().equals(JsonNodeType.ARRAY)) throw new IOException(String.format("The '%s' key must be an array", GraphSONTokens.VERTICES));
+            return IteratorUtils.stream(vertices.elements()).map(Object::toString);
+        } else {
+               final BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
+            return br.lines();
+        }
+       
+    }
+
+    
+    public static Builder build() {
+        return new Builder();
+    }
+
+    public final static class Builder implements ReaderBuilder<GraphSONPartialReader> {
+        private long batchSize = 10000;
+
+        private Mapper<ObjectMapper> mapper = GraphSONMapper.build().create();
+        private boolean unwrapAdjacencyList = false;
+        
+
+        private Builder() {}
+
+        /**
+         * Number of mutations to perform before a commit is executed when using
+         * {@link GraphSONPartialReader#readGraph(InputStream, Graph)}.
+         */
+        public Builder batchSize(final long batchSize) {
+            this.batchSize = batchSize;
+            return this;
+        }
+
+        /**
+         * Override all of the {@link GraphSONMapper} builder
+         * options with this mapper.  If this value is set to something other than null then that value will be
+         * used to construct the reader.
+         */
+        public Builder mapper(final Mapper<ObjectMapper> mapper) {
+            this.mapper = mapper;
+            return this;
+        }
+
+        /**
+         * If the adjacency list is wrapped in a JSON object, as is done when writing a graph with
+         * {@link GraphSONWriter.Builder#wrapAdjacencyList} set to {@code true}, this setting needs to be set to
+         * {@code true} to properly read it.  By default, this value is {@code false} and the adjacency list is
+         * simply read as line delimited vertices.
+         * <p/>
+         * By setting this value to {@code true}, the generated JSON is no longer "splittable" by line and thus not
+         * suitable for OLAP processing.  Furthermore, reading this format of the JSON with
+         * {@link GraphSONPartialReader#readGraph(InputStream, Graph)} or
+         * {@link GraphSONPartialReader#readVertices(InputStream, Function, Function, Direction)} requires that the
+         * entire JSON object be read into memory, so it is best saved for "small" graphs.
+         */
+        public Builder unwrapAdjacencyList(final boolean unwrapAdjacencyList) {
+            this.unwrapAdjacencyList = unwrapAdjacencyList;
+            return this;
+        }
+
+        public GraphSONPartialReader create() {
+            return new GraphSONPartialReader(this);
+        }
+    }
+}
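The builder options above can be combined as in this short sketch; the batch size is an arbitrary example value:

// Sketch: configure commit batching and adjacency-list handling before reading.
GraphSONPartialReader reader = GraphSONPartialReader.build()
        .batchSize(5000)               // commit every 5000 mutations when the target graph supports transactions
        .unwrapAdjacencyList(false)    // input is line-delimited vertices (the default)
        .create();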
index c0f8ee9..9fc18eb 100644 (file)
  */
 package org.onap.aai.dbgen.schemamod;
 
-import java.util.Properties;
-
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.onap.aai.config.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.Loader;
 import org.onap.aai.introspection.LoaderFactory;
 import org.onap.aai.introspection.ModelType;
-import org.onap.aai.setup.SchemaVersions;
-import org.onap.aai.setup.SchemaVersion;
 import org.onap.aai.logging.ErrorLogHelper;
-import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
 import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
 import org.onap.aai.util.AAIConfig;
 import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.ExceptionTranslator;
 import org.onap.aai.util.UniquePropertyCheck;
 import org.slf4j.MDC;
-
-import com.att.eelf.configuration.Configuration;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 
+import java.util.Properties;
+
 public class SchemaMod {
 
        private final LoaderFactory loaderFactory;
@@ -159,13 +162,25 @@ public class SchemaMod {
                logger.info(msg);
        }
 
-       public static void main(String[] args) {
-
-               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
-                               "org.onap.aai.config",
-                               "org.onap.aai.setup"
-               );
+       public static void main(String[] args) throws AAIException {
 
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+               PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
+               initializer.initialize(ctx);
+               try {
+                       ctx.scan(
+                                       "org.onap.aai.config",
+                                       "org.onap.aai.setup"
+                       );
+                       ctx.refresh();
+               } catch (Exception e) {
+                       AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
+                       System.out.println("Problems running SchemaMod "+aai.getMessage());
+                       LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
+                       throw aai;
+               }
                LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
                SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
                SchemaMod schemaMod = new SchemaMod(loaderFactory, schemaVersions);
index 616ff02..b3faec8 100644 (file)
-/**\r
- * ============LICENSE_START=======================================================\r
- * org.onap.aai\r
- * ================================================================================\r
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
- * ================================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- *\r
- *    http://www.apache.org/licenses/LICENSE-2.0\r
- *\r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END=========================================================\r
- */\r
-package org.onap.aai.migration;\r
-\r
-\r
-import java.util.HashMap;\r
-import java.util.Iterator;\r
-import java.util.List;\r
-import java.util.Map;\r
-import org.apache.tinkerpop.gremlin.structure.Edge;\r
-import org.apache.tinkerpop.gremlin.structure.Property;\r
-import org.apache.tinkerpop.gremlin.structure.Direction;\r
-import org.apache.tinkerpop.gremlin.structure.Vertex;\r
-import org.javatuples.Pair;\r
-import org.onap.aai.db.props.AAIProperties;\r
-import org.onap.aai.edges.EdgeIngestor;\r
-import org.onap.aai.introspection.LoaderFactory;\r
-import org.onap.aai.serialization.db.EdgeSerializer;\r
-import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
-import org.onap.aai.setup.SchemaVersions;\r
-\r
-/**\r
- * A migration template for "swinging" edges that terminate on an old-node to a new target node. \r
- *     That is, given an oldNode and a newNode we will swing edges that terminate on the\r
- *     oldNode and terminate them on the newNode (actually we drop the old edges and add new ones).\r
- *     \r
- *     \r
- *     We allow the passing of some parameters to restrict what edges get swung over: \r
- *      > otherEndNodeTypeRestriction: only swing edges that terminate on the oldNode if the\r
- *                     node at the other end of the edge is of this nodeType.\r
- *      > edgeLabelRestriction: Only swing edges that have this edgeLabel\r
- *      > edgeDirectionRestriction: Only swing edges that go this direction (from the oldNode)\r
- *             this is a required parameter.  valid values are: BOTH, IN, OUT\r
- *     \r
- */\r
-@MigrationPriority(0)\r
-@MigrationDangerRating(1)\r
-public abstract class EdgeSwingMigrator extends Migrator {\r
-\r
-       private boolean success = true;\r
-       private String nodeTypeRestriction = null;\r
-       private String edgeLabelRestriction = null;  \r
-       private String edgeDirRestriction = null;  \r
-       private List<Pair<Vertex, Vertex>> nodePairList;\r
-       \r
-       \r
-       public EdgeSwingMigrator(TransactionalGraphEngine engine , LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {\r
-               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
-       }\r
-       \r
-\r
-       /**\r
-        * Do not override this method as an inheritor of this class\r
-        */\r
-       @Override\r
-       public void run() {\r
-               executeModifyOperation();\r
-               cleanupAsAppropriate(this.nodePairList);\r
-       }\r
-\r
-       /**\r
-        * This is where inheritors should add their logic\r
-        */\r
-       protected void executeModifyOperation() {\r
-       \r
-               try {\r
-                       this.nodeTypeRestriction = this.getNodeTypeRestriction();\r
-                       this.edgeLabelRestriction = this.getEdgeLabelRestriction();\r
-                       this.edgeDirRestriction = this.getEdgeDirRestriction();\r
-                       nodePairList = this.getAffectedNodePairs();\r
-                       for (Pair<Vertex, Vertex> nodePair : nodePairList) {\r
-                               Vertex fromNode = nodePair.getValue0();\r
-                               Vertex toNode = nodePair.getValue1();\r
-                               this.swingEdges(fromNode, toNode,\r
-                                               this.nodeTypeRestriction,this.edgeLabelRestriction,this.edgeDirRestriction);\r
-                       }\r
-               } catch (Exception e) {\r
-                       logger.error("error encountered", e);\r
-                       success = false;\r
-               }\r
-       }\r
-\r
-\r
-       protected void swingEdges(Vertex oldNode, Vertex newNode, String nodeTypeRestr, String edgeLabelRestr, String edgeDirRestr) {\r
-               try {\r
-                       // If the old and new Vertices aren't populated, throw an exception\r
-                       if( oldNode == null  ){\r
-                               logger.info ( "null oldNode passed to swingEdges() ");\r
-                               success = false;\r
-                               return;\r
-                       }\r
-                       else if( newNode == null ){\r
-                               logger.info ( "null newNode passed to swingEdges() ");\r
-                               success = false;\r
-                               return;\r
-                       }\r
-                       else if( edgeDirRestr == null ||\r
-                                               (!edgeDirRestr.equals("BOTH") \r
-                                                       && !edgeDirRestr.equals("IN")  \r
-                                                       && !edgeDirRestr.equals("OUT") )\r
-                                               ){\r
-                               logger.info ( "invalid direction passed to swingEdges(). valid values are BOTH/IN/OUT ");\r
-                               success = false;\r
-                               return;\r
-                       }\r
-                       else if( edgeLabelRestr != null \r
-                                       && (edgeLabelRestr.trim().equals("none") || edgeLabelRestr.trim().equals("")) ){\r
-                               edgeLabelRestr = null;\r
-                       }\r
-                       else if( nodeTypeRestr == null || nodeTypeRestr.trim().equals("") ){\r
-                               nodeTypeRestr = "none";\r
-                       }\r
-                               \r
-                       String oldNodeType = oldNode.value(AAIProperties.NODE_TYPE);\r
-                       String oldUri = oldNode.<String> property("aai-uri").isPresent()  ? oldNode.<String> property("aai-uri").value() : "URI Not present"; \r
-                       \r
-                       String newNodeType = newNode.value(AAIProperties.NODE_TYPE);\r
-                       String newUri = newNode.<String> property("aai-uri").isPresent()  ? newNode.<String> property("aai-uri").value() : "URI Not present"; \r
-\r
-                       // If the nodeTypes don't match, throw an error \r
-                       if( !oldNodeType.equals(newNodeType) ){\r
-                               logger.info ( "Can not swing edge from a [" + oldNodeType + "] node to a [" +\r
-                                               newNodeType + "] node. ");\r
-                               success = false;\r
-                               return;\r
-                       }\r
-                       \r
-                       // Find and migrate any applicable OUT edges.\r
-                       if( edgeDirRestr.equals("BOTH") || edgeDirRestr.equals("OUT") ){\r
-                               Iterator <Edge> edgeOutIter = null;\r
-                               if( edgeLabelRestr == null ) {\r
-                                       edgeOutIter = oldNode.edges(Direction.OUT);\r
-                               }\r
-                               else {\r
-                                       edgeOutIter = oldNode.edges(Direction.OUT, edgeLabelRestr);\r
-                               }\r
-                               \r
-                               while( edgeOutIter.hasNext() ){\r
-                                       Edge oldOutE = edgeOutIter.next();\r
-                                       String eLabel = oldOutE.label();\r
-                                       Vertex otherSideNode4ThisEdge = oldOutE.inVertex();\r
-                                       String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);\r
-                                       if( nodeTypeRestr.equals("none") || nodeTypeRestr.toLowerCase().equals(otherSideNodeType) ){\r
-                                               Iterator <Property<Object>> propsIter = oldOutE.properties();\r
-                                               HashMap<String, String> propMap = new HashMap<String,String>();\r
-                                               while( propsIter.hasNext() ){\r
-                                                       Property <Object> ep = propsIter.next();\r
-                                                       propMap.put(ep.key(), ep.value().toString());\r
-                                               }\r
-                                               \r
-                                               String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent()  ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present"; \r
-                                               logger.info ( "\nSwinging [" + eLabel + "] OUT edge.  \n    >> Unchanged side is [" \r
-                                                               + otherSideNodeType + "][" + otherSideUri + "] \n    >> Edge used to go to [" + oldNodeType \r
-                                                               + "][" + oldUri + "],\n    >> now swung to [" + newNodeType + "][" + newUri + "]. ");\r
-                                               // remove the old edge\r
-                                               oldOutE.remove();\r
-                                               \r
-                                               // add the new edge with properties that match the edge that was deleted.  We don't want to\r
-                                               // change any edge properties - just swinging one end of the edge to a new node.\r
-                                               // NOTE - addEdge adds an OUT edge to the vertex passed as a parameter, so we are \r
-                                               //       adding from the newNode side.\r
-                                               Edge newOutE = newNode.addEdge(eLabel, otherSideNode4ThisEdge);\r
-                                               \r
-                                               Iterator it = propMap.entrySet().iterator();\r
-                                           while (it.hasNext()) {\r
-                                               Map.Entry pair = (Map.Entry)it.next();\r
-                                               newOutE.property(pair.getKey().toString(), pair.getValue().toString() );\r
-                                           }\r
-                                           \r
-                                       }\r
-                               }\r
-                       }       \r
-                       \r
-                       // Find and migrate any applicable IN edges.\r
-                       if( edgeDirRestr.equals("BOTH") || edgeDirRestr.equals("IN") ){\r
-                               Iterator <Edge> edgeInIter = null;\r
-                               if( edgeLabelRestr == null ) {\r
-                                       edgeInIter = oldNode.edges(Direction.IN);\r
-                               }\r
-                               else {\r
-                                       edgeInIter = oldNode.edges(Direction.IN, edgeLabelRestr);\r
-                               }                       \r
-                               \r
-                               while( edgeInIter.hasNext() ){\r
-                                       Edge oldInE = edgeInIter.next();\r
-                                       String eLabel = oldInE.label();\r
-                                       Vertex otherSideNode4ThisEdge = oldInE.outVertex();\r
-                                       String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);\r
-                                       if( nodeTypeRestr.equals("none") || nodeTypeRestr.toLowerCase().equals(otherSideNodeType) ){\r
-                                               Iterator <Property<Object>> propsIter = oldInE.properties();\r
-                                               HashMap<String, String> propMap = new HashMap<String,String>();\r
-                                               while( propsIter.hasNext() ){\r
-                                                       Property <Object> ep = propsIter.next();\r
-                                                       propMap.put(ep.key(), ep.value().toString());\r
-                                               }\r
-\r
-                                               String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent()  ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present"; \r
-                                               logger.info ( "\nSwinging [" + eLabel + "] IN edge.  \n    >> Unchanged side is  [" \r
-                                                               + otherSideNodeType + "][" + otherSideUri + "] \n    >>  Edge used to go to [" + oldNodeType \r
-                                                               + "][" + oldUri + "],\n    >>   now swung to [" + newNodeType + "][" + newUri + "]. ");\r
-                                               \r
-                                               // remove the old edge\r
-                                               oldInE.remove();\r
-                                               \r
-                                               // add the new edge with properties that match the edge that was deleted.  We don't want to\r
-                                               // change any edge properties - just swinging one end of the edge to a new node.\r
-                                               // NOTE - addEdge adds an OUT edge to the vertex passed as a parameter, so we are \r
-                                               //       adding from the node on the other-end of the original edge so we'll get \r
-                                               //       an IN-edge to the newNode.\r
-                                               Edge newInE = otherSideNode4ThisEdge.addEdge(eLabel, newNode);\r
-                                               \r
-                                               Iterator it = propMap.entrySet().iterator();\r
-                                           while (it.hasNext()) {\r
-                                               Map.Entry pair = (Map.Entry)it.next();\r
-                                               newInE.property(pair.getKey().toString(), pair.getValue().toString() );\r
-                                           } \r
-                                       }\r
-                               }\r
-                       }       \r
-                       \r
-               } catch (Exception e) {\r
-                       logger.error("error encountered", e);\r
-                       success = false;\r
-               }\r
-       }\r
-  \r
-       @Override\r
-       public Status getStatus() {\r
-               if (success) {\r
-                       return Status.SUCCESS;\r
-               } else {\r
-                       return Status.FAILURE;\r
-               }\r
-       }\r
-       \r
-       \r
-       /**\r
-        * Get the List of node pairs("from" and "to"), you would like EdgeSwingMigrator to migrate from json files\r
-        * @return\r
-        */\r
-       public abstract List<Pair<Vertex, Vertex>> getAffectedNodePairs() ;\r
-       \r
-       \r
-       /**\r
-        * Get the nodeTypeRestriction that you want EdgeSwingMigrator to use\r
-        * @return\r
-        */\r
-       public abstract String getNodeTypeRestriction() ;\r
-       \r
-       \r
-       /**\r
-        * Get the nodeTypeRestriction that you want EdgeSwingMigrator to use\r
-        * @return\r
-        */\r
-       public abstract String getEdgeLabelRestriction() ;\r
-       \r
-       /**\r
-        * Get the nodeTypeRestriction that you want EdgeSwingMigrator to use\r
-        * @return\r
-        */\r
-       public abstract String getEdgeDirRestriction() ;\r
-       \r
-\r
-       \r
-       /**\r
-        * Cleanup (remove) the nodes that edges were moved off of if appropriate\r
-        * @return\r
-        */\r
-       public abstract void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL);\r
-\r
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Property;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.javatuples.Pair;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.enums.EdgeProperty;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+/**
+ * A migration template for "swinging" edges that terminate on an old node over to a new target node.
+ *     That is, given an oldNode and a newNode, each qualifying edge that terminates on the
+ *     oldNode is dropped and re-created so that it terminates on the newNode instead.
+ *
+ *
+ *     A few parameters can be passed to restrict which edges get swung over:
+ *      > otherEndNodeTypeRestriction: only swing edges that terminate on the oldNode if the
+ *                     node at the other end of the edge is of this nodeType.
+ *      > edgeLabelRestriction: only swing edges that have this edgeLabel.
+ *      > edgeDirectionRestriction: only swing edges that go in this direction (from the oldNode);
+ *             this is a required parameter.  Valid values are: BOTH, IN, OUT.
+ *     
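+ *
+ * Example (illustrative sketch only; the migrator name and node type below are hypothetical,
+ * not part of this commit, and abstract methods inherited from Migrator are omitted):
+ * <pre>{@code
+ * public class MigrateFooEdges extends EdgeSwingMigrator {
+ *     public MigrateFooEdges(TransactionalGraphEngine engine, LoaderFactory loaderFactory,
+ *             EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+ *         super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+ *     }
+ *     public List<Pair<Vertex, Vertex>> getAffectedNodePairs() {
+ *         // look up the (oldNode, newNode) pairs to swing; an empty list is shown for brevity
+ *         return new ArrayList<>();
+ *     }
+ *     public String getNodeTypeRestriction()  { return "pserver"; }
+ *     public String getEdgeLabelRestriction() { return "none"; }
+ *     public String getEdgeDirRestriction()   { return "BOTH"; }
+ *     public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) {
+ *         // optionally remove the old nodes whose edges were swung
+ *     }
+ * }
+ * }</pre>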
+ */
+@MigrationPriority(0)
+@MigrationDangerRating(1)
+public abstract class EdgeSwingMigrator extends Migrator {
+
+       private boolean success = true;
+       private String nodeTypeRestriction = null;
+       private String edgeLabelRestriction = null;  
+       private String edgeDirRestriction = null;  
+       private List<Pair<Vertex, Vertex>> nodePairList;
+       
+       
+       public EdgeSwingMigrator(TransactionalGraphEngine engine , LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+       }
+       
+
+       /**
+        * Do not override this method as an inheritor of this class
+        */
+       @Override
+       public void run() {
+               executeModifyOperation();
+               cleanupAsAppropriate(this.nodePairList);
+       }
+
+       /**
+        * Gathers the restrictions and affected node pairs from the inheriting class and swings the edges for each pair.
+        */
+       protected void executeModifyOperation() {
+       
+               try {
+                       this.nodeTypeRestriction = this.getNodeTypeRestriction();
+                       this.edgeLabelRestriction = this.getEdgeLabelRestriction();
+                       this.edgeDirRestriction = this.getEdgeDirRestriction();
+                       nodePairList = this.getAffectedNodePairs();
+                       for (Pair<Vertex, Vertex> nodePair : nodePairList) {
+                               Vertex fromNode = nodePair.getValue0();
+                               Vertex toNode = nodePair.getValue1();
+                               this.swingEdges(fromNode, toNode,
+                                               this.nodeTypeRestriction,this.edgeLabelRestriction,this.edgeDirRestriction);
+                       }
+               } catch (Exception e) {
+                       logger.error("error encountered", e);
+                       success = false;
+               }
+       }
+
+
+       protected void swingEdges(Vertex oldNode, Vertex newNode, String nodeTypeRestr, String edgeLabelRestr, String edgeDirRestr) {
+               try {
+                       // If the old or new Vertex isn't populated, log an error and return
+                       if( oldNode == null  ){
+                               logger.info ( "null oldNode passed to swingEdges() ");
+                               success = false;
+                               return;
+                       }
+                       else if( newNode == null ){
+                               logger.info ( "null newNode passed to swingEdges() ");
+                               success = false;
+                               return;
+                       }
+                       else if( edgeDirRestr == null ||
+                                               (!edgeDirRestr.equals("BOTH") 
+                                                       && !edgeDirRestr.equals("IN")  
+                                                       && !edgeDirRestr.equals("OUT") )
+                                               ){
+                               logger.info ( "invalid direction passed to swingEdges(). valid values are BOTH/IN/OUT ");
+                               success = false;
+                               return;
+                       }
+                       else if( edgeLabelRestr != null 
+                                       && (edgeLabelRestr.trim().equals("none") || edgeLabelRestr.trim().equals("")) ){
+                               edgeLabelRestr = null;
+                       }
+                       else if( nodeTypeRestr == null || nodeTypeRestr.trim().equals("") ){
+                               nodeTypeRestr = "none";
+                       }
+                               
+                       String oldNodeType = oldNode.value(AAIProperties.NODE_TYPE);
+                       String oldUri = oldNode.<String> property("aai-uri").isPresent()  ? oldNode.<String> property("aai-uri").value() : "URI Not present"; 
+                       
+                       String newNodeType = newNode.value(AAIProperties.NODE_TYPE);
+                       String newUri = newNode.<String> property("aai-uri").isPresent()  ? newNode.<String> property("aai-uri").value() : "URI Not present"; 
+
+                       // If the nodeTypes don't match, log an error and return
+                       if( !oldNodeType.equals(newNodeType) ){
+                               logger.info ( "Can not swing edge from a [" + oldNodeType + "] node to a [" +
+                                               newNodeType + "] node. ");
+                               success = false;
+                               return;
+                       }
+                       
+                       // Find and migrate any applicable OUT edges.
+                       if( edgeDirRestr.equals("BOTH") || edgeDirRestr.equals("OUT") ){
+                               Iterator <Edge> edgeOutIter = null;
+                Iterator <Edge> newNodeEdgeOutIter = null;
+
+                               if( edgeLabelRestr == null ) {
+                                       edgeOutIter = oldNode.edges(Direction.OUT);
+                    newNodeEdgeOutIter = newNode.edges(Direction.OUT);
+
+                               }
+                               else {
+                                       edgeOutIter = oldNode.edges(Direction.OUT, edgeLabelRestr);
+                    newNodeEdgeOutIter = newNode.edges(Direction.OUT, edgeLabelRestr);
+                }
+                
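+                // Collect the far-end vertices that newNode already has OUT edges to, so an identical edge is not re-created below.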
+                List<Vertex> newNodeOtherEndVertexList = new ArrayList<Vertex>();
+                while (newNodeEdgeOutIter.hasNext()){
+                    Edge newNodeOutE = newNodeEdgeOutIter.next();
+                    Vertex otherSideNode4ThisEdgeOfNewNode = newNodeOutE.inVertex();
+                    newNodeOtherEndVertexList.add(otherSideNode4ThisEdgeOfNewNode);
+                }   
+                               
+                               while( edgeOutIter.hasNext() ){
+                                       Edge oldOutE = edgeOutIter.next();
+                                       String eLabel = oldOutE.label();
+                                       Vertex otherSideNode4ThisEdge = oldOutE.inVertex();
+                                       String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+                                       if( nodeTypeRestr.equals("none") || nodeTypeRestr.toLowerCase().equals(otherSideNodeType) ){
+                                               Iterator <Property<Object>> propsIter = oldOutE.properties();
+                                               HashMap<String, String> propMap = new HashMap<String,String>();
+                                               while( propsIter.hasNext() ){
+                                                       Property <Object> ep = propsIter.next();
+                                                       propMap.put(ep.key(), ep.value().toString());
+                                               }
+                                               
+                                               String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent()  ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present"; 
+                                               logger.info ( "\nSwinging [" + eLabel + "] OUT edge.  \n    >> Unchanged side is [" 
+                                                               + otherSideNodeType + "][" + otherSideUri + "] \n    >> Edge used to go to [" + oldNodeType 
+                                                               + "][" + oldUri + "],\n    >> now swung to [" + newNodeType + "][" + newUri + "]. ");
+                                               // remove the old edge
+                                               oldOutE.remove();
+                                               
+                                               // add the new edge with properties that match the edge that was deleted.  We don't want to
+                                               // change any edge properties - just swinging one end of the edge to a new node.
+                                               // NOTE - addEdge adds an OUT edge to the vertex passed as a parameter, so we are 
+                                               //       adding from the newNode side.
+                                               
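+                                               // Classify the old edge (TREE vs COUSIN) from its copied properties, and only re-create it
+                                               // when the edge rules allow it and newNode is not already connected to the same far-end vertex.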
+                                               EdgeType edgeType = getEdgeType(propMap);
+                                               if (edgeType != null && !newNodeOtherEndVertexList.contains(otherSideNode4ThisEdge)){
+//                                                     Edge newOutE = newNode.addEdge(eLabel, otherSideNode4ThisEdge);
+                                                       Edge newOutE = createEdgeIfPossible(edgeType, newNode, otherSideNode4ThisEdge);
+                                                       if (newOutE != null){
+                                                               Iterator it = propMap.entrySet().iterator();
+                                                           while (it.hasNext()) {
+                                                               Map.Entry pair = (Map.Entry)it.next();
+                                                               newOutE.property(pair.getKey().toString(), pair.getValue().toString() );
+                                                           }
+                                                       }else {
+                                                               logger.info("\n Edge was not swung due to Multiplicity Rule Violation...");
+                                                       }
+                                               }
+                                       }
+                               }
+                       }       
+                       
+                       // Find and migrate any applicable IN edges.
+                       if( edgeDirRestr.equals("BOTH") || edgeDirRestr.equals("IN") ){
+                               Iterator <Edge> edgeInIter = null;
+                               Iterator <Edge> newNodeEdgeInIter = null;
+                               if( edgeLabelRestr == null ) {
+                                       edgeInIter = oldNode.edges(Direction.IN);
+                                       newNodeEdgeInIter = newNode.edges(Direction.IN);
+                               }
+                               else {
+                                       edgeInIter = oldNode.edges(Direction.IN, edgeLabelRestr);
+                                       newNodeEdgeInIter = newNode.edges(Direction.IN, edgeLabelRestr);
+                               }
+                               
+                               // Track the far-end vertices that newNode already has IN edges from, so an identical edge is not re-created below.
+                               List<Vertex> newNodeOtherEndVertexList = new ArrayList<Vertex>();
+                               while (newNodeEdgeInIter.hasNext()){
+                                       Edge newNodeInE = newNodeEdgeInIter.next();
+                                       Vertex otherSideNode4ThisEdgeOfNewNode = newNodeInE.outVertex();
+                                       newNodeOtherEndVertexList.add(otherSideNode4ThisEdgeOfNewNode);
+                               }
+
+                               while( edgeInIter.hasNext() ){
+                                       Edge oldInE = edgeInIter.next();
+                                       String eLabel = oldInE.label();
+                                       Vertex otherSideNode4ThisEdge = oldInE.outVertex();
+                                       String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+                                       if( nodeTypeRestr.equals("none") || nodeTypeRestr.toLowerCase().equals(otherSideNodeType) ){
+                                               Iterator <Property<Object>> propsIter = oldInE.properties();
+                                               HashMap<String, String> propMap = new HashMap<String,String>();
+                                               while( propsIter.hasNext() ){
+                                                       Property <Object> ep = propsIter.next();
+                                                       propMap.put(ep.key(), ep.value().toString());
+                                               }
+
+                                               String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent()  ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present"; 
+                                               logger.info ( "\nSwinging [" + eLabel + "] IN edge.  \n    >> Unchanged side is  [" 
+                                                               + otherSideNodeType + "][" + otherSideUri + "] \n    >>  Edge used to go to [" + oldNodeType 
+                                                               + "][" + oldUri + "],\n    >>   now swung to [" + newNodeType + "][" + newUri + "]. ");
+                                               
+                                               // remove the old edge
+                                               oldInE.remove();
+                                               
+                                               // add the new edge with properties that match the edge that was deleted.  We don't want to
+                                               // change any edge properties - just swinging one end of the edge to a new node.
+                                               // NOTE - addEdge adds an OUT edge to the vertex passed as a parameter, so we are 
+                                               //       adding from the node on the other-end of the original edge so we'll get 
+                                               //       an IN-edge to the newNode.
+                                               EdgeType edgeType = getEdgeType(propMap);
+                                               if (edgeType != null && !newNodeOtherEndVertexList.contains(otherSideNode4ThisEdge)){
+//                                                     Edge newInE = otherSideNode4ThisEdge.addEdge(eLabel, newNode);
+                                                       Edge newInE = createEdgeIfPossible(edgeType, otherSideNode4ThisEdge, newNode);
+                                                       if (newInE != null){
+                                                               Iterator it = propMap.entrySet().iterator();
+                                                           while (it.hasNext()) {
+                                                               Map.Entry pair = (Map.Entry)it.next();
+                                                               newInE.property(pair.getKey().toString(), pair.getValue().toString() );
+                                                           }
+                                                       } else {
+                                                               logger.info("\t Edge was not swung due to Multiplicity Rule Violation...");
+                                                       }
+                                               }
+                                       }
+                               }
+                       }       
+                       
+               } catch (Exception e) {
+                       logger.error("error encountered", e);
+                       success = false;
+               }
+       }
+       
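+       /**
+        * Derive the edge type from the copied edge properties: an edge whose CONTAINS
+        * property is "NONE" is a cousin edge; anything else is treated as a tree edge.
+        */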
+       private EdgeType getEdgeType(HashMap edgePropMap) {
+               EdgeType type = null;
+               String containsValue = edgePropMap.get(EdgeProperty.CONTAINS.toString()).toString();
+               if ("NONE".equalsIgnoreCase(containsValue)){
+                       type = EdgeType.COUSIN;
+               } else {
+                       type = EdgeType.TREE;
+               }
+               return type;
+       }
+  
+       @Override
+       public Status getStatus() {
+               if (success) {
+                       return Status.SUCCESS;
+               } else {
+                       return Status.FAILURE;
+               }
+       }
+       
+       
+       /**
+        * Get the list of node pairs ("from" and "to") whose edges you would like EdgeSwingMigrator to swing (for example, pairs read from JSON files)
+        * @return
+        */
+       public abstract List<Pair<Vertex, Vertex>> getAffectedNodePairs() ;
+       
+       
+       /**
+        * Get the nodeTypeRestriction that you want EdgeSwingMigrator to use
+        * @return
+        */
+       public abstract String getNodeTypeRestriction() ;
+       
+       
+       /**
+        * Get the edgeLabelRestriction that you want EdgeSwingMigrator to use
+        * @return
+        */
+       public abstract String getEdgeLabelRestriction() ;
+       
+       /**
+        * Get the edgeDirRestriction (BOTH/IN/OUT) that you want EdgeSwingMigrator to use
+        * @return
+        */
+       public abstract String getEdgeDirRestriction() ;
+       
+
+       
+       /**
+        * Clean up (remove) the nodes that edges were moved off of, if appropriate
+        * @param nodePairL the node pairs that were migrated
+        */
+       public abstract void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL);
+
 }
\ No newline at end of file
index 0e65745..ecc0434 100644 (file)
  */
 package org.onap.aai.migration;
 
-import java.util.UUID;
-
+import org.onap.aai.config.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.logging.ErrorLogHelper;
 import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.logging.LoggingContext.StatusCode;
 import org.onap.aai.serialization.db.EdgeSerializer;
 import org.onap.aai.setup.SchemaVersions;
 import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.ExceptionTranslator;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 
+import java.util.UUID;
+
 /**
  * Wrapper class to allow {@link org.onap.aai.migration.MigrationControllerInternal MigrationControllerInternal}
  * to be run from a shell script
@@ -43,7 +47,7 @@ public class MigrationController {
         * @param args
         *            the arguments
         */
-       public static void main(String[] args) {
+       public static void main(String[] args) throws AAIException {
 
                LoggingContext.init();
                LoggingContext.partnerName("Migration");
@@ -55,11 +59,23 @@ public class MigrationController {
                LoggingContext.statusCode(StatusCode.COMPLETE);
                LoggingContext.responseCode(LoggingContext.SUCCESS);
 
-               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
-                               "org.onap.aai.config",
-                               "org.onap.aai.setup"
-               );
-
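+               // Build the Spring context by hand so the PropertyPasswordConfiguration initializer runs before the
+               // org.onap.aai.config and org.onap.aai.setup packages are scanned and the context is refreshed.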
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+               PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
+               initializer.initialize(ctx);
+               try {
+                       ctx.scan(
+                                       "org.onap.aai.config",
+                                       "org.onap.aai.setup"
+                       );
+                       ctx.refresh();
+               } catch (Exception e) {
+                       AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
+                       System.out.println("Problems running tool "+aai.getMessage());
+                       LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
+                       throw aai;
+               }
                LoaderFactory loaderFactory   = ctx.getBean(LoaderFactory.class);
                EdgeIngestor   edgeIngestor   = ctx.getBean(EdgeIngestor.class);
                EdgeSerializer edgeSerializer = ctx.getBean(EdgeSerializer.class);
index b113f03..b94460a 100644 (file)
 
 package org.onap.aai.migration;
 
-import com.att.eelf.configuration.Configuration;
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
-import com.beust.jcommander.JCommander;
-import com.beust.jcommander.Parameter;
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
+import java.util.stream.Collectors;
+
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.tinkerpop.gremlin.structure.Graph;
 import org.apache.tinkerpop.gremlin.structure.io.IoCore;
+import org.onap.aai.datasnapshot.DataSnapshot;
 import org.onap.aai.db.props.AAIProperties;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.dbmap.DBConnectionType;
@@ -38,32 +47,24 @@ import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.Loader;
 import org.onap.aai.introspection.LoaderFactory;
 import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
 import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.logging.LoggingContext.StatusCode;
-import org.onap.aai.serialization.db.EdgeSerializer;
-import org.onap.aai.serialization.engines.JanusGraphDBEngine;
 import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
 import org.onap.aai.serialization.engines.TransactionalGraphEngine;
-import org.onap.aai.setup.SchemaVersion;
-import org.onap.aai.setup.SchemaVersions;
 import org.onap.aai.util.AAIConstants;
 import org.onap.aai.util.FormatDate;
 import org.reflections.Reflections;
 import org.slf4j.MDC;
 
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Properties;
-import java.util.Set;
-import java.util.stream.Collectors;
-
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
 
 /**
  * Runs a series of migrations from a defined directory based on the presence of
@@ -73,262 +74,280 @@ import java.util.stream.Collectors;
  */
 public class MigrationControllerInternal {
 
-    private EELFLogger logger;
-    private final int DANGER_ZONE = 10;
-    public static final String VERTEX_TYPE = "migration-list-1707";
-    private final List<String> resultsSummary = new ArrayList<>();
-    private final List<NotificationHelper> notifications = new ArrayList<>();
-    private static final String SNAPSHOT_LOCATION = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "migrationSnapshots";
-
-    private LoaderFactory loaderFactory;
-    private EdgeIngestor edgeIngestor;
-    private EdgeSerializer edgeSerializer;
-    private final SchemaVersions schemaVersions;
-
-    public MigrationControllerInternal(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){
-        this.loaderFactory = loaderFactory;
-        this.edgeIngestor = edgeIngestor;
-        this.edgeSerializer = edgeSerializer;
-        this.schemaVersions = schemaVersions;
-    }
-
-    /**
-     * The main method.
-     *
-     * @param args
-     *            the arguments
-     */
-    public void run(String[] args) {
-        // Set the logging file properties to be used by EELFManager
-        System.setProperty("aai.service.name", MigrationController.class.getSimpleName());
-        Properties props = System.getProperties();
-        props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "migration-logback.xml");
-        props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES);
-
-        logger = EELFManager.getInstance().getLogger(MigrationControllerInternal.class.getSimpleName());
-        MDC.put("logFilenameAppender", MigrationController.class.getSimpleName());
-
-        boolean loadSnapshot = false;
-
-        CommandLineArgs cArgs = new CommandLineArgs();
-
-        JCommander jCommander = new JCommander(cArgs, args);
-        jCommander.setProgramName(MigrationController.class.getSimpleName());
-
-        // Set flag to load from snapshot based on the presence of snapshot and
-        // graph storage backend of inmemory
-        if (cArgs.dataSnapshot != null && !cArgs.dataSnapshot.isEmpty()) {
-            try {
-                PropertiesConfiguration config = new PropertiesConfiguration(cArgs.config);
-                if (config.getString("storage.backend").equals("inmemory")) {
-                    loadSnapshot = true;
-                    System.setProperty("load.snapshot.file", "true");
-                    System.setProperty("snapshot.location", cArgs.dataSnapshot);
-                }
-            } catch (ConfigurationException e) {
-                LoggingContext.statusCode(StatusCode.ERROR);
-                LoggingContext.responseCode(LoggingContext.DATA_ERROR);
-                logAndPrint("ERROR: Could not load janusgraph configuration.\n" + ExceptionUtils.getFullStackTrace(e));
-                return;
-            }
-        }
-        System.setProperty("realtime.db.config", cArgs.config);
-        logAndPrint("\n\n---------- Connecting to Graph ----------");
-        AAIGraph.getInstance();
-
-        logAndPrint("---------- Connection Established ----------");
-        SchemaVersion version = schemaVersions.getDefaultVersion();
-        QueryStyle queryStyle = QueryStyle.TRAVERSAL;
-        ModelType introspectorFactoryType = ModelType.MOXY;
-        Loader loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);
-        TransactionalGraphEngine engine = new JanusGraphDBEngine(queryStyle, DBConnectionType.REALTIME, loader);
-
-        if (cArgs.help) {
-            jCommander.usage();
-            engine.rollback();
-            return;
-        }
-
-        Reflections reflections = new Reflections("org.onap.aai.migration");
-        List<Class<? extends Migrator>> migratorClasses = new ArrayList<>(findClasses(reflections));
-        //Displays list of migration classes which needs to be executed.Pass flag "-l" following by the class names
-        if (cArgs.list) {
-            listMigrationWithStatus(cArgs, migratorClasses, engine);
-            return;
-        }
-
-        logAndPrint("---------- Looking for migration scripts to be executed. ----------");
-        //Excluding any migration class when run migration from script.Pass flag "-e" following by the class names
-        if (!cArgs.excludeClasses.isEmpty()) {
-            migratorClasses = filterMigrationClasses(cArgs.excludeClasses, migratorClasses);
-            listMigrationWithStatus(cArgs, migratorClasses, engine);
-        }
-        List<Class<? extends Migrator>> migratorClassesToRun = createMigratorList(cArgs, migratorClasses);
-
-        sortList(migratorClassesToRun);
-
-        if (!cArgs.scripts.isEmpty() && migratorClassesToRun.isEmpty()) {
-            LoggingContext.statusCode(StatusCode.ERROR);
-            LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
-            logAndPrint("\tERROR: Failed to find migrations " + cArgs.scripts + ".");
-            logAndPrint("---------- Done ----------");
-            LoggingContext.successStatusFields();
-        }
-
-        logAndPrint("\tFound " + migratorClassesToRun.size() + " migration scripts.");
-        logAndPrint("---------- Executing Migration Scripts ----------");
-
-
-        if (!cArgs.skipPreMigrationSnapShot) {
-            takePreSnapshotIfRequired(engine, cArgs, migratorClassesToRun);
-        }
-
-        for (Class<? extends Migrator> migratorClass : migratorClassesToRun) {
-            String name = migratorClass.getSimpleName();
-            Migrator migrator;
-            if (cArgs.runDisabled.contains(name) || migratorClass.isAnnotationPresent(Enabled.class)) {//Check either of enabled annotation or runDisabled flag
-
-                try {
-                    engine.startTransaction();
-                    if (!cArgs.forced && hasAlreadyRun(name, engine)) {
-                        logAndPrint("Migration " + name + " has already been run on this database and will not be executed again. Use -f to force execution");
-                        continue;
-                    }
-                    migrator = migratorClass
-                        .getConstructor(
-                            TransactionalGraphEngine.class,
-                            LoaderFactory.class,
-                            EdgeIngestor.class,
-                            EdgeSerializer.class,
-                            SchemaVersions.class
-                        ).newInstance(engine, loaderFactory, edgeIngestor, edgeSerializer,schemaVersions);
-                } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
-                    LoggingContext.statusCode(StatusCode.ERROR);
-                    LoggingContext.responseCode(LoggingContext.DATA_ERROR);
-                    logAndPrint("EXCEPTION caught initalizing migration class " + migratorClass.getSimpleName() + ".\n" + ExceptionUtils.getFullStackTrace(e));
-                    LoggingContext.successStatusFields();
-                    engine.rollback();
-                    continue;
-                }
-                logAndPrint("\tRunning " + migratorClass.getSimpleName() + " migration script.");
-                logAndPrint("\t\t See " + System.getProperty("AJSC_HOME") + "/logs/migration/" + migratorClass.getSimpleName() + "/* for logs.");
-                MDC.put("logFilenameAppender", migratorClass.getSimpleName() + "/" + migratorClass.getSimpleName());
-
-                migrator.run();
-
-                commitChanges(engine, migrator, cArgs);
-            } else {
-                logAndPrint("\tSkipping " + migratorClass.getSimpleName() + " migration script because it has been disabled.");
-            }
-        }
-        MDC.put("logFilenameAppender", MigrationController.class.getSimpleName());
-        for (NotificationHelper notificationHelper : notifications) {
-            try {
-                notificationHelper.triggerEvents();
-            } catch (AAIException e) {
-                LoggingContext.statusCode(StatusCode.ERROR);
-                LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
-                logAndPrint("\tcould not event");
-                logger.error("could not event", e);
-                LoggingContext.successStatusFields();
-            }
-        }
-        logAndPrint("---------- Done ----------");
-
-        // Save post migration snapshot if snapshot was loaded
-        if (!cArgs.skipPostMigrationSnapShot) {
-            generateSnapshot(engine, "post");
-        }
-
-        outputResultsSummary();
-    }
-
-    /**
-     * This method is used to remove excluded classes from migration from the
-     * script command.
-     *
-     * @param excludeClasses
-     *            : Classes to be removed from Migration
-     * @param migratorClasses
-     *            : Classes to execute migration.
-     * @return
-     */
-    private List<Class<? extends Migrator>> filterMigrationClasses(
-            List<String> excludeClasses,
-            List<Class<? extends Migrator>> migratorClasses) {
-
-        List<Class<? extends Migrator>> filteredMigratorClasses = migratorClasses
-                .stream()
-                .filter(migratorClass -> !excludeClasses.contains(migratorClass
-                        .getSimpleName())).collect(Collectors.toList());
-
-        return filteredMigratorClasses;
-    }
-
-    private void listMigrationWithStatus(CommandLineArgs cArgs,
-            List<Class<? extends Migrator>> migratorClasses, TransactionalGraphEngine engine) {
-            sortList(migratorClasses);
-            engine.startTransaction();
-            System.out.println("---------- List of all migrations ----------");
-            migratorClasses.forEach(migratorClass -> {
-                boolean enabledAnnotation = migratorClass.isAnnotationPresent(Enabled.class);
-                String enabled = enabledAnnotation ? "Enabled" : "Disabled";
-                StringBuilder sb = new StringBuilder();
-                sb.append(migratorClass.getSimpleName());
-                sb.append(" in package ");
-                sb.append(migratorClass.getPackage().getName().substring(migratorClass.getPackage().getName().lastIndexOf('.')+1));
-                sb.append(" is ");
-                sb.append(enabled);
-                sb.append(" ");
-                sb.append("[" + getDbStatus(migratorClass.getSimpleName(), engine) + "]");
-                System.out.println(sb.toString());
-            });
-            engine.rollback();
-            System.out.println("---------- Done ----------");
-        }
-
-    private String getDbStatus(String name, TransactionalGraphEngine engine) {
-        if (hasAlreadyRun(name, engine)) {
-            return "Already executed in this env";
-        }
-        return "Will be run on next execution if Enabled";
-    }
-
-    private boolean hasAlreadyRun(String name, TransactionalGraphEngine engine) {
-        return engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE).has(name, true).hasNext();
-    }
-    private Set<Class<? extends Migrator>> findClasses(Reflections reflections) {
-        Set<Class<? extends Migrator>> migratorClasses = reflections.getSubTypesOf(Migrator.class);
-        /*
-         * TODO- Change this to make sure only classes in the specific $release are added in the runList
-         * Or add a annotation like exclude which folks again need to remember to add ??
-         */
-
-        migratorClasses.remove(PropertyMigrator.class);
-        migratorClasses.remove(EdgeMigrator.class);
-        return migratorClasses;
-    }
-
-
-    private void takePreSnapshotIfRequired(TransactionalGraphEngine engine, CommandLineArgs cArgs, List<Class<? extends Migrator>> migratorClassesToRun) {
-
-        /*int sum = 0;
-        for (Class<? extends Migrator> migratorClass : migratorClassesToRun) {
-            if (migratorClass.isAnnotationPresent(Enabled.class)) {
-                sum += migratorClass.getAnnotation(MigrationPriority.class).value();
-            }
-        }
-
-        if (sum >= DANGER_ZONE) {
-
-            logAndPrint("Entered Danger Zone. Taking snapshot.");
-        }*/
-
-        //always take snapshot for now
-
-        generateSnapshot(engine, "pre");
-
-    }
+       private EELFLogger logger;
+       private final int DANGER_ZONE = 10;
+       public static final String VERTEX_TYPE = "migration-list-1707";
+       private final List<String> resultsSummary = new ArrayList<>();
+       private final List<NotificationHelper> notifications = new ArrayList<>();
+       private static final String SNAPSHOT_LOCATION = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "migrationSnapshots";
+
+       private LoaderFactory loaderFactory;
+       private EdgeIngestor edgeIngestor;
+       private EdgeSerializer edgeSerializer;
+       private final SchemaVersions schemaVersions;
+
+       public MigrationControllerInternal(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){
+           this.loaderFactory = loaderFactory;
+               this.edgeIngestor = edgeIngestor;
+               this.edgeSerializer = edgeSerializer;
+               this.schemaVersions = schemaVersions;
+               
+       }
+
+       /**
+        * The main method.
+        *
+        * @param args
+        *            the arguments
+        */
+       public void run(String[] args) {
+               // Set the logging file properties to be used by EELFManager
+               System.setProperty("aai.service.name", MigrationController.class.getSimpleName());
+               Properties props = System.getProperties();
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "migration-logback.xml");
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES);
+
+               logger = EELFManager.getInstance().getLogger(MigrationControllerInternal.class.getSimpleName());
+               MDC.put("logFilenameAppender", MigrationController.class.getSimpleName());
+
+               boolean loadSnapshot = false;
+
+               CommandLineArgs cArgs = new CommandLineArgs();
+
+               JCommander jCommander = new JCommander(cArgs, args);
+               jCommander.setProgramName(MigrationController.class.getSimpleName());
+
+               // Set flag to load from snapshot based on the presence of snapshot and
+               // graph storage backend of inmemory
+               if (cArgs.dataSnapshot != null && !cArgs.dataSnapshot.isEmpty()) {
+                       try {
+                               PropertiesConfiguration config = new PropertiesConfiguration(cArgs.config);
+                               if (config.getString("storage.backend").equals("inmemory")) {
+                                       loadSnapshot = true;
+//                                     System.setProperty("load.snapshot.file", "true");
+                                       System.setProperty("snapshot.location", cArgs.dataSnapshot);
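+                                       // Split the snapshot argument into a directory and file name (defaulting to <AAI_HOME>/snapshots)
+                                       // so DataSnapshot can reload it into the in-memory graph via its MULTITHREAD_RELOAD command.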
+                                       String snapshotLocation =cArgs.dataSnapshot;
+                                       String snapshotDir;
+                                       String snapshotFile;
+                                       int index = snapshotLocation.lastIndexOf("\\");
+                                       if (index == -1){
+                                               //Use default directory path
+                                               snapshotDir =  AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "snapshots";
+                                               snapshotFile = snapshotLocation;
+                                       } else {
+                                               snapshotDir = snapshotLocation.substring(0, index+1);
+                                               snapshotFile = snapshotLocation.substring(index+1, snapshotLocation.length()) ;
+                                       }
+                                       String [] dataSnapShotArgs = {"-c","MULTITHREAD_RELOAD","-f", snapshotFile, "-oldFileDir",snapshotDir, "-caller","migration"};
+                                       DataSnapshot dataSnapshot = new DataSnapshot();
+                                       dataSnapshot.executeCommand(dataSnapShotArgs, true, false, null, "MULTITHREAD_RELOAD", snapshotFile);
+                               }
+                       } catch (ConfigurationException e) {
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                               logAndPrint("ERROR: Could not load janusgraph configuration.\n" + ExceptionUtils.getFullStackTrace(e));
+                               return;
+                       }
+               }
+               else {
+                       System.setProperty("realtime.db.config", cArgs.config);
+                       logAndPrint("\n\n---------- Connecting to Graph ----------");
+                       AAIGraph.getInstance();
+               }
+
+               logAndPrint("---------- Connection Established ----------");
+               SchemaVersion version = schemaVersions.getDefaultVersion();
+               QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+               ModelType introspectorFactoryType = ModelType.MOXY;
+               Loader loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);
+               TransactionalGraphEngine engine = new JanusGraphDBEngine(queryStyle, DBConnectionType.REALTIME, loader);
+
+               if (cArgs.help) {
+                       jCommander.usage();
+                       engine.rollback();
+                       return;
+               }
+
+               Reflections reflections = new Reflections("org.onap.aai.migration");
+               List<Class<? extends Migrator>> migratorClasses = new ArrayList<>(findClasses(reflections));
+               // Displays the list of migration classes that need to be executed. Pass flag "-l" followed by the class names.
+               if (cArgs.list) {
+                       listMigrationWithStatus(cArgs, migratorClasses, engine);
+                       return;
+               }
+
+               logAndPrint("---------- Looking for migration scripts to be executed. ----------");
+               // Excludes migration classes when running migration from the script. Pass flag "-e" followed by the class names.
+               if (!cArgs.excludeClasses.isEmpty()) {
+                       migratorClasses = filterMigrationClasses(cArgs.excludeClasses, migratorClasses);
+                       listMigrationWithStatus(cArgs, migratorClasses, engine);
+               }
+               List<Class<? extends Migrator>> migratorClassesToRun = createMigratorList(cArgs, migratorClasses);
+
+               sortList(migratorClassesToRun);
+
+               if (!cArgs.scripts.isEmpty() && migratorClassesToRun.isEmpty()) {
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                       logAndPrint("\tERROR: Failed to find migrations " + cArgs.scripts + ".");
+                       logAndPrint("---------- Done ----------");
+                       LoggingContext.successStatusFields();
+               }
+
+               logAndPrint("\tFound " + migratorClassesToRun.size() + " migration scripts.");
+               logAndPrint("---------- Executing Migration Scripts ----------");
+
+
+               if (!cArgs.skipPreMigrationSnapShot) {
+                       takePreSnapshotIfRequired(engine, cArgs, migratorClassesToRun);
+               }
+
+               for (Class<? extends Migrator> migratorClass : migratorClassesToRun) {
+                       String name = migratorClass.getSimpleName();
+                       Migrator migrator;
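+                       // Run the migrator only if it is annotated with @Enabled or was explicitly requested via the runDisabled flag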
+                       if (cArgs.runDisabled.contains(name) || migratorClass.isAnnotationPresent(Enabled.class)) {
+
+                               try {
+                                       engine.startTransaction();
+                                       if (!cArgs.forced && hasAlreadyRun(name, engine)) {
+                                               logAndPrint("Migration " + name + " has already been run on this database and will not be executed again. Use -f to force execution");
+                                               continue;
+                                       }
+                                       migrator = migratorClass
+                                               .getConstructor(
+                                                       TransactionalGraphEngine.class,
+                                                       LoaderFactory.class,
+                                                       EdgeIngestor.class,
+                                                       EdgeSerializer.class,
+                                                       SchemaVersions.class
+                                               ).newInstance(engine, loaderFactory, edgeIngestor, edgeSerializer,schemaVersions);
+                               } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                       logAndPrint("EXCEPTION caught initializing migration class " + migratorClass.getSimpleName() + ".\n" + ExceptionUtils.getFullStackTrace(e));
+                                       LoggingContext.successStatusFields();
+                                       engine.rollback();
+                                       continue;
+                               }
+                               logAndPrint("\tRunning " + migratorClass.getSimpleName() + " migration script.");
+                               logAndPrint("\t\t See " + System.getProperty("AJSC_HOME") + "/logs/migration/" + migratorClass.getSimpleName() + "/* for logs.");
+                               MDC.put("logFilenameAppender", migratorClass.getSimpleName() + "/" + migratorClass.getSimpleName());
+
+                               migrator.run();
+
+                               commitChanges(engine, migrator, cArgs);
+                       } else {
+                               logAndPrint("\tSkipping " + migratorClass.getSimpleName() + " migration script because it has been disabled.");
+                       }
+               }
+               MDC.put("logFilenameAppender", MigrationController.class.getSimpleName());
+               for (NotificationHelper notificationHelper : notifications) {
+                       try {
+                               notificationHelper.triggerEvents();
+                       } catch (AAIException e) {
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+                               logAndPrint("\tcould not trigger notification events");
+                               logger.error("could not trigger notification events", e);
+                               LoggingContext.successStatusFields();
+                       }
+               }
+               logAndPrint("---------- Done ----------");
+
+               // Save post migration snapshot if snapshot was loaded
+               if (!cArgs.skipPostMigrationSnapShot) {
+                       generateSnapshot(engine, "post");
+               }
+
+               outputResultsSummary();
+       }
+
+       /**
+        * Removes the classes excluded via the "-e" flag from the list of
+        * migration classes to execute.
+        *
+        * @param excludeClasses
+        *            classes to be excluded from the migration run
+        * @param migratorClasses
+        *            candidate migration classes
+        * @return the filtered list of migration classes
+        */
+       private List<Class<? extends Migrator>> filterMigrationClasses(
+                       List<String> excludeClasses,
+                       List<Class<? extends Migrator>> migratorClasses) {
+
+               List<Class<? extends Migrator>> filteredMigratorClasses = migratorClasses
+                               .stream()
+                               .filter(migratorClass -> !excludeClasses.contains(migratorClass
+                                               .getSimpleName())).collect(Collectors.toList());
+
+               return filteredMigratorClasses;
+       }
+
+       private void listMigrationWithStatus(CommandLineArgs cArgs,
+                       List<Class<? extends Migrator>> migratorClasses, TransactionalGraphEngine engine) {
+                       sortList(migratorClasses);
+                       engine.startTransaction();
+                       System.out.println("---------- List of all migrations ----------");
+                       migratorClasses.forEach(migratorClass -> {
+                               boolean enabledAnnotation = migratorClass.isAnnotationPresent(Enabled.class);
+                               String enabled = enabledAnnotation ? "Enabled" : "Disabled";
+                               StringBuilder sb = new StringBuilder();
+                               sb.append(migratorClass.getSimpleName());
+                               sb.append(" in package ");
+                               sb.append(migratorClass.getPackage().getName().substring(migratorClass.getPackage().getName().lastIndexOf('.')+1));
+                               sb.append(" is ");
+                               sb.append(enabled);
+                               sb.append(" ");
+                               sb.append("[" + getDbStatus(migratorClass.getSimpleName(), engine) + "]");
+                               System.out.println(sb.toString());
+                       });
+                       engine.rollback();
+                       System.out.println("---------- Done ----------");
+               }
+
+       private String getDbStatus(String name, TransactionalGraphEngine engine) {
+               if (hasAlreadyRun(name, engine)) {
+                       return "Already executed in this env";
+               }
+               return "Will be run on next execution if Enabled";
+       }
+
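+       // A migration has "already run" when the shared marker vertex carries a true-valued property named after the migrator (set by commitChanges)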
+       private boolean hasAlreadyRun(String name, TransactionalGraphEngine engine) {
+               return engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE).has(name, true).hasNext();
+       }
+       private Set<Class<? extends Migrator>> findClasses(Reflections reflections) {
+               Set<Class<? extends Migrator>> migratorClasses = reflections.getSubTypesOf(Migrator.class);
+               /*
+                * TODO: Change this to make sure only classes in the specific $release are added to the runList,
+                * or add an annotation like @Exclude, which folks would again need to remember to add.
+                */
+
+               migratorClasses.remove(PropertyMigrator.class);
+               migratorClasses.remove(EdgeMigrator.class);
+               return migratorClasses;
+       }
+
+
+       private void takePreSnapshotIfRequired(TransactionalGraphEngine engine, CommandLineArgs cArgs, List<Class<? extends Migrator>> migratorClassesToRun) {
+
+               /*int sum = 0;
+               for (Class<? extends Migrator> migratorClass : migratorClassesToRun) {
+                       if (migratorClass.isAnnotationPresent(Enabled.class)) {
+                               sum += migratorClass.getAnnotation(MigrationPriority.class).value();
+                       }
+               }
+
+               if (sum >= DANGER_ZONE) {
+
+                       logAndPrint("Entered Danger Zone. Taking snapshot.");
+               }*/
+
+               //always take snapshot for now
+
+               generateSnapshot(engine, "pre");
+
+       }
 
 
     private List<Class<? extends Migrator>> createMigratorList(CommandLineArgs cArgs,
@@ -345,6 +364,7 @@ public class MigrationControllerInternal {
         }
         return migratorClassesToRun;
     }
+
     private boolean migratorExplicitlySpecified(CommandLineArgs cArgs, String migratorName){
         return !cArgs.scripts.isEmpty() && cArgs.scripts.contains(migratorName);
     }
@@ -352,122 +372,125 @@ public class MigrationControllerInternal {
         return !cArgs.runDisabled.isEmpty() && cArgs.runDisabled.contains(migratorName);
     }
 
-    private void sortList(List<Class<? extends Migrator>> migratorClasses) {
-        Collections.sort(migratorClasses, (m1, m2) -> {
-            try {
-                if (m1.getAnnotation(MigrationPriority.class).value() > m2.getAnnotation(MigrationPriority.class).value()) {
-                    return 1;
-                } else if (m1.getAnnotation(MigrationPriority.class).value() < m2.getAnnotation(MigrationPriority.class).value()) {
-                    return -1;
-                } else {
-                    return m1.getSimpleName().compareTo(m2.getSimpleName());
-                }
-            } catch (Exception e) {
-                return 0;
-            }
-        });
-    }
-
-
-    private void generateSnapshot(TransactionalGraphEngine engine, String phase) {
-
-        FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
-        String dateStr= fd.getDateTime();
-        String fileName = SNAPSHOT_LOCATION + File.separator + phase + "Migration." + dateStr + ".graphson";
-        logAndPrint("Saving snapshot of graph " + phase + " migration to " + fileName);
-        Graph transaction = null;
-        try {
-
-            Path pathToFile = Paths.get(fileName);
-            if (!pathToFile.toFile().exists()) {
-                Files.createDirectories(pathToFile.getParent());
-            }
-            transaction = engine.startTransaction();
-            transaction.io(IoCore.graphson()).writeGraph(fileName);
-            engine.rollback();
-        } catch (IOException e) {
-            LoggingContext.statusCode(StatusCode.ERROR);
-            LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
-            logAndPrint("ERROR: Could not write in memory graph to " + phase + "Migration file. \n" + ExceptionUtils.getFullStackTrace(e));
-            LoggingContext.successStatusFields();
-            engine.rollback();
-        }
-
-        logAndPrint( phase + " migration snapshot saved to " + fileName);
-    }
-    /**
-     * Log and print.
-     *
-     * @param msg
-     *            the msg
-     */
-    protected void logAndPrint(String msg) {
-        System.out.println(msg);
-        logger.info(msg);
-    }
-
-    /**
-     * Commit changes.
-     *
-     * @param engine
-     *            the graph transaction
-     * @param migrator
-     *            the migrator
-     * @param cArgs
-     */
-    protected void commitChanges(TransactionalGraphEngine engine, Migrator migrator, CommandLineArgs cArgs) {
-
-        String simpleName = migrator.getClass().getSimpleName();
-        String message;
-        if (migrator.getStatus().equals(Status.FAILURE)) {
-            message = "Migration " + simpleName + " Failed. Rolling back.";
-            LoggingContext.statusCode(StatusCode.ERROR);
-            LoggingContext.responseCode(LoggingContext.DATA_ERROR);
-            logAndPrint("\t" + message);
-            LoggingContext.successStatusFields();
-            migrator.rollback();
-        } else if (migrator.getStatus().equals(Status.CHECK_LOGS)) {
-            message = "Migration " + simpleName + " encountered an anomaly, check logs. Rolling back.";
-            LoggingContext.statusCode(StatusCode.ERROR);
-            LoggingContext.responseCode(LoggingContext.DATA_ERROR);
-            logAndPrint("\t" + message);
-            LoggingContext.successStatusFields();
-            migrator.rollback();
-        } else {
-            MDC.put("logFilenameAppender", simpleName + "/" + simpleName);
-
-            if (cArgs.commit) {
-                if (!engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE).hasNext()) {
-                    engine.asAdmin().getTraversalSource().addV(AAIProperties.NODE_TYPE, VERTEX_TYPE).iterate();
-                }
-                engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE)
-                .property(simpleName, true).iterate();
-                MDC.put("logFilenameAppender", MigrationController.class.getSimpleName());
-                notifications.add(migrator.getNotificationHelper());
-                migrator.commit();
-                message = "Migration " + simpleName + " Succeeded. Changes Committed.";
-                logAndPrint("\t"+ message +"\t");
-            } else {
-                message = "--commit not specified. Not committing changes for " + simpleName + " to database.";
-                logAndPrint("\t" + message);
-                migrator.rollback();
-            }
-
-        }
-
-        resultsSummary.add(message);
-
-    }
-
-    private void outputResultsSummary() {
-        logAndPrint("---------------------------------");
-        logAndPrint("-------------Summary-------------");
-        for (String result : resultsSummary) {
-            logAndPrint(result);
-        }
-        logAndPrint("---------------------------------");
-        logAndPrint("---------------------------------");
-    }
+       private void sortList(List<Class<? extends Migrator>> migratorClasses) {
+               Collections.sort(migratorClasses, (m1, m2) -> {
+                       try {
+                               if (m1.getAnnotation(MigrationPriority.class).value() > m2.getAnnotation(MigrationPriority.class).value()) {
+                                       return 1;
+                               } else if (m1.getAnnotation(MigrationPriority.class).value() < m2.getAnnotation(MigrationPriority.class).value()) {
+                                       return -1;
+                               } else {
+                                       return m1.getSimpleName().compareTo(m2.getSimpleName());
+                               }
+                       } catch (Exception e) {
+                               return 0;
+                       }
+               });
+       }
+
+
+       private void generateSnapshot(TransactionalGraphEngine engine, String phase) {
+
+               FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
+               String dateStr= fd.getDateTime();
+               String fileName = SNAPSHOT_LOCATION + File.separator + phase + "Migration." + dateStr + ".graphson";
+               logAndPrint("Saving snapshot of graph " + phase + " migration to " + fileName);
+               Graph transaction = null;
+               try {
+
+                       Path pathToFile = Paths.get(fileName);
+                       if (!pathToFile.toFile().exists()) {
+                               Files.createDirectories(pathToFile.getParent());
+                       }
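+                       // Delegate the snapshot to DataSnapshot in THREADED_SNAPSHOT mode rather than writing the in-memory graph directly (see commented-out code below)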
+                       String [] dataSnapshotArgs = {"-c","THREADED_SNAPSHOT", "-fileName",fileName, "-caller","migration"};
+                       DataSnapshot dataSnapshot = new DataSnapshot();
+                       dataSnapshot.executeCommand(dataSnapshotArgs, true, false, null, "THREADED_SNAPSHOT", null);
+//                     transaction = engine.startTransaction();
+//                     transaction.io(IoCore.graphson()).writeGraph(fileName);
+//                     engine.rollback();
+               } catch (IOException e) {
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+                       logAndPrint("ERROR: Could not write in memory graph to " + phase + "Migration file. \n" + ExceptionUtils.getFullStackTrace(e));
+                       LoggingContext.successStatusFields();
+                       engine.rollback();
+               }
+
+               logAndPrint( phase + " migration snapshot saved to " + fileName);
+       }
+       /**
+        * Log and print.
+        *
+        * @param msg
+        *            the msg
+        */
+       protected void logAndPrint(String msg) {
+               System.out.println(msg);
+               logger.info(msg);
+       }
+
+       /**
+        * Commit changes.
+        *
+        * @param engine
+        *            the graph transaction
+        * @param migrator
+        *            the migrator
+        * @param cArgs
+        */
+       protected void commitChanges(TransactionalGraphEngine engine, Migrator migrator, CommandLineArgs cArgs) {
+
+               String simpleName = migrator.getClass().getSimpleName();
+               String message;
+               if (migrator.getStatus().equals(Status.FAILURE)) {
+                       message = "Migration " + simpleName + " Failed. Rolling back.";
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       logAndPrint("\t" + message);
+                       LoggingContext.successStatusFields();
+                       migrator.rollback();
+               } else if (migrator.getStatus().equals(Status.CHECK_LOGS)) {
+                       message = "Migration " + simpleName + " encountered an anomaly, check logs. Rolling back.";
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       logAndPrint("\t" + message);
+                       LoggingContext.successStatusFields();
+                       migrator.rollback();
+               } else {
+                       MDC.put("logFilenameAppender", simpleName + "/" + simpleName);
+
+                       if (cArgs.commit) {
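+                               // Mark the migration as run by setting a true-valued property, named after the migrator, on the shared marker vertex (read back by hasAlreadyRun)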
+                               if (!engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE).hasNext()) {
+                                       engine.asAdmin().getTraversalSource().addV(AAIProperties.NODE_TYPE, VERTEX_TYPE).iterate();
+                               }
+                               engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE)
+                               .property(simpleName, true).iterate();
+                               MDC.put("logFilenameAppender", MigrationController.class.getSimpleName());
+                               notifications.add(migrator.getNotificationHelper());
+                               migrator.commit();
+                               message = "Migration " + simpleName + " Succeeded. Changes Committed.";
+                               logAndPrint("\t"+ message +"\t");
+                       } else {
+                               message = "--commit not specified. Not committing changes for " + simpleName + " to database.";
+                               logAndPrint("\t" + message);
+                               migrator.rollback();
+                       }
+
+               }
+
+               resultsSummary.add(message);
+
+       }
+
+       private void outputResultsSummary() {
+               logAndPrint("---------------------------------");
+               logAndPrint("-------------Summary-------------");
+               for (String result : resultsSummary) {
+                       logAndPrint(result);
+               }
+               logAndPrint("---------------------------------");
+               logAndPrint("---------------------------------");
+       }
 
 }
 
index 106d5e4..791fec0 100644 (file)
@@ -135,11 +135,11 @@ public abstract class Migrator implements Runnable {
                if (dmaapMsgList.size() > 0) {
                        try {
                                Files.write(Paths.get(logDirectory+"/"+fileName), (Iterable<String>)dmaapMsgList.stream()::iterator);
-                       } catch (IOException e) {
-                               logger.error("Unable to generate file with dmaap msgs for MigrateHUBEvcInventory", e);
+                       } catch (IOException e) {                               
+                               logger.error("Unable to generate file with dmaap msgs for " + getMigrationName(), e);
                        }
                } else {
-                       logger.info("No dmaap msgs detected for MigrateForwardEvcCircuitId");
+                       logger.info("No dmaap msgs detected for " + getMigrationName());
                }
        }
 
@@ -304,6 +304,28 @@ public abstract class Migrator implements Runnable {
                }
                return newEdge;
        }
+       
+       /**
+        * Creates the edge, but only if the edge rules allow it.
+        *
+        * @param type the edge type - COUSIN or TREE
+        * @param out the out (source) vertex
+        * @param in the in (target) vertex
+        * @return the created edge
+        */
+       protected Edge createEdgeIfPossible(EdgeType type, Vertex out, Vertex in) throws AAIException {
+               Edge newEdge = null;
+               try {
+                       if (type.equals(EdgeType.COUSIN)){
+                               newEdge = edgeSerializer.addEdgeIfPossible(this.engine.asAdmin().getTraversalSource(), out, in);
+                       } else {
+                               newEdge = edgeSerializer.addTreeEdgeIfPossible(this.engine.asAdmin().getTraversalSource(), out, in);
+                       }
+               } catch (NoEdgeRuleFoundException e) {
+                       throw new AAIException("AAI_6129", e);
+               }
+               return newEdge;
+       }
 
        /**
         * Creates the edge
index 6d02563..458796a 100644 (file)
  */
 package org.onap.aai.migration;
 
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
+
 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.onap.aai.db.props.AAIProperties;
@@ -38,9 +43,16 @@ import org.onap.aai.setup.SchemaVersions;
 public abstract class ValueMigrator extends Migrator {
 
     protected final Map<String, Map<String, ?>> propertyValuePairByNodeType;
+    protected Map<String, List<?>> conditionsMap;
     protected final Boolean updateExistingValues;
-       protected final JanusGraphManagement graphMgmt;
-
+       protected final JanusGraphManagement graphMgmt; 
+       
+       private int migrationSuccess = 0;
+       private Map<String, String> nodeTotalSuccess = new HashMap<>();
+       private int subTotal = 0;
+       
+       private static List<String> dmaapMsgList = new ArrayList<String>();
+       
     /**
      *
      * @param engine
@@ -53,6 +65,23 @@ public abstract class ValueMigrator extends Migrator {
            this.updateExistingValues = updateExistingValues;
                this.graphMgmt = engine.asAdmin().getManagementSystem();
        }
+       
+       // Constructor for conditional migrations: values are only updated on vertices that satisfy the property conditions in conditionsMap
+       public ValueMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions, Map propertyValuePairByNodeType, Map conditionsMap, Boolean updateExistingValues) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+           this.propertyValuePairByNodeType = propertyValuePairByNodeType;
+           this.updateExistingValues = updateExistingValues;
+           this.conditionsMap = conditionsMap;
+               this.graphMgmt = engine.asAdmin().getManagementSystem();
+       }
+       
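+       // Commits the graph transaction and, when isUpdateDmaap() is overridden to return true, writes the collected dmaap messages to file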
+       @Override
+       public void commit() {
+        engine.commit();
+        if(isUpdateDmaap()){
+               createDmaapFiles(this.dmaapMsgList);
+        }
+       }
 
        /**
         * Do not override this method as an inheritor of this class
@@ -64,41 +93,95 @@ public abstract class ValueMigrator extends Migrator {
 
     protected void updateValues() {
         for (Map.Entry<String, Map<String, ?>> entry: propertyValuePairByNodeType.entrySet()) {
-            String nodeType = entry.getKey();
+            String nodeType = entry.getKey();  
+            this.subTotal = 0;
+            
             Map<String, ?> propertyValuePair = entry.getValue();
             for (Map.Entry<String, ?> pair : propertyValuePair.entrySet()) {
-                String property = pair.getKey();
+                String property = pair.getKey();  
                 Object newValue = pair.getValue();
                 try {
                     GraphTraversal<Vertex, Vertex> g = this.engine.asAdmin().getTraversalSource().V()
-                            .has(AAIProperties.NODE_TYPE, nodeType);
+                            .has(AAIProperties.NODE_TYPE, nodeType);  
                     while (g.hasNext()) {
                         Vertex v = g.next();
-                        if (v.property(property).isPresent() && !updateExistingValues) {
-                            String propertyValue = v.property(property).value().toString();
-                            if (propertyValue.isEmpty()) {
-                                v.property(property, newValue);
-                                logger.info(String.format("Node Type %s: Property %s is empty, adding value %s",
-                                        nodeType, property, newValue.toString()));
-                                this.touchVertexProperties(v, false);
-                            } else {
-                                logger.info(String.format("Node Type %s: Property %s value already exists - skipping",
-                                        nodeType, property));
-                            }
-                        } else {
-                            logger.info(String.format("Node Type %s: Property %s does not exist or " +
-                                    "updateExistingValues flag is set to True - adding the property with value %s",
-                                    nodeType, property, newValue.toString()));
-                            v.property(property, newValue);
-                            this.touchVertexProperties(v, false);
-                        }
+                     
+                        if (this.conditionsMap !=null){
+                               checkConditions( v, property, newValue, nodeType);
+                        }else{                         
+                               migrateValues( v, property, newValue, nodeType);
+                        }                                         
                     }
                 } catch (Exception e) {
                     logger.error(String.format("caught exception updating aai-node-type %s's property %s's value to " +
                             "%s: %s", nodeType, property, newValue.toString(), e.getMessage()));
                     logger.error(e.getMessage());
                 }
+            }            
+              this.nodeTotalSuccess.put(nodeType, Integer.toString(this.subTotal));
+        }
+        
+        logger.info("\n\n ******* Final Summary for " + getMigrationName() + " ********* \n");
+        for (Map.Entry<String, String> migratedNode: nodeTotalSuccess.entrySet()) {
+               logger.info("Total Migrated Records for " + migratedNode.getKey() +": " + migratedNode.getValue());
+               
+        }
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Total Migrated Records: "+ migrationSuccess);           
+        
+    }
+    
+    private void migrateValues (Vertex v, String property, Object newValue, String nodeType) throws Exception{
+       
+       if (v.property(property).isPresent() && !updateExistingValues) {
+            String propertyValue = v.property(property).value().toString();
+            if (propertyValue.isEmpty()) {
+                v.property(property, newValue);
+                logger.info(String.format("Node Type %s: Property %s is empty, adding value %s",
+                        nodeType, property, newValue.toString()));
+                this.touchVertexProperties(v, false);
+                updateDmaapList(v);
+                this.migrationSuccess++;
+                this.subTotal++;
+            } else {
+                logger.info(String.format("Node Type %s: Property %s value already exists - skipping",
+                        nodeType, property));
             }
+        } else {
+            logger.info(String.format("Node Type %s: Property %s does not exist or " +
+                    "updateExistingValues flag is set to True - adding the property with value %s",
+                    nodeType, property, newValue.toString()));
+            v.property(property, newValue);
+            this.touchVertexProperties(v, false);
+            updateDmaapList(v);
+            this.migrationSuccess++;
+            this.subTotal++;
         }
     }
-}
+    
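+    // Applies the value migration only to vertices whose condition property matches one of the configured values in conditionsMap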
+    private void checkConditions(Vertex v, String property, Object newValue, String nodeType) throws Exception{
+       
+       for (Map.Entry<String, List<?>> entry: conditionsMap.entrySet()){
+               String conditionType = entry.getKey();
+               List <?> conditionsValueList = conditionsMap.get(conditionType);
+               
+               if(v.property(conditionType).isPresent()){
+                       for (int i = 0; i < conditionsValueList.size(); i++){                   
+                               if (v.property(conditionType).value().equals(conditionsValueList.get(i))){                                      
+                                       migrateValues( v, property, newValue, nodeType);
+                                       break;
+                               }
+                       }                       
+               }               
+       }       
+    }
+    
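+    // Queues a dmaap message of the form <nanoTime>_<vertexId>_<resource-version> for the updated vertex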
+    private void updateDmaapList(Vertex v){
+       String dmaapMsg = System.nanoTime() + "_" + v.id().toString() + "_" + v.value("resource-version").toString();
+        dmaapMsgList.add(dmaapMsg);
+        logger.info("\tAdding Updated Vertex " + v.id().toString() + " to dmaapMsgList....");
+    }
+    
+    public boolean isUpdateDmaap(){
+       return false;
+    }
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/migration/v12/ALTSLicenseEntitlementMigration.java b/src/main/java/org/onap/aai/migration/v12/ALTSLicenseEntitlementMigration.java
new file mode 100644 (file)
index 0000000..ef45209
--- /dev/null
@@ -0,0 +1,200 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.*;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.setup.SchemaVersions;
+
+
+@MigrationPriority(11)
+@MigrationDangerRating(0)
+public class ALTSLicenseEntitlementMigration extends Migrator{
+
+    private final String LICENSE_NODE_TYPE = "license";
+    private final String ENTITLEMENT_NODE_TYPE = "entitlement";
+    private boolean success = true;
+    private final GraphTraversalSource g;
+    private int headerLength;
+
+
+    public ALTSLicenseEntitlementMigration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.g = this.engine.asAdmin().getTraversalSource();
+    }
+
+    @Override
+    public void run() {
+        logger.info("---------- Update ALTS Entitlements and Licenses resource-uuid in generic-vnf  ----------");
+        String homeDir = System.getProperty("AJSC_HOME");
+        String configDir = System.getProperty("BUNDLECONFIG_DIR");
+        if (homeDir == null) {
+            logger.info("ERROR: Could not find sys prop AJSC_HOME");
+            success = false;
+            return;
+        }
+        if (configDir == null) {
+            success = false;
+            return;
+        }
+        int fileLineCounter = 0;
+        String fileName = homeDir + "/" + configDir + "/" + "migration-input-files/ALTS-migration-data/ALTS-migration-input.csv";
+        Map<String, Set<String>> history = new HashMap<>();
+        logger.info(fileName);
+        logger.info("---------- Processing VNFs from file  ----------");
+        try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
+            String vnfLine;
+            while ((vnfLine = br.readLine()) != null) {
+                vnfLine = vnfLine.replace("\n", "").replace("\r", "");
+                logger.info("\n");
+                if (!vnfLine.isEmpty()) {
+                    if (fileLineCounter != 0) {
+                        String[] fields = vnfLine.split("\\s*,\\s*", -1);
+                        if (fields.length != this.headerLength) {
+                            logger.info("ERROR: Vnf line should contain " + this.headerLength + " columns, contains " + fields.length + " instead.");
+                            success = false;
+                            continue;
+                        }
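+                        // Expected CSV layout: column 0 = new resource-uuid, column 1 = group-uuid, column 19 = vnf-id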
+                        String newResourceUuid = fields[0];
+                        String groupUuid = fields[1];
+                        String vnfId = fields[19];
+                        logger.info("---------- Processing Line " + vnfLine + "----------");
+                        logger.info("newResourceUuid = " + newResourceUuid + " vnfId = " + vnfId + " group uuid = " + groupUuid);
+                        if (history.containsKey(vnfId)){
+                            if (history.get(vnfId).contains(groupUuid)){
+                                logger.info("ERROR: duplicate groupUuid in vnf - skipping");
+                                fileLineCounter++;
+                                continue;
+                            }
+                            else{
+                                history.get(vnfId).add(groupUuid);
+                            }
+                        }
+                        else {
+                            Set<String> newSet = new HashSet<>();
+                            newSet.add(groupUuid);
+                            history.put(vnfId, newSet);
+                        }
+                        List<Vertex> entitlements = g.V().has(AAIProperties.NODE_TYPE, "entitlement").has("group-uuid", groupUuid)
+                                .where(this.engine.getQueryBuilder().createEdgeTraversal(EdgeType.TREE, "entitlement", "generic-vnf").getVerticesByProperty("vnf-id", vnfId)
+                                        .<GraphTraversal<?, ?>>getQuery()).toList();
+
+                        List<Vertex> licenses = g.V().has(AAIProperties.NODE_TYPE, "license").has("group-uuid", groupUuid)
+                                .where(this.engine.getQueryBuilder().createEdgeTraversal(EdgeType.TREE, "license", "generic-vnf").getVerticesByProperty("vnf-id", vnfId)
+                                        .<GraphTraversal<?, ?>>getQuery()).toList();
+
+                        this.changeResourceUuid(entitlements, newResourceUuid, "entitlement", vnfId, groupUuid);
+                        this.changeResourceUuid(licenses, newResourceUuid, "license", vnfId, groupUuid);
+
+                    } else {
+                        this.headerLength = vnfLine.split("\\s*,\\s*", -1).length;
+                        logger.info("headerLength: " + headerLength);
+                        if (this.headerLength < 22){
+                            logger.info("ERROR: Input file should have at least 22 columns");
+                            this.success = false;
+                            return;
+                        }
+                    }
+                }
+                fileLineCounter++;
+            }
+        } catch (FileNotFoundException e) {
+            logger.info("ERROR: Could not find file " + fileName, e);
+            success = false;
+        } catch (IOException e) {
+            logger.info("ERROR: Issue reading file " + fileName, e);
+            success = false;
+        } catch (Exception e) {
+            logger.info("encountered exception", e);
+            e.printStackTrace();
+            success = false;
+        }
+    }
+
+    private void changeResourceUuid(List<Vertex> vertices, String newResourceUuid, String nodeType, String vnfId, String groupUuid) {
+        if (vertices.size() > 1) {
+           logger.info("\t More than 1 " + nodeType + " found, skipping");
+           return;
+        }
+        else if (vertices.size() == 1) {
+            try {
+                logger.info(String.format("Updating %s with groupUuid %s from generic-vnf with vnfId %s with newResourceUuid %s", nodeType, groupUuid, vnfId, newResourceUuid));
+                Vertex v = vertices.get(0);
+                String resourceUuid = v.<String>property("resource-uuid").value();
+                logger.info("\tOriginal resource-uuid: " + resourceUuid);
+                v.property("resource-uuid", newResourceUuid);
+
+                String aaiUri = v.<String>property(AAIProperties.AAI_URI).value();
+                if (aaiUri != null) {
+                    logger.info("\tOriginal aaiUri: " + aaiUri);
+                    aaiUri = aaiUri.replaceFirst("[^/]*"+resourceUuid + "$", newResourceUuid);
+                    v.property(AAIProperties.AAI_URI, aaiUri);
+                    logger.info("\tNew aaiUri: " + v.value(AAIProperties.AAI_URI).toString());
+                }
+                
+                this.touchVertexProperties(v, false);
+                logger.info("\tNew resource-uuid: " + newResourceUuid);
+            }
+            catch (Exception e){
+                logger.info("\t ERROR: caught exception: " + e.getMessage());
+            }
+        }
+        else {
+            logger.info("\t No " + nodeType + " found with group-uuid "+ groupUuid + " for generic-vnf " +vnfId);
+            return;
+        }
+    }
+
+    @Override
+    public Status getStatus() {
+        if (success) {
+            return Status.SUCCESS;
+        } else {
+            return Status.FAILURE;
+        }
+    }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{LICENSE_NODE_TYPE, ENTITLEMENT_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "ALTSLicenseEntitlementMigration";
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateDataFromASDCToConfiguration.java b/src/main/java/org/onap/aai/migration/v12/MigrateDataFromASDCToConfiguration.java
new file mode 100644 (file)
index 0000000..819c7d4
--- /dev/null
@@ -0,0 +1,138 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.*;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConstants;
+
+import java.io.*;
+import java.util.Optional;
+
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+//@Enabled
+public class MigrateDataFromASDCToConfiguration extends Migrator {
+    private final String PARENT_NODE_TYPE = "generic-vnf";
+    private boolean success = true;
+    private String entitlementPoolUuid = "";
+    private String VNT = "";
+
+
+    public MigrateDataFromASDCToConfiguration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+    }
+
+
+    @Override
+    public void run() {
+
+        String homeDir = System.getProperty("AJSC_HOME");
+        String configDir = System.getProperty("BUNDLECONFIG_DIR");
+
+        String csvFile = homeDir + AAIConstants.AAI_FILESEP  + configDir
+                + AAIConstants.AAI_FILESEP + "migration-input-files"
+                + AAIConstants.AAI_FILESEP + "VNT-migration-data" +
+                  AAIConstants.AAI_FILESEP + "VNT-migration-input.csv";
+
+        logger.info("Reading Csv file: " + csvFile);
+        BufferedReader br = null;
+        String line = "";
+        String cvsSplitBy = "\t";
+        try {
+
+            br = new BufferedReader(new FileReader(new File(csvFile)));
+            while ((line = br.readLine()) != null) {
+                line = line.replaceAll("\"", "");
+                String[] temp = line.split(cvsSplitBy);
+                if ("entitlement-pool-uuid".equals(temp[0]) || "vendor-allowed-max-bandwidth (VNT)".equals(temp[1])) {
+                    continue;
+                }
+                entitlementPoolUuid = temp[0];
+                VNT = temp[1];
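+                // Traversal: entitlement (by group-uuid) -> generic-vnf (vnf-type HN) -> service-instance -> configuration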
+                GraphTraversal<Vertex, Vertex> f = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, "entitlement").has("group-uuid", entitlementPoolUuid)
+                        .out("org.onap.relationships.inventory.BelongsTo").has(AAIProperties.NODE_TYPE, "generic-vnf")
+                        .has("vnf-type", "HN").in("org.onap.relationships.inventory.ComposedOf").has(AAIProperties.NODE_TYPE, "service-instance").out("org.onap.relationships.inventory.Uses").has(AAIProperties.NODE_TYPE, "configuration");
+                
+                modify(f);
+            }
+
+        } catch (FileNotFoundException e) {
+            success = false;
+            logger.error("Found Exception" , e);
+        } catch (IOException e) {
+            success = false;
+            logger.error("Found Exception" , e);
+        } catch (Exception a) {
+            success= false;
+            logger.error("Found Exception" , a);
+        } finally {
+            try {
+                if (br != null) {
+                    br.close();
+                }
+            } catch (IOException e) {
+                success = false;
+                logger.error("Found Exception", e);
+            }
+        }
+
+    }
+
+    public void modify(GraphTraversal<Vertex, Vertex> g) {
+        int count = 0;
+        while (g.hasNext()) {
+            Vertex v = g.next();
+            logger.info("Found node type " + v.property("aai-node-type").value().toString() + " with configuration id:  " + v.property("configuration-id").value().toString());
+            v.property("vendor-allowed-max-bandwidth", VNT);
+            logger.info("VNT val after migration: " + v.property("vendor-allowed-max-bandwidth").value().toString());
+            count++;
+        }
+
+        logger.info("modified " + count + " configuration nodes related to Entitlement UUID: " +entitlementPoolUuid);
+
+    }
+
+    @Override
+    public Status getStatus() {
+        if (success) {
+            return Status.SUCCESS;
+        } else {
+            return Status.FAILURE;
+        }
+    }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{PARENT_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateDataFromASDCToConfiguration";
+    }
+
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateHUBEvcInventory.java b/src/main/java/org/onap/aai/migration/v12/MigrateHUBEvcInventory.java
new file mode 100644 (file)
index 0000000..0b3103b
--- /dev/null
@@ -0,0 +1,293 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+
+
+@MigrationPriority(31)
+@MigrationDangerRating(100)
+//@Enabled
+public class MigrateHUBEvcInventory extends Migrator {
+
+       private static final String FORWARDER_EVC_NODE_TYPE = "forwarder-evc";
+       
+       private static boolean success = true;
+    private static boolean checkLog = false;
+    private static GraphTraversalSource g = null;
+    private int headerLength;
+  
+    private static int processedEvcsCount = 0;
+    private static int falloutRowsCount = 0;
+    private static List<String> processedEvcsList = new ArrayList<String>();
+    private static Map<String, String> falloutLinesMap = new HashMap<String, String>();
+    
+    private static final String homeDir = System.getProperty("AJSC_HOME");
+       private static List<String> dmaapMsgList = new ArrayList<String>();
+    
+       
+    public MigrateHUBEvcInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.g = this.engine.asAdmin().getTraversalSource();
+    }
+
+    @Override
+    public void run() {
+        logger.info("---------- Start migration of HUB EVC Inventory  ----------");
+        String homeDir = System.getProperty("AJSC_HOME");
+        String configDir = System.getProperty("BUNDLECONFIG_DIR");
+        if (homeDir == null) {
+            logger.info("ERROR: Could not find sys prop AJSC_HOME");
+            success = false;
+            return;
+        }
+        if (configDir == null) {
+            success = false;
+            return;
+        }
+        
+        String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/";
+        int fileLineCounter = 0;
+        String fileName = feedDir+ "hub.csv";
+        logger.info(fileName);
+        logger.info("---------- Processing HUB Entries from file  ----------");
+        try {
+            String line;
+            List<String> lines = Files.readAllLines(Paths.get(fileName));
+            Iterator<String> lineItr = lines.iterator();
+            while (lineItr.hasNext()){
+               line = lineItr.next();
+                logger.info("\n");
+                if (!line.isEmpty()) {
+                    if (fileLineCounter != 0) {
+                        String[] colList = line.split("\\s*,\\s*", -1);
+//                        if (colList.length != headerLength) {
+//                            logger.info("ERROR: HUB line entry should contain " + headerLength + " columns, contains " + colList.length + " instead.");
+//                            success = false;
+//                            continue;
+//                        }
+                        Map<String, String> hubColValues = new HashMap<String, String>();
+                        hubColValues.put("ivlan", colList[1]);
+                        hubColValues.put("nniSvlan", colList[3]);
+                        hubColValues.put("evcName", colList[4]);
+                       
+                       String evcName = hubColValues.get("evcName");
+                       String ivlan = hubColValues.get("ivlan");
+                       String nniSvlan = hubColValues.get("nniSvlan");
+                        if (!AAIConfig.isEmpty(evcName)) {
+                               logger.info("---------- Processing Line " + line + "----------");
+                            logger.info("\t Evc Name = " + evcName );
+                            
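+                            // Traversal: forwarding-path (by forwarding-path-id) -> forwarder -> configuration -> forwarder-evc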
+                            List<Vertex> forwarderEvcList = g.V().has ("forwarding-path-id", evcName).has(AAIProperties.NODE_TYPE, "forwarding-path")
+                                    .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                                       .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                                       .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder-evc").toList();
+                                               
+                            
+                            if (forwarderEvcList == null || forwarderEvcList.isEmpty()){
+                                logger.info("\t ERROR: Forwarder-evc does not exist for evc-id = " + evcName + " - skipping");
+                                falloutLinesMap.put(String.valueOf(fileLineCounter+1), "["+evcName+"] - Forwarder-evc does not exist" );
+                                falloutRowsCount++;
+                            }
+                            else if (forwarderEvcList!= null && !forwarderEvcList.isEmpty()) {
+                               Iterator<Vertex> listItr = forwarderEvcList.iterator();
+                               while (listItr.hasNext()){
+                                       Vertex forwarderEvcVtx = listItr.next();
+                                                                       if (forwarderEvcVtx != null && forwarderEvcVtx.property("forwarder-evc-id").isPresent() && !AAIConfig.isEmpty(ivlan )) {
+                                                                               boolean isUpdated = updateIvlanOnForwarder(forwarderEvcVtx, ivlan, nniSvlan );
+                                                                               if (!isUpdated){
+                                                                                       falloutLinesMap.put(String.valueOf(fileLineCounter+1), "["+evcName+"] - Forwarder-evc does not have svlan populated" );
+                                                       falloutRowsCount++;
+                                                                               }
+                                       }
+                                   }
+                               if (!processedEvcsList.contains(evcName)) {
+                                       processedEvcsList.add(evcName);
+                                       processedEvcsCount++;
+                               }
+                               
+                            }
+                        }
+                    } else {
+                        this.headerLength = line.split("\\s*,\\s*", -1).length;
+                        logger.info("headerLength: " + headerLength);
+                        if (this.headerLength < 5){
+                            logger.info("ERROR: Input file should have at least 5 columns");
+                            MigrateHUBEvcInventory.success = false;
+                            return;
+                        }
+                    }
+                }
+               
+                fileLineCounter++;
+            }
+            
+            logger.info ("\n \n ******* Final Summary for HUB FILE Migration ********* \n");
+            logger.info("Evcs processed: "+processedEvcsCount);
+            logger.info("Total Rows Count: "+(fileLineCounter + 1));
+            logger.info("Fallout Rows Count : "+falloutRowsCount +"\n");
+            if (!falloutLinesMap.isEmpty()) {
+               logger.info("------ Fallout Details: ------");
+               falloutLinesMap.forEach((lineNumber, errorMsg) -> {
+                       logger.info(errorMsg + ": on row "+lineNumber.toString());
+               });
+            }
+            
+      } catch (FileNotFoundException e) {
+            logger.info("ERROR: Could not find file " + fileName, e.getMessage());
+            success = false;
+            checkLog = true;
+        } catch (IOException e) {
+            logger.info("ERROR: Issue reading file " + fileName, e);
+            success = false;
+        } catch (Exception e) {
+            logger.info("encountered exception", e.getMessage());
+            success = false;
+        }
+    }
+    
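+    // Updates ivlan on the forwarder-evc only when its svlan matches the nni-svlan from the feed; returns false when svlan is not populated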
+    private boolean updateIvlanOnForwarder(Vertex forwarderEvcVtx, String ivlan, String nniSvlan) throws Exception {
+       
+       boolean isUpdated = true;
+       String forwarderEvcId = forwarderEvcVtx.value("forwarder-evc-id");
+       
+       String forwarderSvlan = null;
+       if( forwarderEvcVtx.property("svlan").isPresent()) {
+               forwarderSvlan = forwarderEvcVtx.value("svlan");
+       }
+       if (forwarderSvlan != null && !forwarderSvlan.isEmpty()) {
+                       if (nniSvlan != null && !nniSvlan.isEmpty()
+                                       && (Integer.parseInt(forwarderSvlan) == Integer.parseInt(nniSvlan))) {
+                               if (ivlan != null && !ivlan.isEmpty()) {
+                                       if (forwarderEvcVtx.property("ivlan").isPresent()) {
+                                               String forwarderIvlan = forwarderEvcVtx.value("ivlan");
+                                               if (forwarderIvlan != null && !forwarderIvlan.isEmpty()) {
+                                                       if (Integer.parseInt(forwarderIvlan) == Integer.parseInt(ivlan)) {
+                                                               logger.info("\t Skipped update ivlan for  forwarder-evc[" + forwarderEvcId
+                                                                               + "], ivlan already set to expected value");
+                                                       } else {
+                                                               logger.info("\t Start ivlan update for forwarder-evc[" + forwarderEvcId + "]");
+                                                               updateIvlan(forwarderEvcVtx, ivlan, forwarderEvcId);
+                                                       }
+                                               }
+                                       } else {
+                                               updateIvlan(forwarderEvcVtx, ivlan, forwarderEvcId);
+                                       }
+                               }
+                       }
+               } else {
+                       logger.info("Skipping ivlan update, svlan is not present on the forwarder-evc ["+forwarderEvcId +"]" );
+                       isUpdated = false;
+               }
+               return isUpdated;
+       }
+
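+       /**
+        * Sets the ivlan property on the forwarder-evc vertex, touches the standard AAI properties,
+        * and records a dmaapMsgList entry that is passed to createDmaapFiles() at commit time.
+        */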
+       private void updateIvlan(Vertex forwarderEvcVtx, String ivlan, String forwarderEvcId) {
+               forwarderEvcVtx.property("ivlan", ivlan);
+               this.touchVertexProperties(forwarderEvcVtx, false);
+               logger.info("\t Updated ivlan to "+ ivlan       + " on forwarder-evc["
+                               + forwarderEvcId + "]");
+               String dmaapMsg = System.nanoTime() + "_" + forwarderEvcVtx.id().toString() + "_"       + forwarderEvcVtx.value("resource-version").toString();
+               dmaapMsgList.add(dmaapMsg);
+//             try {
+//                     final Introspector evcIntrospector = serializer.getLatestVersionView(forwarderEvcVtx);
+//                     this.notificationHelper.addEvent(forwarderEvcVtx, evcIntrospector, EventAction.UPDATE,
+//                                     this.serializer.getURIForVertex(forwarderEvcVtx, false));
+//             } catch (UnsupportedEncodingException e) {
+//                     logger.info("\t ERROR: Could not update ivlan on forwader-evc " + forwarderEvcVtx, e.getMessage());
+//             } catch (AAIException e) {
+//                     logger.info("\t ERROR: Could not update ivlan on forwarder-evc "+ forwarderEvcVtx, e.getMessage());
+//             }
+       }
+
+
+    @Override
+    public Status getStatus() {
+        if (checkLog) {
+            return Status.CHECK_LOGS;
+        }
+        else if (success) {
+            return Status.SUCCESS;
+        }
+        else {
+            return Status.FAILURE;
+        }
+    }
+
+    @Override
+       public void commit() {
+               engine.commit();
+        createDmaapFiles(dmaapMsgList);
+       }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{MigrateHUBEvcInventory.FORWARDER_EVC_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateHUBEvcInventory";
+    }
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateINVEvcInventory.java b/src/main/java/org/onap/aai/migration/v12/MigrateINVEvcInventory.java
new file mode 100644 (file)
index 0000000..a9fce6a
--- /dev/null
@@ -0,0 +1,242 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+/*-
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import java.util.List;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.*;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+
+
+@MigrationPriority(28)
+@MigrationDangerRating(100)
+public class MigrateINVEvcInventory extends Migrator {
+
+       private static final String PROPERTY_EVC_ID = "evc-id";
+       private static final String EVC_NODE_TYPE = "evc";
+       
+       private static boolean success = true;
+    private static boolean checkLog = false;
+    private static GraphTraversalSource g = null;
+    private int headerLength;
+  
+    private static int processedEvcsCount = 0;
+    private static int falloutEvcsCount = 0;
+    private static Map<String, String> falloutEvcsMap = new HashMap<String, String>();
+    
+    private static final String homeDir = System.getProperty("AJSC_HOME");
+       private static List<String> dmaapMsgList = new ArrayList<String>();
+    
+       
+    public MigrateINVEvcInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.g = this.engine.asAdmin().getTraversalSource();
+    }
+
+    @Override
+    public void run() {
+        logger.info("---------- Start migration of INV EVC Inventory  ----------");
+        String configDir = System.getProperty("BUNDLECONFIG_DIR");
+        if (homeDir == null) {
+            logger.info("ERROR: Could not find sys prop AJSC_HOME");
+            success = false;
+            return;
+        }
+        if (configDir == null) {
+            success = false;
+            return;
+        }
+        
+        String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/";
+        int fileLineCounter = 0;
+        String fileName = feedDir+ "inv.csv";
+        logger.info(fileName);
+        logger.info("---------- Processing INV Entries from file  ----------");
+        try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
+            String line;
+            while ((line = br.readLine()) != null) {
+                line = line.replace("\n", "").replace("\r", "");
+                logger.info("\n");
+                if (!line.isEmpty()) {
+                    if (fileLineCounter != 0) {
+                        String[] colList = line.split("\\s*,\\s*", -1);
+                        if (colList.length != headerLength) {
+                            logger.info("ERROR: INV line should contain " + headerLength + " columns, contains " + colList.length + " instead.");
+                            continue;
+                        }
+                        Map<String, String> invColValues = new HashMap<String, String>();
+                        invColValues.put("evcName", colList[22]);
+                        invColValues.put("collectorInterconnectType", colList[17]);
+                       
+                       String evcName = invColValues.get("evcName");
+                       String interconnectType = invColValues.get("collectorInterconnectType");
+                        if (!AAIConfig.isEmpty(evcName) && !AAIConfig.isEmpty(interconnectType) ) {
+                               logger.info("---------- Processing Line " + line + "----------");
+                            logger.info("\t Evc Name = " + evcName );
+                            
+                            // For each provided evc-name, check if the evc already exists
+                            List<Vertex> existingEvcList = g.V().has(PROPERTY_EVC_ID, evcName).has(AAIProperties.NODE_TYPE, EVC_NODE_TYPE).toList();
+                            if (existingEvcList == null || existingEvcList.size() == 0){
+                                logger.info("\t ERROR: Evc does not exist with evc-id = " + evcName + " - skipping");
+                                falloutEvcsCount++;
+                                falloutEvcsMap.put((fileLineCounter+1)+"", "["+evcName+"] - Evc does not exist" );
+                            }
+                            else if (existingEvcList != null && existingEvcList.size() == 1) {
+                                Vertex evcVtx = existingEvcList.get(0);
+                                if (evcVtx != null && !AAIConfig.isEmpty(interconnectType)) {
+                                    updateEvcInterconnectType(evcVtx, interconnectType);
+                                }
+                                processedEvcsCount++;
+                            }
+                            else if (existingEvcList!= null && existingEvcList.size() > 1) {
+                                logger.info("\t ERROR: More than one EVC exist with evc-id = " + evcName + " - skipping");
+                                 falloutEvcsCount++;
+                                 falloutEvcsMap.put((fileLineCounter+1)+"", "["+evcName+"] - More than one EVC exist with evc-id" );
+                            }
+                        } else {
+                               logger.info("---------- Processing Line " + line + "----------");
+                               logger.info("Invalid line entry : evcName: "+evcName + " interConnectType: "+ interconnectType);
+                               continue;
+                        }
+                    } else {
+                        this.headerLength = line.split("\\s*,\\s*", -1).length;
+                        logger.info("headerLength: " + headerLength);
+                        if (this.headerLength < 23){
+                            logger.info("ERROR: Input file should have at least 23 columns");
+                            success = false;
+                            return;
+                        }
+                    }
+                }
+               
+                fileLineCounter++;
+            }
+            
+            logger.info ("\n \n ******* Final Summary for INV FILE Migration ********* \n");
+            logger.info("Evcs processed: "+processedEvcsCount);
+            logger.info("Fallout Evcs count: "+falloutEvcsCount);
+            if (!falloutEvcsMap.isEmpty()) {
+               logger.info("------ Fallout Details: ------");
+               falloutEvcsMap.forEach((lineNumber, errorMsg) -> {
+                       logger.info(errorMsg + ": on row "+lineNumber.toString());
+               });
+            }
+      } catch (FileNotFoundException e) {
+            logger.info("ERROR: Could not find file " + fileName, e.getMessage());
+            success = false;
+            checkLog = true;
+        } catch (IOException e) {
+            logger.info("ERROR: Issue reading file " + fileName, e);
+            success = false;
+        } catch (Exception e) {
+            logger.info("encountered exception", e);
+            e.printStackTrace();
+            success = false;
+        }
+    }
+    
+    
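+    /**
+     * Sets inter-connect-type-ingress on the evc vertex from the INV file value, touches the standard
+     * AAI properties, and records a dmaapMsgList entry that is passed to createDmaapFiles() at commit time.
+     */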
+    private void updateEvcInterconnectType(Vertex evcVtx, String interconnectType) {
+       
+       String evcId = evcVtx.value("evc-id");
+       if (interconnectType != null && !interconnectType.isEmpty()){
+                       evcVtx.property("inter-connect-type-ingress", interconnectType);
+               this.touchVertexProperties(evcVtx, false);
+               logger.info("\t Updated inter-connect-type-ingress property for evc [" + evcId +"]");
+               String dmaapMsg = System.nanoTime() + "_" + evcVtx.id().toString() + "_"        + evcVtx.value("resource-version").toString();
+                       dmaapMsgList.add(dmaapMsg);
+//                     try {
+//                             final Introspector evcIntrospector = serializer.getLatestVersionView(evcVtx);
+//                             this.notificationHelper.addEvent(evcVtx, evcIntrospector, EventAction.UPDATE, this.serializer
+//                                             .getURIForVertex(evcVtx, false));
+//                     } catch (UnsupportedEncodingException e) {
+//                             logger.info("\t ERROR: Could not send update notification for evc " + evcId, e.getMessage());
+//                     } catch (AAIException e) {
+//                             logger.info("\t ERROR: Could not send update notification for evc " + evcId, e.getMessage());
+//                     }
+       }
+       }
+
+
+    @Override
+    public Status getStatus() {
+        if (checkLog) {
+            return Status.CHECK_LOGS;
+        }
+        else if (success) {
+            return Status.SUCCESS;
+        }
+        else {
+            return Status.FAILURE;
+        }
+    }
+
+    @Override
+       public void commit() {
+               engine.commit();
+        createDmaapFiles(dmaapMsgList);
+       }
+    
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{MigrateINVEvcInventory.EVC_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateINVEvcInventory";
+    }
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventory.java b/src/main/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventory.java
new file mode 100644 (file)
index 0000000..0c85481
--- /dev/null
@@ -0,0 +1,361 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+/*-
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.javatuples.Pair;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.*;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+
+
+@MigrationPriority(25)
+@MigrationDangerRating(100)
+public class MigrateINVPhysicalInventory extends Migrator {
+
+       private static final String NODE_TYPE_PNF = "pnf";
+       private static final String NODE_TYPE_PINTERFACE = "p-interface";
+       private static final String NODE_TYPE_PINTERFACES = "p-interfaces";
+       private static final String PROPERTY_PNF_NAME = "pnf-name";
+       private static final String PROPERTY_INTERFACE_NAME = "interface-name";
+       protected final AtomicInteger skippedRowsCount = new AtomicInteger(0);
+       protected final AtomicInteger processedRowsCount = new AtomicInteger(0);
+
+       private boolean success = true;
+    private boolean checkLog = false;
+    private GraphTraversalSource g = null;
+    protected int headerLength;
+
+       protected final AtomicInteger falloutRowsCount = new AtomicInteger(0);
+       private static final String homeDir = System.getProperty("AJSC_HOME");
+       private static List<String> dmaapMsgList = new ArrayList<String>();
+
+       public MigrateINVPhysicalInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.g = this.engine.asAdmin().getTraversalSource();
+    }
+
+       @Override
+    public void run() {
+        logger.info("---------- Start migration of INV File Physical Inventory  ----------");
+        String configDir = System.getProperty("BUNDLECONFIG_DIR");
+        if (homeDir == null) {
+            logger.info("ERROR: Could not find sys prop AJSC_HOME");
+            success = false;
+            return;
+        }
+        if (configDir == null) {
+            success = false;
+            return;
+        }
+        
+        String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/";
+        String fileName = feedDir+ "inv.csv";
+        logger.info(fileName);
+        logger.info("---------- Processing INV Entries from file  ----------");
+
+
+               try {
+                       Map<String, Set<String>> data = loadFile(fileName);
+                       this.processData(data);
+                       
+                       logger.info("\n ******* Summary Report for Inv File Physical Migration *******");
+                       logger.info("Number of distinct pnfs processed: "+data.keySet().size());
+                       logger.info("Rows processed: " + processedRowsCount);
+                       logger.info("Rows skipped: "+ skippedRowsCount);
+                       logger.info("Fallout Rows count: " + falloutRowsCount);
+                       
+               } catch (FileNotFoundException e) {
+            logger.info("ERROR: Could not find file " + fileName, e.getMessage());
+            success = false;
+            checkLog = true;
+        } catch (IOException e) {
+            logger.info("ERROR: Issue reading file " + fileName, e);
+            success = false;
+        } catch (Exception e) {
+            logger.info("encountered exception", e);
+            e.printStackTrace();
+            success = false;
+        }
+    }
+
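+       /**
+        * For each pnf-name from the parsed file data, creates the pnf if it does not already exist and
+        * then creates any p-interfaces listed in the file that are not already present under that pnf,
+        * recording a dmaapMsgList entry for every vertex that is created.
+        */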
+       protected void processData(Map<String, Set<String>> data) throws Exception{
+
+               for (Map.Entry<String, Set<String>> entry : data.entrySet()) {
+                       String pnfName = entry.getKey();
+                       final Set<String> newPInterfaces = entry.getValue();
+                       Introspector pnf;
+                       Vertex pnfVertex;
+                       EventAction eventAction = EventAction.UPDATE;
+                       boolean pnfChangesMade = false;
+
+                       if (pnfExists(pnfName)) {
+                               pnf = serializer.getLatestVersionView(getPnf(pnfName));
+                               pnfVertex = getPnf(pnfName);
+                       } else {
+                               pnf = loader.introspectorFromName(NODE_TYPE_PNF);
+                               pnf.setValue(PROPERTY_PNF_NAME, pnfName);
+                               pnfVertex = serializer.createNewVertex(pnf);
+                               eventAction = EventAction.CREATE;
+                               pnfChangesMade = true;
+                       }
+
+                       if (pnfChangesMade) {
+                               serializer.serializeSingleVertex(pnfVertex, pnf, getMigrationName());
+                               logger.info ("\t Pnf [" + pnfName +"] created with vertex id "+pnfVertex);
+//                             pnf = serializer.getLatestVersionView(pnfVertex);
+//                             this.notificationHelper.addEvent(pnfVertex, serializer.getLatestVersionView(pnfVertex), eventAction, this.serializer.getURIForVertex(pnfVertex, false));
+//                             logger.info("\t Dmaap notification sent for creation of pnf ");
+                               String dmaapMsg = System.nanoTime() + "_" + pnfVertex.id().toString() + "_"     + pnfVertex.value("resource-version").toString();
+                               dmaapMsgList.add(dmaapMsg);
+                       } else {
+                               logger.info("\t Pnf ["+ pnfName +"] already exists ");
+                       }
+
+                       if (!newPInterfaces.isEmpty()) {
+                               Introspector pInterfacesIntrospector = pnf.getWrappedValue(NODE_TYPE_PINTERFACES);
+                               if ( pInterfacesIntrospector == null) {
+                                       pInterfacesIntrospector = pnf.newIntrospectorInstanceOfProperty(NODE_TYPE_PINTERFACES);
+                                       pnf.setValue(NODE_TYPE_PINTERFACES, pInterfacesIntrospector.getUnderlyingObject());
+                               }
+
+                               for (Introspector introspector : pInterfacesIntrospector.getWrappedListValue(NODE_TYPE_PINTERFACE)) {
+                                       String interfaceName = introspector.getValue(PROPERTY_INTERFACE_NAME).toString();
+                                       if (newPInterfaces.contains(interfaceName)) {
+                                               newPInterfaces.remove(interfaceName);
+                                       }
+                               }
+
+                               for (String pInterfaceName : newPInterfaces) {
+                                       Introspector pInterface = loader.introspectorFromName(NODE_TYPE_PINTERFACE);
+                                       pInterface.setValue(PROPERTY_INTERFACE_NAME, pInterfaceName);
+                                       Vertex pInterfaceVertex = serializer.createNewVertex(pInterface);
+                                       pInterfaceVertex.property(AAIProperties.AAI_URI, pnfVertex.property(AAIProperties.AAI_URI).value() + "/p-interfaces/p-interface/" + pInterfaceName);
+                                       edgeSerializer.addTreeEdge(g, pnfVertex, pInterfaceVertex);
+                                       eventAction = EventAction.CREATE;
+                                       serializer.serializeSingleVertex(pInterfaceVertex, pInterface, getMigrationName());
+                                       logger.info ("\t p-interface [" + pInterfaceName +"] created with vertex id "+ pInterfaceVertex + " on pnf ["+pnfName+"]");
+//                                     pInterface = serializer.getLatestVersionView(pInterfaceVertex);
+//                                     this.notificationHelper.addEvent(pInterfaceVertex, pInterface, eventAction, this.serializer.getURIForVertex(pInterfaceVertex, false));
+//                                     logger.info("\t Dmaap notification sent for creation of p-interface ");
+                                       String dmaapMsg = System.nanoTime() + "_" + pInterfaceVertex.id().toString() + "_"      + pInterfaceVertex.value("resource-version").toString();
+                                       dmaapMsgList.add(dmaapMsg);
+                               }
+                       }
+               }
+       }
+
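+       /** Returns true if a pnf vertex with the given pnf-name already exists in the graph. */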
+       protected boolean pnfExists(String pnfName) {
+               return g.V().has(PROPERTY_PNF_NAME, pnfName).has(AAIProperties.NODE_TYPE, NODE_TYPE_PNF).hasNext();
+       }
+
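+       /** Returns the pnf vertex with the given pnf-name; assumes pnfExists has already been checked. */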
+       protected Vertex getPnf(String pnfName) {
+               return g.V().has(PROPERTY_PNF_NAME, pnfName).has(AAIProperties.NODE_TYPE, NODE_TYPE_PNF).next();
+       }
+
+       /**
+        * Load file to the map for processing
+        * @param fileName
+        * @return
+        * @throws Exception
+        */
+       protected Map<String,Set<String>> loadFile(String fileName) throws Exception {
+               List<String> lines = Files.readAllLines(Paths.get(fileName));
+               return this.getFileContents(lines);
+       }
+
+       /**
+        * Get lines from file.
+        * @param lines
+        * @return
+        * @throws Exception
+        */
+       protected Map<String,Set<String>> getFileContents(List<String> lines) throws Exception {
+
+               final Map<String,Set<String>> fileContents = new ConcurrentHashMap<>();
+
+               processAndRemoveHeader(lines);
+               
+               logger.info("Total rows count excluding header: "+ lines.size());
+               
+               lines.stream()
+                       .filter(line -> !line.isEmpty())
+                       .map(line -> Arrays.asList(line.split("\\s*,\\s*", -1)))
+//                     .filter(this::verifyLine)
+                       .map(this::processLine)
+                       .filter(Optional::isPresent)
+                       .map(Optional::get)
+                       .forEach(p -> {
+                               processedRowsCount.getAndIncrement();
+                               String pnfName = p.getValue0();
+                               if (!fileContents.containsKey(pnfName)) {
+                                       Set<String> s = new HashSet<>();
+                                       fileContents.put(p.getValue0(), s);
+                               }
+                               if (p.getValue1() != null) {
+                                       fileContents.get(p.getValue0()).add(p.getValue1());
+                               }
+                       })
+               ;
+               
+               return fileContents;
+
+
+       }
+
+       /**
+        * Verify line has the necessary details.
+        * @param line
+        * @return
+        */
+       protected boolean verifyLine(List<String> line) {
+               if (line.size() != headerLength) {
+                       logger.info("ERROR: INV line should contain " + headerLength + " columns, contains " + line.size() + " instead.");
+                       this.skippedRowsCount.getAndIncrement();
+                       return false;
+               }
+               return true;
+       }
+
+       /**
+        * Get the pnf name and interface name from the line.
+        * @param line
+        * @return
+        */
+       protected Optional<Pair<String,String>> processLine(List<String> line) {
+               logger.info("Processing line... " + line.toString());
+               int lineSize = line.size();
+               if (lineSize < 11){
+                       logger.info("Skipping line, does not contain pnf and/or port columns");
+                       skippedRowsCount.getAndIncrement();
+                       return Optional.empty();
+               }
+               
+               String pnfName = line.get(0);
+               String portAid = line.get(11).replaceAll("^\"|\"$", "").replaceAll("\\s+","");
+               
+               if (pnfName.isEmpty() && portAid.isEmpty()) {
+                       logger.info("Line missing pnf name and port " + line);
+                       falloutRowsCount.getAndIncrement();
+                       return Optional.empty();
+               } else if (pnfName.isEmpty()) {
+                       logger.info("Line missing pnf name " + line);
+                       falloutRowsCount.getAndIncrement();
+                       return Optional.empty();
+               } else if (portAid.isEmpty()) {
+                       logger.info("Line missing port " + line);
+                       return Optional.of(Pair.with(pnfName, null));
+               }
+               return Optional.of(Pair.with(pnfName, portAid));
+       }
+
+       /**
+        * Verify header of the csv and remove it from the list.
+        * @param lines
+        * @throws Exception
+        */
+       protected String processAndRemoveHeader(List<String> lines) throws Exception {
+               String firstLine;
+               if (lines.isEmpty()) {
+                       String msg = "ERROR: Missing Header in file";
+                       success = false;
+                       logger.error(msg);
+                       throw new Exception(msg);
+               } else {
+                       firstLine = lines.get(0);
+               }
+
+               this.headerLength = firstLine.split("\\s*,\\s*", -1).length;
+               logger.info("headerLength: " + headerLength);
+               if (this.headerLength < 21){
+                       String msg = "ERROR: Input file should have at least 21 columns";
+                       success = false;
+                       logger.error(msg);
+                       throw new Exception(msg);
+               }
+
+               return lines.remove(0);
+       }
+
+
+    @Override
+    public Status getStatus() {
+        if (checkLog) {
+            return Status.CHECK_LOGS;
+        }
+        else if (success) {
+            return Status.SUCCESS;
+        }
+        else {
+            return Status.FAILURE;
+        }
+    }
+    
+    @Override
+       public void commit() {
+               engine.commit();
+               createDmaapFiles(dmaapMsgList);
+       }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{NODE_TYPE_PNF});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateINVPhysicalInventory";
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/MigratePATHEvcInventory.java b/src/main/java/org/onap/aai/migration/v12/MigratePATHEvcInventory.java
new file mode 100644 (file)
index 0000000..b0bacde
--- /dev/null
@@ -0,0 +1,713 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+/*-
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+
+
+@MigrationPriority(29)
+@MigrationDangerRating(100)
+public class MigratePATHEvcInventory extends Migrator {
+
+       private static Map<String, Vertex> portList = new HashMap<String, Vertex>();
+       private static Map<String, Vertex> pnfList = new HashMap<String, Vertex>();
+       private final String FORWARDER_EVC_NODE_TYPE = "forwarder-evc";
+       private final String LAGINTERFACE_NODE_TYPE = "lag-interface";
+       private final String CONFIGURATION_NODE_TYPE = "configuration";
+       private final String FORWARDING_PATH_NODE_TYPE = "forwarding-path";
+       private final String FORWARDING_PATH_ID = "forwarding-path-id";
+       private final String PROPERTY_CONFIGURATION_ID = "configuration-id";
+       private final String PNF_NODE_TYPE = "pnf";
+       private final String PROPERTY_PNF_NAME = "pnf-name";
+       private final String PROPERTY_INTERFACE_NAME = "interface-name";
+       private final String PINTERFACE_NODE_TYPE = "p-interface";
+       private static boolean success = true;
+    private static boolean checkLog = false;
+    private static GraphTraversalSource g = null;
+    private int headerLength;
+    
+    //Map of the evcs processed where lag-interfaces were found, used to track the sequence of ports
+    //key: the evcName
+    //value: a map of the sequence of forwarders to their corresponding portAids, in the order they are found
+    
+    private static Map<String, Map<Vertex, String>> pathFileMap = new HashMap<String, Map<Vertex, String>>();
+  
+    private static int processedEvcsCount = 0;
+    private static int falloutEvcsCount = 0;
+    
+    //Map with lineNumber and the reason for failure for each EVC
+    private static Map<Integer, String> falloutEvcsList = new HashMap<Integer, String>();
+    private static final String homeDir = System.getProperty("AJSC_HOME");
+       private static List<String> dmaapMsgList = new ArrayList<String>();
+       
+       
+    public MigratePATHEvcInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.g = this.engine.asAdmin().getTraversalSource();
+    }
+
+    @Override
+    public void run() {
+        logger.info("---------- Start migration of PATH EVC Inventory  ----------");
+        String configDir = System.getProperty("BUNDLECONFIG_DIR");
+        if (homeDir == null) {
+            logger.info("ERROR: Could not find sys prop AJSC_HOME");
+            success = false;
+            return;
+        }
+        if (configDir == null) {
+            success = false;
+            return;
+        }
+        
+        String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/";
+        int fileLineCounter = 0;
+        String fileName = feedDir+ "path.csv";
+        logger.info(fileName);
+        logger.info("---------- Processing PATH Entries from file  ----------");
+        try {
+               List<String> lines = Files.readAllLines(Paths.get(fileName));
+            Iterator<String> lineItr = lines.iterator();
+            while (lineItr.hasNext()){
+                String line = lineItr.next().replace("\n", "").replace("\r", "");
+                logger.info("\n");
+                if (!line.isEmpty()) {
+                    if (fileLineCounter != 0) {
+                        String[] colList = line.split("\\s*,\\s*", -1);
+                        if (colList.length != headerLength) {
+                            logger.info("ERROR: PATH line should contain " + headerLength + " columns, contains " + colList.length + " instead.");
+//                            success = false;
+                            continue;
+                        }
+                        Map<String, String> pathColValues = new HashMap<String, String>();
+                        pathColValues.put("evcName", colList[1]);
+                        pathColValues.put("bearerFacingCircuit", colList[4]);
+                        pathColValues.put("bearerCvlan", colList[6]);
+                       pathColValues.put("bearerSvlan", colList[7]);
+                       pathColValues.put("bearerPtniiName", colList[8]);
+                       String bearerPortAid = colList[12].replaceAll("^\"|\"$", "").replaceAll("\\s+","");
+                       pathColValues.put("bearerPortAid", bearerPortAid);
+                       pathColValues.put("collectorFacingCircuit", colList[14]);
+                       pathColValues.put("collectorCvlan", colList[16]);
+                       pathColValues.put("collectorSvlan", colList[17]);
+                       pathColValues.put("collectorPtniiName", colList[18]);
+                       String collectorPortAid = colList[22].replaceAll("^\"|\"$", "").replaceAll("\\s+","");
+                       pathColValues.put("collectorPortAid", collectorPortAid);
+                       
+                       
+                       String evcName = pathColValues.get("evcName");
+                        if (!AAIConfig.isEmpty(evcName)) {
+                               logger.info("---------- Processing Line " + line + "----------");
+                            logger.info("\t Evc Name = " + evcName );
+                            
+                            boolean isEntryValid = validatePnfsAndPorts(pathColValues, evcName);
+                            
+                            if (!isEntryValid){
+                               logger.info("\t ERROR: Skipping processing for line containing evc-name [" +evcName+ "]");
+                                                                       falloutEvcsCount++;
+                                                                       falloutEvcsList.put(Integer.valueOf(fileLineCounter -1 ), "["+ evcName +"] Ptnii or port does not exist");
+                                                               continue;
+                            }
+                            // Get the forwarding path containing forwarders
+                            GraphTraversal<Vertex, Vertex> forwardingPathList = g.V().has(this.FORWARDING_PATH_ID, evcName).has(AAIProperties.NODE_TYPE, this.FORWARDING_PATH_NODE_TYPE)
+                                       .where(__.in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type","forwarder"));
+                            
+                            if (!forwardingPathList.hasNext()){
+                               createNewForwardersFromPATHData(pathColValues, evcName, fileLineCounter);
+                               processedEvcsCount++;
+                            } else {
+                               Vertex forwardingPathVtx = forwardingPathList.next();
+                               List<Vertex> forwardersList = g.V(forwardingPathVtx.id()).in("org.onap.relationships.inventory.BelongsTo").toList();
+                               Iterator<Vertex> forwardersItr = forwardersList.iterator();
+                               List<String> forwarderRoleList =  new ArrayList<String>();
+                               while (forwardersItr.hasNext()){
+                                       Vertex forwarderVtx = forwardersItr.next();
+                                       String role = forwarderVtx.value("forwarder-role");
+                                       if (role!= null ){
+                                               forwarderRoleList.add(role);
+                                       }
+                               }
+                               if (forwarderRoleList!= null && !forwarderRoleList.isEmpty()) {
+                                       if (forwarderRoleList.contains("ingress") && forwarderRoleList.contains("egress")){
+                                               logger.info("\t Skipping processing for EVC[" + evcName + "] because forwarders related to this EVC already exist.");
+                                               falloutEvcsCount++;
+                                               falloutEvcsList.put(Integer.valueOf(fileLineCounter -1 ), "["+ evcName +"] Forwarders already exists for EVC");
+                                       } else {
+                                               createNewForwardersFromPATHData(pathColValues, evcName, fileLineCounter);
+                                       } 
+                               }
+                            }
+                        }
+                    } else {
+                        this.headerLength = line.split("\\s*,\\s*", -1).length;
+                        logger.info("headerLength: " + headerLength);
+                        if (this.headerLength < 24){
+                            logger.info("ERROR: Input file should have at least 24 columns");
+                            success = false;
+                            return;
+                        }
+                    }
+                }
+                fileLineCounter++;
+            }
+            logger.info ("\n \n ******* Final Summary for PATH FILE Migration ********* \n");
+            logger.info("Evcs processed: "+processedEvcsCount);
+            logger.info("Total Rows Count: "+(fileLineCounter + 1));
+            logger.info("Fallout Rows Count : "+falloutEvcsCount +"\n");
+            if (!falloutEvcsList.isEmpty()) {
+               logger.info("------ Fallout Details: ------");
+               falloutEvcsList.forEach((lineNumber, errorMsg) -> {
+                       logger.info(errorMsg + ": on row "+lineNumber.toString());
+               });
+            }
+        } catch (FileNotFoundException e) {
+            logger.info("ERROR: Could not find file " + fileName, e.getMessage());
+            success = false;
+            checkLog = true;
+        } catch (IOException e) {
+            logger.info("ERROR: Issue reading file " + fileName, e);
+            success = false;
+        } catch (Exception e) {
+            logger.info("encountered exception", e);
+            e.printStackTrace();
+            success = false;
+        }
+    }
+    
+    
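+       /**
+        * Validates that the collector and bearer pnfs exist in A&AI and that the collector and bearer
+        * ports (p-interface or lag-interface) exist under those pnfs for the EVC being processed.
+        */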
+       private boolean validatePnfsAndPorts(Map<String, String> pathColValues, String evcName) {
+       
+       String collectorPtniiName = pathColValues.get("collectorPtniiName");
+       String bearerPtniiName = pathColValues.get("bearerPtniiName");
+       String collectorPortAid = pathColValues.get("collectorPortAid");
+       String bearerPortAid = pathColValues.get("bearerPortAid");
+               boolean isValid = validateCollectorPnf(collectorPtniiName, evcName) && validateBearerPnf(bearerPtniiName, evcName) 
+                               && validateCollectorPort(pathColValues, collectorPortAid, collectorPtniiName, evcName) 
+                               && validateBearerPort(pathColValues, bearerPortAid, bearerPtniiName, evcName) ;
+               return isValid;
+       }
+
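+       /** Checks that the collector pnf exists in A&AI, caching the found vertex in pnfList for reuse. */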
+       private boolean validateCollectorPnf(String collectorPtniiName, String evcName) {
+               
+               boolean isValid = false;
+               if (!AAIConfig.isEmpty(collectorPtniiName)) {
+                       if (!pnfList.isEmpty() && pnfList.containsKey(collectorPtniiName)){
+                               isValid = true;
+                               logger.info("\t Pnf [" + collectorPtniiName + "] found in AAI");
+                               return isValid;
+                       }
+                       List<Vertex> collectorPnfList = g.V().has(this.PROPERTY_PNF_NAME, collectorPtniiName).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).toList();
+                       if (collectorPnfList != null && collectorPnfList.size() == 1) {
+                               isValid = true;
+                               pnfList.put(collectorPtniiName, collectorPnfList.get(0));
+                               logger.info("\t Pnf [" + collectorPtniiName + "] found in AAI");
+                       } else if (collectorPnfList == null || collectorPnfList.size() == 0) {
+                               logger.info("\t ERROR: Failure to find Pnf [" + collectorPtniiName      + "] for EVC [" + evcName + "]");
+                       }
+               }
+               return isValid;
+       }
+       
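+       /** Checks that the bearer pnf exists in A&AI, caching the found vertex in pnfList for reuse. */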
+       private boolean validateBearerPnf(String bearerPtniiName, String evcName) {
+               boolean isValid = false;
+               if (!AAIConfig.isEmpty(bearerPtniiName)) {
+                       if (!pnfList.isEmpty() && pnfList.containsKey(bearerPtniiName)){
+                               isValid = true;
+                               logger.info("\t Pnf [" + bearerPtniiName + "] found in AAI");
+                               return isValid;
+                       }
+                       List<Vertex> bearerPnfList = g.V().has(this.PROPERTY_PNF_NAME, bearerPtniiName).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).toList();
+               if (bearerPnfList!= null && bearerPnfList.size() == 1){
+                   isValid = true;
+                   pnfList.put(bearerPtniiName, bearerPnfList.get(0));
+                   logger.info("\t Pnf ["+ bearerPtniiName + "] found in AAI");
+               }
+               else if (bearerPnfList == null || bearerPnfList.size() == 0) {
+                       logger.info("\t ERROR:  Failure to find Pnf ["+ bearerPtniiName + "]  for EVC [" + evcName + "]");
+               }       
+           }
+       return isValid;
+       }
+       
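+       /**
+        * Checks that the collector port exists under the collector pnf. A port-aid containing an underscore
+        * is treated as a lag-interface (the prefix before the underscore); otherwise it is looked up as a
+        * p-interface. Found vertices are cached in portList and recorded in pathFileMap.
+        */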
+       private boolean validateCollectorPort(Map<String, String> pathColValues, String collectorPortAid, String collectorPtniiName, String evcName) {
+               boolean isValid = false;
+               
+               if (!AAIConfig.isEmpty(collectorPortAid)) {
+                       
+                       boolean isPortAidALagIntf = false;
+                       GraphTraversal<Vertex, Vertex> collectorPortList;
+                       String lagInterface = null;
+                       
+                       int lagIdentifierIndex = collectorPortAid.indexOf("_");
+                       
+                       if (lagIdentifierIndex > 0) {
+                               String[] subStringList = collectorPortAid.split("_");
+                               lagInterface = subStringList[0]; //forwarder will be related to this lagInterface
+                               isPortAidALagIntf = true;
+                       }
+                       
+                       if (isPortAidALagIntf)
+                       {
+                               if (!portList.isEmpty() && portList.containsKey(collectorPtniiName+"_"+lagInterface)){
+                                       isValid = true;
+                                       logger.info("\t lag-interface [" + lagInterface + "] found in AAI");
+                                       populatePathFileMapWithForwarderInfo(collectorPtniiName, evcName, lagInterface, portList.get(collectorPtniiName+"_"+lagInterface));
+                                       return isValid;
+                               }
+                               Vertex collectorPnfVtx  = pnfList.get(collectorPtniiName);
+                               if (collectorPnfVtx == null ) {
+                                       logger.info("\t ERROR: Failure to find lag-interface ["+ lagInterface + "] for EVC [" + evcName + "]");
+                                       return isValid;
+                               } else {
+                                       collectorPortList = g.V(collectorPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", lagInterface).has("aai-node-type", "lag-interface");
+                                        
+                               if (collectorPortList!= null && collectorPortList.hasNext()) {
+                                       Vertex lagInterfaceVtx = collectorPortList.next();
+                                   if (lagInterfaceVtx != null && lagInterfaceVtx.property("interface-name").isPresent()) {
+                                                       isValid = true;
+                                                       portList.put(collectorPtniiName+"_"+lagInterface, lagInterfaceVtx);
+                                                       populatePathFileMapWithForwarderInfo(collectorPtniiName, evcName, lagInterface, lagInterfaceVtx);
+                                                       logger.info("\t lag-interface [" + lagInterface
+                                                                       + "] found in AAI");
+                                               }
+                               }
+                               else if (collectorPortList == null || !collectorPortList.hasNext()) {
+                                       logger.info("\t ERROR: Failure to find lag-interface ["+ lagInterface + "] for EVC [" + evcName + "]");
+                               }
+                               }
+                       } 
+                       else if (!isPortAidALagIntf)
+                       {
+                               if (!portList.isEmpty() && portList.containsKey(collectorPtniiName+"_"+collectorPortAid)){
+                                       isValid = true;
+                                       logger.info("\t p-interface [" + collectorPortAid + "] found in AAI");
+                                       populatePathFileMapWithForwarderInfo(collectorPtniiName, evcName, collectorPortAid, portList.get(collectorPtniiName+"_"+collectorPortAid));
+                                       return isValid;
+                               }
+                               
+                               Vertex collectorPnfVtx  = pnfList.get(collectorPtniiName);
+                               if (collectorPnfVtx == null ) {
+                                       logger.info("\t ERROR: Failure to find p-interface ["+ collectorPortAid + "] for EVC [" + evcName + "]");
+                                       return isValid;
+                               } else {
+                                       collectorPortList =g.V(collectorPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", collectorPortAid).has("aai-node-type", "p-interface");
+                                                       
+                               if (collectorPortList!= null && collectorPortList.hasNext()) {
+                                       Vertex pInterfaceVtx = collectorPortList.next();
+                                       if (pInterfaceVtx != null && pInterfaceVtx.property("interface-name").isPresent()) {
+                                                       isValid = true;
+                                                       portList.put(collectorPtniiName+"_"+collectorPortAid, pInterfaceVtx );
+                                                       populatePathFileMapWithForwarderInfo(collectorPtniiName, evcName, collectorPortAid, pInterfaceVtx);
+                                                       logger.info("\t p-interface [" + collectorPortAid
+                                                                       + "] found in AAI");
+                                               }
+                               }
+                               else if (collectorPortList == null || !collectorPortList.hasNext()) {
+                                       logger.info("\t ERROR: Failure to find p-interface ["+ collectorPortAid + "] for EVC [" + evcName + "]");
+                               }
+                               }
+                       }
+               }
+       return isValid;
+       }
+
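+       /**
+        * Checks that the bearer port exists under the bearer pnf, using the same lag-interface versus
+        * p-interface handling as validateCollectorPort, and caches results in portList and pathFileMap.
+        */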
+       private boolean validateBearerPort(Map<String, String> pathColValues, String bearerPortAid, String bearerPtniiName, String evcName) {
+               boolean isValid = false;
+               
+               if (!AAIConfig.isEmpty(bearerPortAid)) {
+                       GraphTraversal<Vertex, Vertex> bearerPortList;
+                       
+                       boolean isPortAidALagIntf = false;
+                       GraphTraversal<Vertex, Vertex> collectorPortList;
+                       String lagInterface = null;
+                       
+                       int lagIdentifierIndex = bearerPortAid.indexOf("_");
+                       
+                       if (lagIdentifierIndex > 0) {
+                               String[] subStringList = bearerPortAid.split("_");
+                               lagInterface = subStringList[0]; //forwarder will be related to this lagInterface
+                               isPortAidALagIntf = true;
+                       }
+                       
+                       if (isPortAidALagIntf)
+                       {
+                               if (!portList.isEmpty() && portList.containsKey(bearerPtniiName+"_"+lagInterface)){
+                                       isValid = true;
+                                       logger.info("\t lag-interface [" + lagInterface + "] found in AAI");
+                                       populatePathFileMapWithForwarderInfo(bearerPtniiName, evcName, lagInterface, portList.get(bearerPtniiName+"_"+lagInterface));
+                                       return isValid;
+                               }
+                               Vertex bearerPnfVtx  = pnfList.get(bearerPtniiName);
+                               if (bearerPnfVtx == null ) {
+                                       logger.info("\t ERROR: Failure to find lag-interface ["+ lagInterface + "] for EVC [" + evcName + "]");
+                                       return isValid;
+                               } else {
+                                       GraphTraversal<Vertex, Vertex> lagPortList = g.V(bearerPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", lagInterface).has("aai-node-type", "lag-interface");
+                               if (lagPortList!= null && lagPortList.hasNext()) {
+                                       Vertex lagInterfaceVtx = lagPortList.next();
+                                       if (lagInterfaceVtx != null && lagInterfaceVtx.property("interface-name").isPresent()) {
+                                                       isValid = true;
+                                                       portList.put(bearerPtniiName+"_"+lagInterface, lagInterfaceVtx);
+                                                       populatePathFileMapWithForwarderInfo(bearerPtniiName, evcName, lagInterface, lagInterfaceVtx);
+                                                       logger.info("\t lag-interface [" + lagInterface
+                                                                       + "] found in AAI");
+                                               }
+                               }
+                               else if (lagPortList == null || !lagPortList.hasNext()) {
+                                       logger.info("\t ERROR: Failure to find lag-interface ["+ lagInterface + "] for EVC [" + evcName + "]");
+                               }
+                               }
+                       } 
+                       else if (!isPortAidALagIntf) {
+                               if (!portList.isEmpty() && portList.containsKey(bearerPtniiName+"_"+bearerPortAid)){
+                                       isValid = true;
+                                       logger.info("\t p-interface [" + bearerPortAid + "] found in AAI");
+                                       populatePathFileMapWithForwarderInfo(bearerPtniiName, evcName, bearerPortAid, portList.get(bearerPtniiName+"_"+bearerPortAid));
+                                       return isValid;
+                               }
+                               Vertex bearerPnfVtx  = pnfList.get(bearerPtniiName);
+                               if (bearerPnfVtx == null ) {
+                                       logger.info("\t ERROR: Failure to find p-interface ["+ bearerPortAid + "] for EVC [" + evcName + "]");
+                                       return isValid;
+                               } else {
+                                       bearerPortList = g.V(bearerPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", bearerPortAid).has("aai-node-type","p-interface");
+                                       if (bearerPortList!= null && bearerPortList.hasNext()){
+                                               Vertex pInterfaceVtx = bearerPortList.next();
+                                       if (pInterfaceVtx != null && pInterfaceVtx.property("interface-name").isPresent()) {
+                                                       isValid = true;
+                                                       portList.put(bearerPtniiName+"_"+bearerPortAid, pInterfaceVtx);
+                                                       populatePathFileMapWithForwarderInfo(bearerPtniiName, evcName, bearerPortAid, pInterfaceVtx);
+                                                       logger.info("\t p-interface [" + bearerPortAid
+                                                                       + "] found in AAI");
+                                               }
+                                       }
+                                       else if (bearerPortList == null || !bearerPortList.hasNext()) {
+                                               logger.info("\t ERROR: Failure to find p-interface ["+ bearerPortAid + "] for EVC [" + evcName + "]");
+                                       }
+                               }
+                       }
+               }
+       return isValid;
+       }
+
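+       // Records the interface vertex for this EVC in pathFileMap, encoded as "<sequence>_<ptnii>_<interface>" (sequence = current map size + 1),
+       // so the forwarder sequence, and later its role, can be derived from the order in which ports were found.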
+       private void populatePathFileMapWithForwarderInfo(String ptniiName, String evcName, String lagInterface, Vertex interfaceVtx) {
+               int size = 0;
+               Map<Vertex, String> interfaceMap = pathFileMap.get(evcName);
+               if (interfaceMap != null && !interfaceMap.isEmpty()) {
+                       size = interfaceMap.size();
+               }
+               String sequence = Integer.toString(size + 1);
+               if (interfaceMap == null) {
+                       interfaceMap = new HashMap<Vertex, String>();
+               }
+               interfaceMap.put(interfaceVtx, sequence + "_" + ptniiName + "_" + lagInterface);
+               pathFileMap.put(evcName, interfaceMap);
+       }
+
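+    // For each interface recorded for this EVC, creates a forwarder under the existing forwarding-path plus its configuration and
+    // forwarder-evc children; the EVC is recorded as fallout if no forwarding-path vertex exists for it.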
+    private void createNewForwardersFromPATHData(Map<String, String> pathColValues, String evcName, int fileLineCounter) {
+       Map<Vertex, String> forwarderMap = pathFileMap.get(evcName);
+       List<Vertex> forwardingPathVtxList  = g.V().has(this.FORWARDING_PATH_ID, evcName).has(AAIProperties.NODE_TYPE, FORWARDING_PATH_NODE_TYPE).toList();
+       if (forwardingPathVtxList != null && !forwardingPathVtxList.isEmpty())  {
+               Vertex forwardingPathVtx = forwardingPathVtxList.get(0);
+                       if (forwarderMap != null && !forwarderMap.isEmpty()) {
+                               //for each forwarder, create the new forwarder object
+                               forwarderMap.forEach((portVtx, port) -> {
+
+                                       Vertex forwarderVtx = createForwarderObject(evcName, portVtx, port, forwardingPathVtx);
+                                       if (forwarderVtx != null) {
+                                               String forwarderRole = forwarderVtx.value("forwarder-role").toString();
+                                               Vertex configurationVtx = createConfigurationObject(evcName, portVtx, port, forwarderVtx);
+                                               createForwarderEvcObject(pathColValues, forwarderRole, portVtx, port,
+                                                               configurationVtx);
+                                       }
+                               });
+                       }
+               } else {
+                       falloutEvcsList.put((fileLineCounter + 1), "["+ evcName +"] Forwarding-path does not exist for EVC");
+                       falloutEvcsCount++;
+                       //Reduce the count of processed evcs since this EVC row cannot be processed
+                       processedEvcsCount--;
+                       logger.info("\t ERROR: Forwarding-path does not exist for EVC [" + evcName + "] skipping processing for this EVC.");
+               }
+       }
+
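+       // Creates a forwarder child of the forwarding-path unless one already ForwardsTo this interface; sequence and role are taken
+       // from the "<sequence>_..." value built in populatePathFileMapWithForwarderInfo.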
+       private Vertex createForwarderObject(String evcName, Vertex intfVertex, String port, Vertex forwardingPathVtx) {
+       Vertex forwarderVtx = null;
+
+               try {
+                       //check if the forwarder was already created
+                       List<Vertex> forwardersList = g.V(forwardingPathVtx.id()).in("org.onap.relationships.inventory.BelongsTo").toList();
+               Iterator<Vertex> forwardersItr = forwardersList.iterator();
+               while (forwardersItr.hasNext()){
+                       Vertex existingForwarderVtx = forwardersItr.next();
+                       Vertex existingIntfVtx = g.V(existingForwarderVtx).out("org.onap.relationships.inventory.ForwardsTo").toList().get(0);
+                       if( existingIntfVtx.id().equals(intfVertex.id())) {
+                               //this forwarder has already been created from the forwarderMap
+                               return null;
+                       }
+               }
+                       Integer sequence = getSequenceFromPathMapPort(port);
+                       String role = getForwarderRole(port);
+                       
+                       Introspector forwarder = loader.introspectorFromName("forwarder");
+                       forwarderVtx = serializer.createNewVertex(forwarder);
+                       
+                       if (sequence != null && role != null) {
+                               forwarder.setValue("sequence", sequence);
+                               forwarder.setValue("forwarder-role", role );
+                               
+                               //Create tree edge from forwarding-path
+                               this.createTreeEdge(forwardingPathVtx, forwarderVtx);
+                               //Create cousin edge to p-interface or lag-interface
+                               this.createCousinEdge(intfVertex, forwarderVtx);
+                               
+                               serializer.serializeSingleVertex(forwarderVtx, forwarder, "migrations");
+                               
+//                             String forwarderVtxProps = this.asString(forwarderVtx);
+//                             logger.info(" forwarderVtxProps:" + forwarderVtxProps);
+                               
+                               String forwarderVtxSequence = forwarderVtx.value("sequence").toString() ;
+                               String forwarderVtxRole = forwarderVtx.value("forwarder-role").toString();
+                               String forwardingPathId = forwardingPathVtx.value("forwarding-path-id").toString();
+                               
+                               logger.info("\t Created new forwarder " + forwarderVtx + " with sequence = " + forwarderVtxSequence + " with role [" + forwarderVtxRole 
+                                               +"] as a child of forwarding-path [" + forwardingPathId + "]" );
+                               
+                               String dmaapMsg = System.nanoTime() + "_" + forwarderVtx.id().toString() + "_"  + forwarderVtx.value("resource-version").toString();
+                               dmaapMsgList.add(dmaapMsg);
+                               
+//                             Introspector forwarderIntrospector = serializer.getLatestVersionView(forwarderVtx);
+//                             this.notificationHelper.addEvent(forwarderVtx, forwarderIntrospector, EventAction.CREATE, this.serializer
+//                                             .getURIForVertex(forwarderVtx, false));
+//                             logger.info("\t Dmaap event sent for " + forwarderVtx + " for port ["+intfVertex.toString() + "] with sequence = [" + sequence + "] and role [" + role +"]" );
+                       }
+               } catch (Exception e) {
+                       logger.info("\t ERROR: Failure to PUT forwarder for EVC [" + evcName + "]" );
+               }
+               return forwarderVtx;
+       }
+       
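+       // Extracts the leading "<sequence>" token from the encoded port value ("<sequence>_<ptnii>_<interface>").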
+       private Integer getSequenceFromPathMapPort(String port) {
+               String[] subStringList = port.split("_");
+               String sequenceStr = subStringList[0]; //forwarder will have this sequence
+               if (sequenceStr != null && !sequenceStr.isEmpty()) {
+                       return Integer.parseInt(sequenceStr);
+               } else {
+                       return null;
+               }
+               
+       }
+
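+       // Creates the configuration node ("<evcName>-<sequence>") with type and sub-type "forwarder" and links it to the forwarder via a cousin edge.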
+       private Vertex createConfigurationObject(String evcName, Vertex portVtx, String port, Vertex forwarderVtx) {
+               Vertex configurationVtx = null;
+               String configurationId = null;
+               try {
+                       Introspector configuration = loader.introspectorFromName(CONFIGURATION_NODE_TYPE);
+                       
+                       configurationVtx = serializer.createNewVertex(configuration);
+                       String sequence = forwarderVtx.value("sequence").toString();
+                       configurationId = evcName + "-" + sequence;
+                       configuration.setValue("configuration-id", configurationId);
+                       configuration.setValue("configuration-type", "forwarder");
+                       configuration.setValue("configuration-sub-type", "forwarder");
+                       this.createCousinEdge(forwarderVtx, configurationVtx);
+                       serializer.serializeSingleVertex(configurationVtx, configuration, "migrations");
+                       
+                       logger.info("\t Created new configuration for forwarder " + configurationVtx + " with configuration-id= " + configurationVtx.value("configuration-id").toString() );
+                       
+                       String dmaapMsg = System.nanoTime() + "_" + configurationVtx.id().toString() + "_"      + configurationVtx.value("resource-version").toString();
+                       dmaapMsgList.add(dmaapMsg);
+//                     Introspector introspector = serializer.getLatestVersionView(configurationVtx);
+//                     this.notificationHelper.addEvent(configurationVtx, introspector, EventAction.CREATE, this.serializer.getURIForVertex(configurationVtx, false));
+//                     logger.info("\t Dmaap event sent for " + configurationVtx + " with configuration-id = " + configurationVtx.value("configuration-id").toString() );
+                       
+                       return configurationVtx;
+               } catch (Exception e) {
+                       logger.info("\t ERROR: Failure to PUT Configuration for forwarder [" + configurationId + "]" );
+               }
+               return configurationVtx;
+       }
+
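+       // Creates the forwarder-evc child of the configuration; which circuit-id/cvlan/svlan columns are used depends on the forwarder role
+       // (ingress = collector side, egress = bearer side, intermediate = chosen by sequence parity, with no circuit-id).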
+       private Vertex createForwarderEvcObject(Map<String, String> pathColValues, String forwarderRole, Vertex portVtx, String port, Vertex configurationVtx) {
+               Vertex forwarderEvcVtx = null;
+               String configurationId = null;
+               try {
+                       Introspector forwarderEvc = loader.introspectorFromName(FORWARDER_EVC_NODE_TYPE);
+                       forwarderEvcVtx = serializer.createNewVertex(forwarderEvc);
+                       configurationId = configurationVtx.value(this.PROPERTY_CONFIGURATION_ID).toString();
+                       
+                       String collectorFacingCircuit = pathColValues.get("collectorFacingCircuit");
+                       String bearerFacingCircuit = pathColValues.get("bearerFacingCircuit");
+                       String collectorCvlan = pathColValues.get("collectorCvlan");
+                       String bearerCvlan = pathColValues.get("bearerCvlan");
+                       String collectorSvlan = pathColValues.get("collectorSvlan");
+                       String bearerSvlan = pathColValues.get("bearerSvlan");
+                       
+                       forwarderEvc.setValue("forwarder-evc-id", configurationId);
+                       
+                       //Don't set circuitid for forwarder-evc connected to configuration that's connected to intermediate forwarder.
+                       if ("ingress".equalsIgnoreCase(forwarderRole)){
+                               forwarderEvc.setValue("circuit-id", checkForNull(collectorFacingCircuit));
+                               if (collectorCvlan != null && !collectorCvlan.isEmpty()) {
+                                       forwarderEvc.setValue("cvlan", collectorCvlan);
+                               }
+                               if (collectorSvlan != null && !collectorSvlan.isEmpty()) {
+                                       forwarderEvc.setValue("svlan", collectorSvlan);
+                               }
+                       } else if ("egress".equalsIgnoreCase(forwarderRole)){
+                               forwarderEvc.setValue("circuit-id", bearerFacingCircuit);
+                               if (bearerCvlan != null && !bearerCvlan.isEmpty()) {
+                                       forwarderEvc.setValue("cvlan", bearerCvlan);
+                               }
+                               if (bearerSvlan != null && !bearerSvlan.isEmpty()) {
+                                       forwarderEvc.setValue("svlan", bearerSvlan);
+                               }
+                       } else {
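+                               // Intermediate forwarder: use bearer-side VLANs for even sequence numbers and collector-side VLANs for odd ones.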
+                               int lastIndex = configurationId.lastIndexOf("-");
+                       String sequenceStr = configurationId.substring(lastIndex + 1);
+                               int i = Integer.parseInt(sequenceStr);
+                               if (i%2 == 0){
+                                       forwarderEvc.setValue("cvlan", checkForNull(bearerCvlan));
+                                       forwarderEvc.setValue("svlan", checkForNull(bearerSvlan));
+                               } else {
+                                       forwarderEvc.setValue("cvlan", checkForNull(collectorCvlan));
+                                       forwarderEvc.setValue("svlan", checkForNull(collectorSvlan));
+                               }
+                       }
+                       this.createTreeEdge(configurationVtx, forwarderEvcVtx);
+                       serializer.serializeSingleVertex(forwarderEvcVtx, forwarderEvc, "migrations");
+                       
+                       logger.info("\t Created new forwarder-evc as a child of configuration " + forwarderEvcVtx + " with forwarder-evc-id= " + forwarderEvcVtx.value("forwarder-evc-id").toString() );
+                       String dmaapMsg = System.nanoTime() + "_" + forwarderEvcVtx.id().toString() + "_"       + forwarderEvcVtx.value("resource-version").toString();
+                       dmaapMsgList.add(dmaapMsg);
+                       
+//                     logger.info("\t Forwarder-evc: "+ this.asString(forwarderEvcVtx));
+                       
+//                     Introspector introspector = serializer.getLatestVersionView(forwarderEvcVtx);
+//                     this.notificationHelper.addEvent(forwarderEvcVtx, introspector, EventAction.CREATE, this.serializer.getURIForVertex(forwarderEvcVtx, false));
+//                     logger.info("\t Dmaap event sent for " + forwarderEvcVtx + " with forwarder-evc-id = " + forwarderEvcVtx.value("forwarder-evc-id").toString() );
+               }  catch (Exception e) {
+                       logger.info("\t ERROR: Failure to PUT forwarder-evc for configuration [" + configurationId + "]" );
+               }
+               return forwarderEvcVtx;
+
+       }
+
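+       // Role is derived from the encoded port value: sequence 1 is "ingress"; a later entry whose value contains "." (a p-interface
+       // style port-aid, presumably the bearer end) is "egress"; everything else is "intermediate".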
+       private String getForwarderRole( String port) {
+               String role = null;
+               Integer seq = getSequenceFromPathMapPort(port);
+               if (seq != null ) {
+                       int sequence = seq.intValue();
+                       if (sequence == 1){
+                               role = "ingress";
+                       } else if (sequence > 1 && port.indexOf(".") > 0) {
+                               role = "egress";
+                       } else {
+                               role = "intermediate";
+                       }
+               }
+               return role;
+       }
+       
+       private String checkForNull(String s){
+       if (s!= null && !s.isEmpty()){
+               return s;
+       }
+       return null;
+    }
+
+    @Override
+    public Status getStatus() {
+        if (checkLog) {
+            return Status.CHECK_LOGS;
+        }
+        else if (success) {
+            return Status.SUCCESS;
+        }
+        else {
+            return Status.FAILURE;
+        }
+    }
+
+    @Override
+       public void commit() {
+               engine.commit();
+               createDmaapFiles(dmaapMsgList);
+       }
+    
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{this.FORWARDING_PATH_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigratePATHEvcInventory";
+    }
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/MigratePATHPhysicalInventory.java b/src/main/java/org/onap/aai/migration/v12/MigratePATHPhysicalInventory.java
new file mode 100644 (file)
index 0000000..af3d90a
--- /dev/null
@@ -0,0 +1,348 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+/*-
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+
+
+@MigrationPriority(26)
+@MigrationDangerRating(100)
+public class MigratePATHPhysicalInventory extends Migrator {
+
+       private static List<String> lagPortList = new ArrayList<String>();
+       private static Map<String, Vertex> pnfList = new HashMap<String, Vertex>();
+       private final String LAGINTERFACE_NODE_TYPE = "lag-interface";
+       private final String PNF_NODE_TYPE = "pnf";
+       private final String PROPERTY_PNF_NAME = "pnf-name";
+       private final String PROPERTY_INTERFACE_NAME = "interface-name";
+       private final String LAG_INTERFACE_NODE_TYPE = "lag-interface";
+       private static boolean success = true;
+    private static boolean checkLog = false;
+    private static GraphTraversalSource g = null;
+    private int headerLength;
+    
+    private static List<String> dmaapMsgList = new ArrayList<String>();
+    private static final String homeDir = System.getProperty("AJSC_HOME");
+    
+    //Create a map to store the evcs processed where lag-interfaces were found to track the sequence of ports
+    //key contains the evcName
+    //value is a map that contains the mapping for sequence of forwarders and corresponding portAids in the order they are found 
+    
+    private static Map<String, Map<Vertex, String>> pathFileMap = new HashMap<String, Map<Vertex, String>>();
+  
+    private static int processedLagInterfacesCount = 0;
+    private static int skippedRowCount = 0;
+    //Map with lineNumber and the reason for failure for each interface
+    private static Map<String, String> lagInterfacesNotProcessedMap = new HashMap<String, String>();
+    
+       
+    public MigratePATHPhysicalInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.g = this.engine.asAdmin().getTraversalSource();
+    }
+
+    @Override
+    public void run() {
+        logger.info("---------- Start migration of PATH file Physical Inventory  ----------");
+        String configDir = System.getProperty("BUNDLECONFIG_DIR");
+        if (homeDir == null) {
+            logger.info("ERROR: Could not find sys prop AJSC_HOME");
+            success = false;
+            return;
+        }
+        if (configDir == null) {
+            success = false;
+            return;
+        }
+        
+        String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/";
+        int fileLineCounter = 0;
+        String fileName = feedDir+ "path.csv";
+        logger.info(fileName);
+        logger.info("---------- Processing PATH Entries from file  ----------");
+        try  {
+               List<String> lines = Files.readAllLines(Paths.get(fileName));
+            Iterator<String> lineItr = lines.iterator();
+            while (lineItr.hasNext()){
+                String line = lineItr.next().replace("\n", "").replace("\r", "");
+                logger.info("\n");
+                if (!line.isEmpty()) {
+                    if (fileLineCounter != 0) {
+                        String[] colList = line.split("\\s*,\\s*", -1);
+                        Map<String, String> pathColValues = new HashMap<String, String>();
+                        pathColValues.put("evcName", colList[1]);
+                        pathColValues.put("bearerFacingCircuit", colList[4]);
+                        pathColValues.put("bearerCvlan", colList[6]);
+                       pathColValues.put("bearerSvlan", colList[7]);
+                       pathColValues.put("bearerPtniiName", colList[8]);
+                       pathColValues.put("bearerPortAid", colList[12]);
+                       pathColValues.put("collectorFacingCircuit", colList[14]);
+                       pathColValues.put("collectorCvlan", colList[16]);
+                       pathColValues.put("collectorSvlan", colList[17]);
+                       pathColValues.put("collectorPtniiName", colList[18]);
+                       pathColValues.put("collectorPortAid", colList[22]);
+                       
+                       // For each row, check if the collector and bearerPnfs exist and create lag interfaces
+                       
+                       validateCollectorPnfAndCreateLagInterface(pathColValues, (fileLineCounter+1));
+                       validateBearerPnfAndCreateLagInterface(pathColValues, (fileLineCounter+1));
+                       
+                    } else {
+                        this.headerLength = line.split("\\s*,\\s*", -1).length;
+                        logger.info("headerLength: " + headerLength);
+                        if (this.headerLength < 23){
+                            logger.info("ERROR: Input file should have at least 23 columns");
+                            this.success = false;
+                            return;
+                        }
+                    }
+                }
+                fileLineCounter++;
+            }
+            logger.info ("\n \n ******* Final Summary for PATH FILE Physical Inventory Migration ********* \n");
+            logger.info("Lag Interfaces processed: "+processedLagInterfacesCount);
+            logger.info("Total Rows Count: "+(fileLineCounter + 1));
+            logger.info("Fallout Lag Interfaces Count : "+lagInterfacesNotProcessedMap.size() +"\n");
+            
+            if (!lagInterfacesNotProcessedMap.isEmpty()) {
+               logger.info("------ Fallout Details: ------");
+               lagInterfacesNotProcessedMap.forEach((lineEntry, errorMsg) -> {
+                       int lineNumberIndex = lineEntry.indexOf("-");
+                       String lineNumber = lineEntry.substring(0, lineNumberIndex);
+                       String portDetail = lineEntry.substring(lineNumberIndex+1);
+                       logger.info(errorMsg + ": on row "+ lineNumber +" for PortAid ["+ portDetail+"]");
+               });
+            }
+        } catch (FileNotFoundException e) {
+            logger.info("ERROR: Could not find file " + fileName, e.getMessage());
+            success = false;
+            checkLog = true;
+        } catch (IOException e) {
+            logger.info("ERROR: Issue reading file " + fileName, e);
+            success = false;
+        } catch (Exception e) {
+            logger.info("encountered exception", e);
+            e.printStackTrace();
+            success = false;
+        }
+    }
+    
+    
+       private void validateBearerPnfAndCreateLagInterface(Map<String, String> pathColValues, int lineNumber) {
+               String bearerPtniiName = pathColValues.get("bearerPtniiName");
+       String bearerPortAid = pathColValues.get("bearerPortAid");
+       Vertex pnfVtx = getPnf(bearerPtniiName);
+       if (pnfVtx != null){
+               //create lag-interface
+               createLagInterfaceObject(pnfVtx, bearerPortAid, lineNumber);
+       } else {
+               int lagIdentifierIndex = bearerPortAid.indexOf("_");
+               if (lagIdentifierIndex > 0) {
+                       lagInterfacesNotProcessedMap.put(""+ lineNumber+ "-"+bearerPtniiName+"-"+bearerPortAid+"", "Pnf ["+bearerPtniiName+"] not found" );
+               }
+       }
+               
+       }
+
+       private void validateCollectorPnfAndCreateLagInterface(Map<String, String> pathColValues, int lineNumber) {
+               String collectorPtniiName = pathColValues.get("collectorPtniiName");
+       String collectorPortAid = pathColValues.get("collectorPortAid");
+       Vertex pnfVtx = getPnf(collectorPtniiName);
+       if (pnfVtx != null){
+               //create lag-interface
+               createLagInterfaceObject(pnfVtx, collectorPortAid, lineNumber);
+       }else {
+               int lagIdentifierIndex = collectorPortAid.indexOf("_");
+               if (lagIdentifierIndex > 0) {
+                       lagInterfacesNotProcessedMap.put(""+ lineNumber+ "-"+collectorPtniiName+"-"+collectorPortAid+"", "Pnf ["+collectorPtniiName+"] not found" );
+               }
+       }
+       }
+       
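+       // A port-aid containing "_" is treated as "<lag-interface-name>_<member>"; the lag-interface is created under the pnf only if
+       // neither the local lagPortList cache nor AAI already has it. Other port-aids are skipped here.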
+       private void createLagInterfaceObject(Vertex pnfVtx, String portAid, int lineNumber) {
+               String pnfName = pnfVtx.value(PROPERTY_PNF_NAME);
+               
+               if (pnfName != null && !pnfName.isEmpty()) {
+                       
+                       if(portAid == null ||  portAid.isEmpty()){
+                               logger.info("\t Invalid port entry [" + portAid + "] - skipping record..." );
+                       } else{
+                               if (!AAIConfig.isEmpty(portAid)) {
+                                       GraphTraversal<Vertex, Vertex> portList;
+                                       
+                                       boolean isPortAidALagIntf = false;
+                                       String interfaceName = null;
+                                       
+                                       int lagIdentifierIndex = portAid.indexOf("_");
+                                       
+                                       if (lagIdentifierIndex > 0) {
+                                               String[] subStringList = portAid.split("_");
+                                               interfaceName = subStringList[0];
+                                               isPortAidALagIntf = true;
+                                       }
+                                       
+                                       if (isPortAidALagIntf)
+                                       {
+                                               try {
+                                                       
+                                                       if (lagPortList != null && lagPortList.contains(pnfName+"_"+interfaceName)){
+                                                               logger.info("\t lag-interface [" + interfaceName        + "] already exists in AAI - skipping");
+                                                               return;
+                                                       }
+                                                               
+                                                               
+                                                       portList = g.V(pnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", interfaceName).has("aai-node-type", "lag-interface");
+                                               if (portList!= null && portList.hasNext()) {
+                                                       Vertex lagInterfaceVtx = portList.next();
+                                                   if (lagInterfaceVtx != null && lagInterfaceVtx.property("interface-name").isPresent()) {
+                                                                       logger.info("\t lag-interface [" + interfaceName        + "] already exists in AAI - skipping");
+//                                                                     lagInterfacesNotProcessedMap.put(""+lineNumber+"-"+pnfName+"-"+portAid+"", "lag-interface already exists for ["+interfaceName+"]" );
+                                                               }
+                                               }
+                                               else if (portList == null || !portList.hasNext()) {
+                                                       //Create lag-interface in pnf
+                                                               Introspector lagInterface = loader.introspectorFromName(LAG_INTERFACE_NODE_TYPE);
+                                                               
+                                                               Vertex lagIntVtx = serializer.createNewVertex(lagInterface);
+                                                               lagInterface.setValue("interface-name", interfaceName);
+                                                               this.createTreeEdge(pnfVtx, lagIntVtx);
+                                                               serializer.serializeSingleVertex(lagIntVtx, lagInterface, "migrations");
+                                                               
+                                                               logger.info("\t Created new lag-interface " + lagIntVtx + " with interface-name= " + lagIntVtx.value("interface-name"));
+                                                               
+                                                               processedLagInterfacesCount++;
+                                                               lagPortList.add(pnfName+"_"+interfaceName);
+                                                               
+                                                               String dmaapMsg = System.nanoTime() + "_" + lagIntVtx.id().toString() + "_"     + lagIntVtx.value("resource-version").toString();
+                                                               dmaapMsgList.add(dmaapMsg);
+//                                                             Introspector introspector = serializer.getLatestVersionView(lagIntVtx);
+//                                                             this.notificationHelper.addEvent(lagIntVtx, introspector, EventAction.CREATE, this.serializer.getURIForVertex(lagIntVtx, false));
+//                                                             logger.info("\t Dmaap event sent for " + lagIntVtx + " with interface-name= " + lagIntVtx.value("interface-name").toString() );
+                                               }
+                                               } catch (Exception e) {
+                                                       logger.info("\t ERROR: Failure to create lag-interface ["+ interfaceName + "]");
+                                                       lagInterfacesNotProcessedMap.put(""+lineNumber+"-"+pnfName+"-"+portAid+"", "Failed to create lag-interface ["+interfaceName+"]" );
+                                               }
+                                       } 
+                                       else 
+                                       {
+                                               logger.info("\t Port-Aid[" +portAid +"] on PNF["+pnfName+"] not a lag-interface, skipping....");
+                                       }
+                               }
+                               
+                       }
+               }
+       }
+
+       
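+       // Looks the pnf up by pnf-name and caches hits in pnfList; anything other than exactly one match is treated as not found.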
+       private Vertex getPnf(String ptniiName) {
+               Vertex pnfVtx = null;
+               if (!AAIConfig.isEmpty(ptniiName)) {
+                       if (!pnfList.isEmpty() && pnfList.containsKey(ptniiName)){
+                               return pnfList.get(ptniiName);
+                       }
+                       List<Vertex> collectorPnfList = g.V().has(this.PROPERTY_PNF_NAME, ptniiName).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).toList();
+                       if (collectorPnfList != null && collectorPnfList.size() == 1) {
+                               pnfVtx = collectorPnfList.get(0);
+                               pnfList.put(ptniiName, pnfVtx);
+                               logger.info("\t Pnf [" + ptniiName + "] found in AAI");
+                       } else if (collectorPnfList == null || collectorPnfList.size() == 0) {
+                               logger.info("\t ERROR: Failure to find Pnf [" + ptniiName       + "]" );
+                       }
+               } else {
+                       logger.info("\t ERROR: Failure to find Pnf [" + ptniiName       + "]" );
+               }
+               return pnfVtx;
+       }
+       
+    @Override
+    public Status getStatus() {
+        if (checkLog) {
+            return Status.CHECK_LOGS;
+        }
+        else if (success) {
+            return Status.SUCCESS;
+        }
+        else {
+            return Status.FAILURE;
+        }
+    }
+    
+    @Override
+       public void commit() {
+               engine.commit();
+               createDmaapFiles(dmaapMsgList);
+       }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{this.LAG_INTERFACE_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigratePATHPhysicalInventory";
+    }
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateSAREvcInventory.java b/src/main/java/org/onap/aai/migration/v12/MigrateSAREvcInventory.java
new file mode 100644 (file)
index 0000000..e05999d
--- /dev/null
@@ -0,0 +1,551 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+/*-
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Optional;
+import java.util.List;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.*;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+
+
+@MigrationPriority(27)
+@MigrationDangerRating(100)
+public class MigrateSAREvcInventory extends Migrator {
+
+       private static Map<String, Vertex> pnfList = new HashMap<String, Vertex>();
+       private static List<String> portList = new ArrayList<String>();
+       private final String SAREA_GLOBAL_CUSTOMER_ID = "8a00890a-e6ae-446b-9dbe-b828dbeb38bd";
+       private final String CONFIGURATION_NODE_TYPE = "configuration";
+       private final String SERVICE_INSTANCE_NODE_TYPE = "service-instance";
+       private final String SERVICE_SUBSCRIPTON_NODE_TYPE = "service-subscription";
+       private final String PROPERTY_SERVICE_TYPE = "service-type";
+       private final String SERVICE_INSTANCE_ID = "service-instance-id";
+       private final String FORWARDING_PATH_NODE_TYPE = "forwarding-path";
+       private final String FOWARDING_PATH_ID = "forwarding-path-id";
+       private final String EVC_NODE_TYPE = "evc";
+       private final String PROPERTY_CONFIGURATION_ID = "configuration-id";
+       private final String PNF_NODE_TYPE = "pnf";
+       private final String PROPERTY_PNF_NAME = "pnf-name";
+       private final String PROPERTY_INTERFACE_NAME = "interface-name";
+       private final String PINTERFACE_NODE_TYPE = "p-interface";
+       private static boolean success = true;
+    private static boolean checkLog = false;
+    private static GraphTraversalSource g = null;
+    private int headerLength;
+  
+    private static int processedEvcsCount = 0;
+    private static int falloutEvcsCount = 0;
+    private static Map<String, String> falloutEvcsMap = new HashMap<String, String>();
+    
+    private static final String homeDir = System.getProperty("AJSC_HOME");
+       private static List<String> dmaapMsgList = new ArrayList<String>();
+       
+    public MigrateSAREvcInventory(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.g = this.engine.asAdmin().getTraversalSource();
+    }
+
+    @Override
+    public void run() {
+        logger.info("---------- Start migration of SAR EVC Inventory  ----------");
+        String configDir = System.getProperty("BUNDLECONFIG_DIR");
+        if (homeDir == null) {
+            logger.info("ERROR: Could not find sys prop AJSC_HOME");
+            success = false;
+            return;
+        }
+        if (configDir == null) {
+            success = false;
+            return;
+        }
+        
+        String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/";
+        int fileLineCounter = 0;
+        String fileName = feedDir+ "sar.csv";
+        logger.info(fileName);
+        logger.info("---------- Processing SAR Entries from file  ----------");
+        
+        try  {
+            String line;
+            
+            List<String> lines = Files.readAllLines(Paths.get(fileName));
+            Iterator<String> lineItr = lines.iterator();
+            while (lineItr.hasNext()){
+               line = lineItr.next().replace("\n", "").replace("\r", "");
+                logger.info("\n");
+                if (!line.isEmpty()) {
+                    if (fileLineCounter != 0) {
+                        String[] colList = line.split("\\s*,\\s*", -1);
+//                        if (colList.length != headerLength) {
+//                            logger.info("ERROR: SAR line should contain " + headerLength + " columns, contains " + colList.length + " instead.");
+//                            success = false;
+//                            continue;
+//                        }
+                        Map<String, String> sarColValues = new HashMap<String, String>();
+                        sarColValues.put("evcName", colList[0]);
+                        sarColValues.put("subscriberName", colList[1]);
+                        sarColValues.put("espName", colList[2]);
+                        sarColValues.put("bearerCircuitId", colList[3]);
+                       sarColValues.put("bearerTagMode", colList[4]);
+                       sarColValues.put("bearerCvlan", colList[5]);
+                       sarColValues.put("bearerSvlan", colList[6]);
+                       sarColValues.put("bearerPtniiName", colList[7]);
+                       sarColValues.put("bearerSlotName", colList[8]);
+                       String bearerPortAid = colList[9].replaceAll("^\"|\"$", "").replaceAll("\\s+","");
+                       sarColValues.put("bearerPortAid", bearerPortAid);
+                       sarColValues.put("bearerPortType", colList[10]);
+                       sarColValues.put("collectorCircuitId", colList[11]);
+                       sarColValues.put("collectorTagMode", colList[12]);
+                       sarColValues.put("collectorCvlan", colList[13]);
+                       sarColValues.put("collectorSvlan", colList[14]);
+                       sarColValues.put("collectorPtniiName", colList[15]);
+                       sarColValues.put("collectorSlotName", colList[16]);
+                       String collectorPortAid = colList[17].replaceAll("^\"|\"$", "").replaceAll("\\s+","");
+                       sarColValues.put("collectorPortAid", collectorPortAid);
+                       sarColValues.put("collectorPortType", colList[18]);
+                       sarColValues.put("espEvcCircuitId", colList[19]);
+                       sarColValues.put("evcAccessCIR", colList[20]);
+                       
+                       String evcName = sarColValues.get("evcName");
+                        if (!AAIConfig.isEmpty(evcName)) {
+                               logger.info("---------- Processing Line " + line + "----------");
+                            logger.info("\t Evc Name = " + evcName );
+                            
+                            boolean isEntryValid = validatePnfsAndPorts(sarColValues, evcName);
+                            
+                            if (!isEntryValid){
+                               logger.info("\t ERROR: Skipping processing for line containing evc-name [" +evcName+ "]");
+                               falloutEvcsCount++;
+                               falloutEvcsMap.put((fileLineCounter+1)+"", "["+evcName+"] - PortAid/Pnf does not exist" );
+                               fileLineCounter++;
+                               continue;
+                            }
+                            
+                            createNewObjectsFromSARFile(sarColValues, evcName, fileLineCounter);
+                               
+                        }
+                    } else {
+                        this.headerLength = line.split("\\s*,\\s*", -1).length;
+                        logger.info("headerLength: " + headerLength);
+                        if (this.headerLength < 21){
+                            logger.info("ERROR: Input file should have 21 columns");
+                            this.success = false;
+                            return;
+                        }
+                    }
+                }
+                fileLineCounter++;
+            }
+            
+            logger.info ("\n \n ******* Final Summary for SAR FILE Migration ********* \n");
+            logger.info("Evcs processed: "+processedEvcsCount);
+            logger.info("Fallout Evcs count: "+falloutEvcsCount);
+            if (!falloutEvcsMap.isEmpty()) {
+               logger.info("------ Fallout Details: ------");
+               falloutEvcsMap.forEach((lineNumber, errorMsg) -> {
+                       logger.info(errorMsg + ": on row "+lineNumber.toString());
+               });
+            }
+        } catch (FileNotFoundException e) {
+            logger.info("ERROR: Could not find file " + fileName, e.getMessage());
+            success = false;
+            checkLog = true;
+        } catch (IOException e) {
+            logger.info("ERROR: Issue reading file " + fileName, e);
+            success = false;
+        } catch (Exception e) {
+            logger.info("encountered exception", e);
+            e.printStackTrace();
+            success = false;
+        }
+    }
+    
+    
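+    // A SAR row is only processed when both pnfs and both p-interfaces (collector and bearer) already exist in AAI; the lookups also
+    // prime the pnfList/portList caches reused by later rows.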
+    private boolean validatePnfsAndPorts(Map<String, String> sarColValues, String evcName) {
+       
+       String collectorPtniiName = sarColValues.get("collectorPtniiName");
+       String bearerPtniiName = sarColValues.get("bearerPtniiName");
+       String collectorPortAid = sarColValues.get("collectorPortAid");
+       String bearerPortAid = sarColValues.get("bearerPortAid");
+               boolean isValid = validateCollectorPnf(collectorPtniiName, evcName) && validateBearerPnf(bearerPtniiName, evcName) 
+                               && validateCollectorPort(collectorPortAid, collectorPtniiName, evcName) 
+                               && validateBearerPort(bearerPortAid, bearerPtniiName, evcName) ;
+               return isValid;
+       }
+
+       private boolean validateCollectorPnf(String collectorPtniiName, String evcName) {
+               
+               boolean isValid = false;
+               if (!AAIConfig.isEmpty(collectorPtniiName)) {
+                       if (!pnfList.isEmpty() && pnfList.containsKey(collectorPtniiName)){
+                               isValid = true;
+                               logger.info("\t Pnf [" + collectorPtniiName + "] found in AAI");
+                               return isValid;
+                       }
+                       List<Vertex> collectorPnfList = g.V().has(this.PROPERTY_PNF_NAME, collectorPtniiName).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).toList();
+                       if (collectorPnfList != null && collectorPnfList.size() == 1) {
+                               isValid = true;
+                               pnfList.put(collectorPtniiName, collectorPnfList.get(0));
+                               logger.info("\t Pnf [" + collectorPtniiName + "] found in AAI");
+                       } else if (collectorPnfList == null || collectorPnfList.size() == 0) {
+                               logger.info("\t ERROR: Failure to find Pnf [" + collectorPtniiName      + "] for EVC [" + evcName + "]");
+                       }
+               }
+               return isValid;
+       }
+       
+       private boolean validateBearerPnf(String bearerPtniiName, String evcName) {
+               boolean isValid = false;
+               if (!AAIConfig.isEmpty(bearerPtniiName)) {
+                       if (!pnfList.isEmpty() && pnfList.containsKey(bearerPtniiName)){
+                               isValid = true;
+                               logger.info("\t Pnf [" + bearerPtniiName + "] found in AAI");
+                               return isValid;
+                       }
+                       List<Vertex> bearerPnfList = g.V().has(this.PROPERTY_PNF_NAME, bearerPtniiName).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).toList();
+               if (bearerPnfList!= null && bearerPnfList.size() == 1){
+                   isValid = true;
+                   pnfList.put(bearerPtniiName, bearerPnfList.get(0));
+                   logger.info("\t Pnf ["+ bearerPtniiName + "] found in AAI");
+               }
+               else if (bearerPnfList == null || bearerPnfList.size() == 0) {
+                       logger.info("\t ERROR:  Failure to find Pnf ["+ bearerPtniiName + "]  for EVC [" + evcName + "]");
+               }       
+           }
+       return isValid;
+       }
+       
+       private boolean validateCollectorPort(String collectorPortAid, String collectorPtniiName, String evcName) {
+               boolean isValid = false;
+               if (!AAIConfig.isEmpty(collectorPortAid)) {
+                       if (!portList.isEmpty() && portList.contains(collectorPtniiName+"_"+collectorPortAid)){
+                               isValid = true;
+                               logger.info("\t Port ["+ collectorPortAid + "] found in AAI");
+                               return isValid;
+                       }
+                       GraphTraversal<Vertex, Vertex> collectorPortList;
+                       Vertex collectorPnfVtx  = pnfList.get(collectorPtniiName);
+                       if (collectorPnfVtx == null ) {
+                               logger.info("\t ERROR: Failure to find p-interface ["+ collectorPortAid + "] for EVC [" + evcName + "]");
+                               return isValid;
+                       } else {
+                               collectorPortList = g.V(collectorPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", collectorPortAid).has("aai-node-type", "p-interface");
+                               if (collectorPortList != null && collectorPortList.hasNext()) {
+                                       isValid = true;
+                                       portList.add(collectorPtniiName + "_" + collectorPortAid);
+                                       logger.info("\t Port [" + collectorPortAid + "] found in AAI");
+                               } else if (collectorPortList == null || !collectorPortList.hasNext()) {
+                                       logger.info("\t ERROR: Failure to find p-interface [" + collectorPortAid + "] for EVC [" + evcName + "]");
+                               }
+                       }
+               }
+               return isValid;
+       }
+       
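+       // Same check as validateCollectorPort, but for the p-interface under the bearer pnf.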
+       private boolean validateBearerPort(String bearerPortAid, String bearerPtniiName, String evcName) {
+               boolean isValid = false;
+               
+               if (!AAIConfig.isEmpty(bearerPortAid)) {
+                       if (!portList.isEmpty() && portList.contains(bearerPtniiName+"_"+bearerPortAid)){
+                               isValid = true;
+                               logger.info("\t Port ["+ bearerPortAid + "] found in AAI");
+                               return isValid;
+                       }
+                       GraphTraversal<Vertex, Vertex> bearerPortList;
+                       Vertex bearerPnfVtx  = pnfList.get(bearerPtniiName);
+                       if (bearerPnfVtx == null ) {
+                               logger.info("\t ERROR: Failure to find p-interface ["+ bearerPortAid + "] for EVC [" + evcName + "]");
+                               return isValid;
+                       } else {
+                               bearerPortList = g.V(bearerPnfVtx).in("tosca.relationships.network.BindsTo").has("interface-name", bearerPortAid).has("aai-node-type", "p-interface");
+                               if (bearerPortList != null && bearerPortList.hasNext()) {
+                                       isValid = true;
+                                       portList.add(bearerPtniiName + "_" + bearerPortAid);
+                                       logger.info("\t Port [" + bearerPortAid + "] found in AAI");
+                               } else if (bearerPortList == null || !bearerPortList.hasNext()) {
+                                       logger.info("\t ERROR: Failure to find p-interface [" + bearerPortAid + "] for EVC [" + evcName + "]");
+                               }
+                       }
+               }
+               return isValid;
+       }
+
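+       // Builds the object chain for one SAR file row: service-instance, then forwarding-path,
+       // configuration and evc, each created off of the vertex produced in the previous step.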
+       private void createNewObjectsFromSARFile(Map<String, String> sarColValues, String evcName, int lineNumber) {
+       Vertex serviceInstanceVtx = createNewServiceInstanceFromSARData(sarColValues, evcName, lineNumber);
+       if (serviceInstanceVtx != null && serviceInstanceVtx.property("service-instance-id").isPresent()) {
+               Vertex forwardingPathVtx = createNewForwardingPathFromSARData(sarColValues, serviceInstanceVtx, lineNumber);
+               Vertex configurationVtx = createNewConfigurationFromSARData(sarColValues, forwardingPathVtx, lineNumber);
+               Vertex evcVtx = createNewEvcFromSARData(sarColValues, configurationVtx, lineNumber);
+               }
+       }
+
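+    // Creates (or reuses) the SAREA service-instance for this evc under the SAREA service
+    // subscription of the SAREA_GLOBAL_CUSTOMER_ID customer; fallout counters and the dmaap
+    // message list are updated accordingly.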
+    private Vertex createNewServiceInstanceFromSARData(Map<String, String> sarColValues, String evcName, int lineNumber) {
+       
+       String serviceType = "SAREA";
+       Vertex serviceInstanceVtx = null;
+
+               try {
+                        
+                       GraphTraversal<Vertex, Vertex> servSubVtxList = g.V().has("global-customer-id", SAREA_GLOBAL_CUSTOMER_ID)
+                                       .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA");
+                       
+                        if (servSubVtxList!= null && servSubVtxList.hasNext()){        
+                                Vertex serviceSubscriptionVtx = servSubVtxList.next();
+                               if (serviceSubscriptionVtx != null ) {
+                                       
+                                       List<Vertex> existingServInstVtxList = g.V(serviceSubscriptionVtx).in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "service-instance")
+                                               .has("service-instance-id",evcName).toList();
+                       
+                                       if (existingServInstVtxList != null && existingServInstVtxList.size() > 0) {
+                                               logger.info("\t service-instance already exists for evc " + evcName + " - skipping");
+                                               return existingServInstVtxList.iterator().next();
+                                       }
+                                       else if (existingServInstVtxList != null && existingServInstVtxList.size() == 0) {
+                                               Introspector servInstance = loader.introspectorFromName("service-instance");
+                                               serviceInstanceVtx = serializer.createNewVertex(servInstance);
+                                               String serviceInstanceId = sarColValues.get("evcName");
+                                               servInstance.setValue("service-instance-id", serviceInstanceId);
+                                               servInstance.setValue("service-type", serviceType);
+                                               this.createTreeEdge(serviceSubscriptionVtx, serviceInstanceVtx);
+                                               serializer.serializeSingleVertex(serviceInstanceVtx, servInstance, "migrations");
+
+                                               logger.info("\t Created new service-instance " + serviceInstanceVtx + " with service-instance-id = " + serviceInstanceId);
+
+                                               String dmaapMsg = System.nanoTime() + "_" + serviceInstanceVtx.id().toString() + "_" + serviceInstanceVtx.value("resource-version").toString();
+                                               dmaapMsgList.add(dmaapMsg);
+                                               processedEvcsCount++;
+                                       }
+                                       else {
+                                               logger.info("\t ERROR: More than one service-instance found for evc-name: " + evcName);
+                                       }
+                               }
+                       } else {
+                               logger.info("\t ERROR: SAREA Subscription not found for Customer ["+SAREA_GLOBAL_CUSTOMER_ID+"]");
+                               falloutEvcsCount++;
+                               falloutEvcsMap.put((lineNumber+1)+"", "["+evcName+"] - SAREA Subscription not found for Customer ["+SAREA_GLOBAL_CUSTOMER_ID+"]" );
+                       }
+               } catch (Exception e) {
+                       logger.info("\t ERROR: Failure to PUT service-instance for EVC [" + evcName + "]" );
+                       falloutEvcsCount++;
+                       falloutEvcsMap.put((lineNumber+1)+"", "["+evcName+"] - Failure to PUT service-instance for EVC" );
+               }
+               return serviceInstanceVtx;
+
+       }
+    
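+    // Creates (or reuses) the forwarding-path named after the service-instance-id and links it
+    // to the service-instance with a cousin edge.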
+    private Vertex createNewForwardingPathFromSARData(Map<String, String> sarColValues, Vertex serviceInstanceVtx, int lineNumber) {
+               Vertex fpVertex = null;
+               String serviceInstanceId = serviceInstanceVtx.value(this.SERVICE_INSTANCE_ID);
+               
+               try {
+                       
+                       List<Vertex> fpList = g.V(serviceInstanceVtx).in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type","forwarding-path")
+                                       .has("forwarding-path-id", serviceInstanceId).toList();
+                       if (fpList != null && !fpList.isEmpty()){
+                               logger.info("\t forwarding-path already exists for evc " + serviceInstanceId + " - skipping");
+                               return fpList.iterator().next();
+                       } 
+                       
+                       //If forwarding-path does not exist, create it
+                       Introspector fpIntrospector = loader.introspectorFromName(FORWARDING_PATH_NODE_TYPE);
+                       fpVertex = serializer.createNewVertex(fpIntrospector);
+                       
+                       fpIntrospector.setValue("forwarding-path-id", serviceInstanceId);
+                       fpIntrospector.setValue("forwarding-path-name", serviceInstanceId);
+                       this.createCousinEdge(fpVertex, serviceInstanceVtx);
+                       serializer.serializeSingleVertex(fpVertex, fpIntrospector, "migrations");
+
+                       logger.info("\t Created new forwarding-path " + fpVertex + " with forwarding-path-id = " + fpVertex.value("forwarding-path-id").toString() );
+                       String dmaapMsg = System.nanoTime() + "_" + fpVertex.id().toString() + "_"      + fpVertex.value("resource-version").toString();
+                       dmaapMsgList.add(dmaapMsg);
+
+               } catch (Exception e) {
+                       logger.info("\t ERROR: Failure to PUT forwarding-path for EVC [" + serviceInstanceId + "]" );
+                       processedEvcsCount--;
+                       falloutEvcsCount++;
+                       falloutEvcsMap.put((lineNumber+1)+"", "["+serviceInstanceId+"] - Failure to PUT forwarding-path for EVC" );
+               }
+               return fpVertex;
+       }
+    
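+    // Creates (or reuses) the configuration node (configuration-type "forwarding-path",
+    // configuration-sub-type "evc") and links it to the forwarding-path with a cousin edge.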
+    private Vertex createNewConfigurationFromSARData(Map<String, String> sarColValues, Vertex forwardingPathVtx, int lineNumber) {
+       
+       Vertex configurationVtx = null;
+       String forwardingPathId = forwardingPathVtx.value(this.FOWARDING_PATH_ID);
+       try {
+               
+               List<Vertex> configList = g.V(forwardingPathVtx).out("org.onap.relationships.inventory.Uses").has("aai-node-type","configuration")
+                               .has("configuration-id", forwardingPathId).toList();
+               if (configList != null && !configList.isEmpty()){
+                       logger.info("\t configuration already exists for evc " + forwardingPathId + " - skipping");
+                       return configList.iterator().next();
+               } 
+               
+               //If configuration does not exist, create it
+               Introspector configuration = loader.introspectorFromName(CONFIGURATION_NODE_TYPE);
+                       configurationVtx = serializer.createNewVertex(configuration);
+                       
+                       configuration.setValue("configuration-id", forwardingPathId);
+                       configuration.setValue("configuration-type", "forwarding-path");
+                       configuration.setValue("configuration-sub-type", "evc");
+                       this.createCousinEdge(forwardingPathVtx, configurationVtx);
+                       serializer.serializeSingleVertex(configurationVtx, configuration, "migrations");
+                       
+                       logger.info("\t Created new configuration for forwarding-path " + configurationVtx + " with configuration-id= " + configurationVtx.value("configuration-id").toString() );
+                       
+                       String dmaapMsg = System.nanoTime() + "_" + configurationVtx.id().toString() + "_"      + configurationVtx.value("resource-version").toString();
+                       dmaapMsgList.add(dmaapMsg);
+                       
+       }catch (Exception e) {
+                       logger.info("\t ERROR: Failure to PUT configuration for EVC [" + forwardingPathId + "]" );
+                       processedEvcsCount--;
+                       falloutEvcsCount++;
+                       falloutEvcsMap.put((lineNumber+1)+"", "["+forwardingPathId+"] - Failure to PUT configuration for EVC" );
+               }
+               return configurationVtx;
+       }
+    
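+    // Creates the evc as a child of the configuration, splitting the SAR "evcAccessCIR" column
+    // into cir-value and cir-units (the last four characters are treated as the units).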
+    private Vertex createNewEvcFromSARData(Map<String, String> sarColValues, Vertex configurationVtx, int lineNumber) {
+       String evcId = null;
+       Vertex evcVtx = null;
+       try {
+               Introspector evc = loader.introspectorFromName(EVC_NODE_TYPE);
+                       evcVtx = serializer.createNewVertex(evc);
+                       evcId = configurationVtx.value(this.PROPERTY_CONFIGURATION_ID);
+                       
+                       String cir = sarColValues.get("evcAccessCIR");
+                       int length = cir.length();
+                       String cirValue =  cir.substring(0,(length-4));
+                       String cirUnits =  cir.substring((length-4), (length));
+                       
+                       String espEvcCircuitId = sarColValues.get("espEvcCircuitId");
+                       String espName = sarColValues.get("espName");
+                       String collectorTagMode = sarColValues.get("collectorTagMode");
+                       String bearerTagMode = sarColValues.get("bearerTagMode");
+                       
+                       evc.setValue("evc-id", evcId);
+                       evc.setValue("forwarding-path-topology", "PointToPoint");
+                       evc.setValue("cir-value", checkForNull(cirValue));
+                       evc.setValue("cir-units", checkForNull(cirUnits));
+                       evc.setValue("esp-evc-circuit-id", checkForNull(espEvcCircuitId));
+                       evc.setValue("esp-evc-cir-value", checkForNull(cirValue));
+                       evc.setValue("esp-evc-cir-units", checkForNull(cirUnits));
+                       evc.setValue("esp-itu-code", checkForNull(espName));
+                       evc.setValue("tagmode-access-ingress", checkForNull(collectorTagMode));
+                       evc.setValue("tagmode-access-egress", checkForNull(bearerTagMode));
+                       this.createTreeEdge(configurationVtx, evcVtx);
+                       serializer.serializeSingleVertex(evcVtx, evc, "migrations");
+                       
+                       logger.info("\t Created new evc as a child of configuration " + evcVtx + " with evc-id= " + evcVtx.value("evc-id").toString() );
+                       String dmaapMsg = System.nanoTime() + "_" + evcVtx.id().toString() + "_"        + evcVtx.value("resource-version").toString();
+                       dmaapMsgList.add(dmaapMsg);
+                       
+//                     Introspector introspector = serializer.getLatestVersionView(evcVtx);
+//                     this.notificationHelper.addEvent(evcVtx, introspector, EventAction.CREATE, this.serializer.getURIForVertex(evcVtx, false));
+//                     logger.info("\t Dmaap event sent for " + evcVtx + " with evc-id = " + evcId);
+       }catch (Exception e) {
+                       logger.info("\t ERROR: Failure to PUT EVC for evc-name [" + evcId + "]" );
+                       processedEvcsCount--;
+                       falloutEvcsCount++;
+                       falloutEvcsMap.put((lineNumber+1)+"", "["+evcId+"] - Failure to PUT EVC" );
+               }
+               return evcVtx;
+
+       }
+    
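+    // Returns the string unchanged if it is non-null and non-empty, otherwise returns null.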
+    private String checkForNull(String s){
+       if (s!= null && !s.isEmpty()){
+               return s;
+       }
+       return null;
+    }
+
+
+    @Override
+    public Status getStatus() {
+        if (checkLog) {
+            return Status.CHECK_LOGS;
+        }
+        else if (success) {
+            return Status.SUCCESS;
+        }
+        else {
+            return Status.FAILURE;
+        }
+    }
+    
+    @Override
+       public void commit() {
+               engine.commit();
+               createDmaapFiles(dmaapMsgList);
+       }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{this.SERVICE_INSTANCE_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateSAREvcInventory";
+    }
+}
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartOne.java b/src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartOne.java
new file mode 100644 (file)
index 0000000..5f74835
--- /dev/null
@@ -0,0 +1,343 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.javatuples.Pair;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.*;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+
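+/**
+ * Part one of the bad widget model cleanup: finds widget "model" nodes whose model-invariant-id
+ * is not on the valid list read from widget-model-migration-input.csv and swings their incoming
+ * named-query-element "IsA" edges over to the corresponding valid model node.  Deletion of the
+ * bad nodes themselves is left to MigrateBadWidgetModelsPartTwo.
+ */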
+@Enabled
+@MigrationPriority(20)
+@MigrationDangerRating(100)
+public class MigrateBadWidgetModelsPartOne extends EdgeSwingMigrator {
+       private boolean success = true;
+       private final GraphTraversalSource g;
+       private int candidateCount = 0;
+       private int nqEdgeCount = 0;
+       
+       // migration restrictions that we will use for this migration
+       private final String NODE_TYPE_RESTRICTION = "named-query-element";
+       private final String EDGE_LABEL_RESTRICTION = "org.onap.relationships.inventory.IsA";
+       private final String EDGE_DIR_RESTRICTION = "IN";
+       
+       GraphTraversal<Vertex, Vertex> widgetModelTraversal;
+       GraphTraversal<Vertex, Vertex> widgetModelVersionTraversal;
+       GraphTraversal<Vertex, Vertex> validModVerTraversal;
+       GraphTraversal<Vertex, Vertex> widgetModelNqEdgeTraversal;
+
+
+
+       public MigrateBadWidgetModelsPartOne(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               this.g = this.engine.asAdmin().getTraversalSource();
+       }
+
+
+       @Override
+       public Status getStatus() {
+               if (success) {
+                       return Status.SUCCESS;
+               } else {
+                       return Status.FAILURE;
+               }
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.of(new String[]{"model", "named-query-element"});
+       }
+
+       @Override
+       public String getMigrationName() {
+               return "MigrateBadWidgetModelsPartOne";
+       }
+       
+       
+       /**
+        * Get the List of node pairs("from" and "to"), you would like EdgeSwingMigrator to migrate.
+        * @return
+        */
+       @Override
+       public List<Pair<Vertex, Vertex>> getAffectedNodePairs() {
+               
+               List<Pair<Vertex, Vertex>> fromToVertPairList = new ArrayList<Pair<Vertex, Vertex>>();
+               ArrayList<Vertex> badModVtxList = new ArrayList<Vertex>();
+               
+               logAndPrintInfo("--------- GET AFFECTED NODE PAIRS -------------");
+               // Read the widget-model-migration-input csv file to populate validModelInvariantIdHash,
+               // which is used to figure out which widget model data in the db has an invalid
+               // model-invariant-id.
+               ArrayList <String> fileLines = readInValidWidgetInfoFile();
+               
+               // validModelInvariantIdHash:  key = nodeType, value = valid model-invariant-id for that nodeType
+               //       Note - we currently only have one valid version per model for widget models.
+               HashMap <String,String> validModelInvariantIdHash = getModelInvariantIdHash( fileLines );
+               
+               // See what (widget) models are being used in the DB
+               widgetModelTraversal = this.engine.asAdmin().getTraversalSource().V()
+                       .has("aai-node-type", "model")
+                       .has("model-type", "widget");
+
+               if(!(widgetModelTraversal.hasNext())){
+                       logAndPrintInfo("unable to find widget models in database. ");
+               }
+               
+               while (widgetModelTraversal.hasNext()) {
+                       Vertex widgetModVertexInDb = widgetModelTraversal.next();
+                       String invId = widgetModVertexInDb.property("model-invariant-id").value().toString();
+                       if( validModelInvariantIdHash.containsValue(invId) ){
+                               // This is a valid model, we don't need to do anything with it.
+                               continue;
+                       }
+                       // For this bad widget model, need to look at the model-version node to
+                       //   find out what type of widget it is supposed to be so we can look up the correct invId.  
+                       // Note - We expect just one per model, but there could be more.
+                       logAndPrintInfo(" Found invalid widget model-invariant-id = [" + invId + "].");
+                       
+                       // We're using badModVtxList to help us figure out how many bad edges go with the 
+                       //  bad model nodes - which is really just for logging purposes.
+                       badModVtxList.add(widgetModVertexInDb);
+                       
+                       widgetModelVersionTraversal = this.engine.asAdmin().getTraversalSource()
+                    .V(widgetModVertexInDb)
+                    .in("org.onap.relationships.inventory.BelongsTo")
+                    .has("aai-node-type", "model-ver");
+                       
+                       if(!(widgetModelVersionTraversal.hasNext())){
+                               logAndPrintInfo("unable to find widget model version in database for model-invariant-id = [" + invId + "].");
+                       }
+
+                       while (widgetModelVersionTraversal.hasNext()) {
+                               Vertex widgetModVersionVertex = widgetModelVersionTraversal.next();
+                               String nodeType = widgetModVersionVertex.property("model-name").value().toString();
+                               logAndPrintInfo(" nodeType that goes with invalid widget model-invariant-id = [" + invId + "] is: [" + nodeType + "].");
+                               
+                               // Now we can use the nodeType to find the correct/valid model-invariant-id to use
+                               if( validModelInvariantIdHash.containsKey(nodeType) ){
+                                       // We know what the model-invariant-id SHOULD be, so swing edges from the invalid node to this valid one.
+                                       String validModInvId = validModelInvariantIdHash.get(nodeType);
+                                       Iterator<Vertex> toVtxItr= 
+                                                       this.g.V().has("model-invariant-id",validModInvId).has(AAIProperties.NODE_TYPE, "model");
+                                       int ct = 0;
+                                       while(toVtxItr.hasNext()) {
+                                               Vertex toValidVert = toVtxItr.next();
+                                               ct++;
+                                               if( ct == 1 ){
+                                                       fromToVertPairList.add(new Pair<>(widgetModVertexInDb, toValidVert));
+                                               }
+                                               else {
+                                                       logAndPrintInfo("ERROR - More than one model node found for model-invariant-id = [" + validModInvId + "].");
+                                               }
+                                       }
+                                       if( ct == 0 ){
+                                               logAndPrintInfo("unable to find model node in database for valid model-invariant-id = [" + validModInvId + "].");
+                                       }
+                               }
+                               else {
+                                       logAndPrintInfo("unable to find a valid widget model in database for model-name = [" + nodeType + "].");
+                               }
+                       }
+               }
+               candidateCount = fromToVertPairList.size();
+               
+               // For each of the bad model nodes, see how many actually have an IN edge from a named-query-element
+               for( int i = 0; i < badModVtxList.size(); i++ ){
+                       widgetModelNqEdgeTraversal = this.engine.asAdmin().getTraversalSource()
+                               .V(badModVtxList.get(i))
+                .in("org.onap.relationships.inventory.IsA")
+                .has("aai-node-type", "named-query-element");
+                       
+                       if(widgetModelNqEdgeTraversal.hasNext()) {
+                               nqEdgeCount++;
+                       }
+               }
+               
+               return fromToVertPairList;
+       }
+       
+       
+       public String getNodeTypeRestriction(){
+               return NODE_TYPE_RESTRICTION;
+       }
+
+       public String getEdgeLabelRestriction(){
+               return EDGE_LABEL_RESTRICTION;
+       }
+       
+       public String getEdgeDirRestriction(){
+               return EDGE_DIR_RESTRICTION;
+       }
+       
+       /**
+        * Clean up as appropriate after EdgeSwingMigrator has swung edges off of the "from"
+        * nodes in the node pairs returned by getAffectedNodePairs().
+        */
+       public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) {
+               
+               // Cleanup of model nodes will be done by the other migration script after the 
+               // model-ver records have edges swung off of them.
+               
+               // We're just going to give count of how many of these edges were found.
+               logAndPrintInfo(" >>>> SUMMARY for Migration of named-query-element to model edges: ");
+               logAndPrintInfo(" >>>>    Count of bad widget model nodes found: " + candidateCount );
+               logAndPrintInfo(" >>>>    Count of bad widget model nodes that have named-query-element edges: " + nqEdgeCount );
+               
+       }
+       
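+       // Reads widget-model-migration-input.csv (model-name,model-version-id,model-invariant-id per
+       // line) from under AJSC_HOME/BUNDLECONFIG_DIR and returns its non-empty lines; sets
+       // success=false if the file or the required system properties cannot be found.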
+       private ArrayList <String> readInValidWidgetInfoFile(){
+               
+               ArrayList <String> fileLines = new ArrayList <String> ();
+               String homeDir = System.getProperty("AJSC_HOME");
+               String configDir = System.getProperty("BUNDLECONFIG_DIR");
+               if (homeDir == null) {
+                       logAndPrintInfo("ERROR: Could not find sys prop AJSC_HOME");
+                       success = false;
+                       return fileLines;
+               }
+               if (configDir == null) {
+                       logAndPrintInfo("ERROR: Could not find sys prop BUNDLECONFIG_DIR");
+                       success = false;
+                       return fileLines;
+               }
+               String fileName = homeDir + "/" + configDir + "/" + "migration-input-files/widget-model-migration-data/widget-model-migration-input.csv";
+               try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
+                       String modelInfoLine;
+                       while ((modelInfoLine = br.readLine()) != null) {
+                               modelInfoLine = modelInfoLine.replace("\n", "").replace("\r", "");
+                               if (!modelInfoLine.isEmpty()) {
+                                       fileLines.add(modelInfoLine);
+                               }
+                       }
+
+               } 
+               catch (FileNotFoundException e) {
+                       logger.error("ERROR: Could not find file " + fileName, e);
+                       success = false;
+               } catch (IOException e) {
+                       logger.error("ERROR: Issue reading file " + fileName, e);
+                       success = false;
+               } catch (Exception e) {
+                       logger.error("encountered exception", e);
+                       e.printStackTrace();
+                       success = false;
+               }
+               return fileLines;
+       }
+       
+       
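+       // Builds a map of model-name -> valid model-version-id from the csv lines, manually mapping
+       // "vdc" to the "virtual-data-center" entry to cover known bad data in the db.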
+       HashMap <String,String> getModelVersionIdHash( ArrayList <String> fileLines ){
+               
+               HashMap <String, String> versionIdHash = new HashMap <String,String> ();
+               
+               if( fileLines == null ){
+                       logAndPrintInfo("ERROR: null fileLines array passed to getModelVersionIdHash");
+                       success = false;
+                       return versionIdHash;
+               }
+               
+               for(int i = 0; i < fileLines.size(); i++ ){
+                       String mLine = fileLines.get(i);
+                       String[] fields = mLine.split("\\,");
+                       if (fields.length != 3) {
+                               logAndPrintInfo("ERROR: row in data file did not contain 3 elements. should have: model-name,model-version-id,model-invariant-id on each line.");
+                               success = false;
+                       }
+                       else {
+                               versionIdHash.put(fields[0],fields[1]);
+                       }
+               }
+               
+               // Because of some bad data in the db, we will manually map the nodeType of "vdc" to what is 
+               //   the correct model info for "virtual-data-center".  Problem is that there is no vdc nodeType, but
+               //   there are named-queries pointing at a bad widget-model for "vdc".
+               String virtDataCenterVerId = versionIdHash.get("virtual-data-center");
+               if( virtDataCenterVerId != null ){
+                       versionIdHash.put("vdc",virtDataCenterVerId );
+               }
+               
+               return versionIdHash;
+       }
+                       
+       
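+       // Builds a map of model-name -> valid model-invariant-id from the csv lines, with the same
+       // manual "vdc" -> "virtual-data-center" mapping as getModelVersionIdHash.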
+       HashMap <String,String> getModelInvariantIdHash( ArrayList <String> fileLines ){
+               HashMap <String, String> invIdHash = new HashMap <String,String> ();
+               
+               if( fileLines == null ){
+                       logAndPrintInfo("ERROR: null fileLines array passed to getModelInvariantIdHash");
+                       success = false;
+                       return invIdHash;
+               }
+               
+               for(int i = 0; i < fileLines.size(); i++ ){
+                       String mLine = fileLines.get(i);
+                       String[] fields = mLine.split("\\,");
+                       if (fields.length != 3) {
+                               logAndPrintInfo("ERROR: row in data file did not contain 3 elements. should have: model-name,model-version-id,model-invariant-id on each line.");
+                               success = false;
+                       }
+                       else {
+                               invIdHash.put(fields[0],fields[2]);
+                       }
+               }
+               
+               // Because of some bad data in the db, we will manually map the nodeType of "vdc" to what is 
+               //   the correct model info for "virtual-data-center".  Problem is that there is no vdc nodeType, but
+               //   there are named-queries pointing at a bad widget-model for "vdc".
+               String virtDataCenterInvId = invIdHash.get("virtual-data-center");
+               if( virtDataCenterInvId != null ){
+                       invIdHash.put("vdc",virtDataCenterInvId );
+               }
+               
+               return invIdHash;
+       }
+       
+       /**
+        * Log and print.
+        *
+        * @param msg
+        *            the msg
+        */
+       protected void logAndPrintInfo(String msg) {
+               System.out.println(msg);
+               logger.info(msg);
+       }
+
+               
+                       
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartTwo.java b/src/main/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartTwo.java
new file mode 100644 (file)
index 0000000..bb525c3
--- /dev/null
@@ -0,0 +1,508 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import java.io.BufferedReader;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.javatuples.Pair;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.*;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+
+@Enabled
+@MigrationPriority(21)
+@MigrationDangerRating(100)
+public class MigrateBadWidgetModelsPartTwo extends EdgeSwingMigrator {
+       private boolean success = true;
+       private final GraphTraversalSource g;
+       
+       // NOTE -- this migration is for "model-ver" nodes only.  It needs to be run AFTER 
+       //   the MigrateBadWidgetModelsPartOne migration.
+       //  
+       
+       // migration restrictions that we will use for this migration
+       private final String NODE_TYPE_RESTRICTION = "model-element";
+       private final String EDGE_LABEL_RESTRICTION = "org.onap.relationships.inventory.IsA";
+       private final String EDGE_DIR_RESTRICTION = "IN";
+       
+       GraphTraversal<Vertex, Vertex> widgetModelTraversal;
+       GraphTraversal<Vertex, Vertex> widgetModelVersionTraversal;
+       GraphTraversal<Vertex, Vertex> validModVerTraversal;
+
+
+
+       public MigrateBadWidgetModelsPartTwo(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               this.g = this.engine.asAdmin().getTraversalSource();
+       }
+
+
+       @Override
+       public Status getStatus() {
+               if (success) {
+                       return Status.SUCCESS;
+               } else {
+                       return Status.FAILURE;
+               }
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.of(new String[]{"model", "model-element", "model-ver"});
+       }
+
+       @Override
+       public String getMigrationName() {
+               return "MigrateBadWidgetModelsPartTwo";
+       }
+       
+       
+       /**
+        * Get the List of node pairs("from" and "to"), you would like EdgeSwingMigrator to migrate.
+        * @return
+        */
+       @Override
+       public List<Pair<Vertex, Vertex>> getAffectedNodePairs() {
+               logAndPrintInfo("--------- GET AFFECTED NODE PAIRS -------------");
+               // Read the widget-model-migration-input csv file to populate validModelVersionIdHash and
+               // validModelInvariantIdHash, which are used to figure out which data is in the db with
+               // an invalid id.
+               ArrayList <String> fileLines = readInValidWidgetInfoFile();
+               
+               // validModelVersionIdHash:  key = nodeType, value = valid model-version-id for that nodeType
+               //       Note - we currently only have one valid version per model for widget models.
+               HashMap <String,String> validModelVersionIdHash = getModelVersionIdHash( fileLines ); 
+               
+               // validModelInvariantIdHash:  key = nodeType, value = valid model-invariant-id for that nodeType
+               //       Note - we currently only have one valid version per model for widget models.
+               HashMap <String,String> validModelInvariantIdHash = getModelInvariantIdHash( fileLines );
+               
+               // Now we will see what is actually in the DB
+               List<Pair<Vertex, Vertex>> fromToVertPairList = new ArrayList<Pair<Vertex, Vertex>>();
+               widgetModelTraversal = this.engine.asAdmin().getTraversalSource().V()
+                       .has("aai-node-type", "model")
+                       .has("model-type", "widget");
+
+               if(!(widgetModelTraversal.hasNext())){
+                       logAndPrintInfo("unable to find widget models in database. ");
+               }
+               
+               int validModelVerCount = 0;
+               while (widgetModelTraversal.hasNext()) {
+                       Vertex widgetModVertex = widgetModelTraversal.next();
+                       String invId = widgetModVertex.property("model-invariant-id").value().toString();
+                       
+                       // Find the model-version nodes that belong to this model.
+                       // We expect just one per model, but there could be more.
+                       widgetModelVersionTraversal = this.engine.asAdmin().getTraversalSource()
+                    .V(widgetModVertex)
+                    .in("org.onap.relationships.inventory.BelongsTo")
+                    .has("aai-node-type", "model-ver");
+                       
+                       if(!(widgetModelVersionTraversal.hasNext())){
+                               logAndPrintInfo("unable to find widget model version in database for model-invariant-id = [" + invId + "].");
+                       }
+
+                       while (widgetModelVersionTraversal.hasNext()) {
+                               Vertex widgetModVersionVertex = widgetModelVersionTraversal.next();
+                               String modVersionIdInDb = widgetModVersionVertex.property("model-version-id").value().toString();
+                               String nodeType = widgetModVersionVertex.property("model-name").value().toString();
+                               
+                               if( validModelVersionIdHash.containsKey(nodeType) ){
+                                       // We know what the model-version-id SHOULD be, so make sure we're using it.
+                                       String validModVerId = validModelVersionIdHash.get(nodeType);
+                                       if( !modVersionIdInDb.equals(validModVerId) ){
+                                               logAndPrintInfo(" Bad model-version-id found in DB for model-name = " + nodeType + ", verId = " + modVersionIdInDb );
+                                               validModVerTraversal = this.engine.asAdmin().getTraversalSource()
+                                                               .V()
+                                                               .has("model-version-id",validModVerId)
+                                                               .has("aai-node-type","model-ver");
+                                   if(!(validModVerTraversal.hasNext())){
+                                                       logAndPrintInfo("unable to find widget model version in database for valid model-version-id = [" + validModVerId + "].");
+                                               }
+                                   int ct = 0;
+                                   while (validModVerTraversal.hasNext()) {
+                                       ct++;
+                                                       if( ct > 1 ){
+                                                               logAndPrintInfo("ERROR - More than one model-ver found for model-version-id = [" + validModVerId + "].");
+                                                               break;
+                                                       }
+                                                       Vertex toVert = validModVerTraversal.next();
+                                                       fromToVertPairList.add(new Pair<>(widgetModVersionVertex, toVert));
+                                   }
+                                       }
+                                       else {
+                                               validModelVerCount++;
+                                               logAndPrintInfo("Valid model-version-id used in DB for model-name = [" + nodeType + "].");
+                                       }
+                               }
+                               else {
+                                       logAndPrintInfo("unable to find a valid widget model-ver in database for model-name = [" + nodeType + "].");
+                               }
+                       }
+               }
+                               
+               return fromToVertPairList;
+       }
+       
+       
+       public String getNodeTypeRestriction(){
+               return NODE_TYPE_RESTRICTION;
+       }
+
+       public String getEdgeLabelRestriction(){
+               return EDGE_LABEL_RESTRICTION;
+       }
+       
+       public String getEdgeDirRestriction(){
+               return EDGE_DIR_RESTRICTION;
+       }
+       
+       /**
+        * Clean up as appropriate after EdgeSwingMigrator has swung edges off of the invalid
+        * model-ver nodes in the node pairs returned by getAffectedNodePairs().
+        */
+       public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) {
+               
+               // The first node in each pair is the model-ver that we were migrating edges AWAY FROM because
+               //    it is an invalid model-ver node.
+               // Delete those as well as their parent model node (if the parent model node has no other users
+               //    and is not on the validModelInvIdList).
+               
+               int badModelVerCount = 0;
+               int modelVerDelCount = 0;
+               int modelDelCount = 0;
+               int parentPreventValidDelCount = 0;
+               
+               HashMap <String,String> parentPreventInEdgeIdHash = new HashMap <String,String> (); // using a hash so we can count the # of models, not edges to it.
+               HashMap <String,String> parentPreventOutEdgeIdHash = new HashMap <String,String> (); // using a hash so we can count the # of models, not edges to it.
+               HashMap <String,String> parentPreventIsaEdgeDelHash = new HashMap <String,String> (); // using a hash so we can count the # of models, not edges to it.
+               
+               ArrayList <String> fileLines = readInValidWidgetInfoFile();
+               // validModelInvariantIdHash:  key = nodeType, value = valid model-invariant-id for that nodeType
+               //       Note - we currently only have one valid version per model for widget models.
+               HashMap <String,String> validModelInvariantIdHash = getModelInvariantIdHash( fileLines );
+               
+               try {
+                       for (Pair<Vertex, Vertex> nodePair : nodePairL) {
+                               // The "fromNode" is the "bad/old" model-ver node that we moved off of
+                               badModelVerCount++;
+                               Vertex oldNode = nodePair.getValue0();  
+                               String oldModVerId = oldNode.property("model-version-id").value().toString();
+                               Vertex parentModelNode = null;
+                               
+                               //DOUBLE CHECK THAT THIS IS NOT a valid model-version-id
+                               
+                               
+                               boolean okToDelete = true;
+                               //---- delete the oldNode if the only edge it has is its "belongsTo/OUT" edge to its parent model.
+                               //     AND if its parent node does not have any named-query edges ("IsA" edges) pointing to it.
+                               Iterator <Edge> edgeInIter = oldNode.edges(Direction.IN);
+                               while( edgeInIter.hasNext() ){
+                                       Edge inE = edgeInIter.next();
+                                       Vertex otherSideNode4ThisEdge = inE.inVertex();
+                                       String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+                                       // If there are any IN edges, we won't delete this thing.
+                                       okToDelete = false;
+                                       logAndPrintInfo("We will not delete old model-ver node because it still has IN edges. This model-version-id = [" 
+                                                       + oldModVerId + "], has IN edge from a [" + otherSideNodeType + "] node. ");
+                               }
+                               if( okToDelete ){
+                                       // there were no IN edges; make sure the only OUT edge is to its parent model
+                                       Iterator <Edge> edgeOutIter = oldNode.edges(Direction.OUT);
+                                       int edgeCount = 0;
+                                       while( edgeOutIter.hasNext() ){
+                                               Edge badModVerE = edgeOutIter.next();
+                                               edgeCount++;
+                                               if( edgeCount > 1 ){
+                                                       // If there are more than one OUT edges, we won't delete this thing.
+                                                       okToDelete = false;
+                                                       parentModelNode = null;
+                                                       logAndPrintInfo("We will not delete old model-ver node because it still has > 1 OUT-edges.  model-version-id = [" + oldModVerId + "].");
+                                               }
+                                               else {
+                                                       String eLabel = badModVerE.label().toString();
+                                                       Vertex otherSideNode4ThisEdge = badModVerE.inVertex();
+                                                       String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+                                                       if( ! eLabel.equals("org.onap.relationships.inventory.BelongsTo") ){
+                                                               logAndPrintInfo("We will not delete old model-ver node because it still has a non 'belongsTo' OUT-edge.  model-version-id = [" 
+                                                                               + oldModVerId + "], edgeLabel = [" + eLabel + "] edge goes to a [" + otherSideNodeType + "]. ");
+                                                               okToDelete = false;
+                                                       }
+                                                       else {
+                                                               if( ! otherSideNodeType.equals("model") ){
+                                                                       logAndPrintInfo("We will not delete old model-ver node (model-version-id = [" + oldModVerId + "]) "
+                                                                               + " because it still has an OUT edge to a [" + otherSideNodeType + "] node. ");
+                                                                       okToDelete = false;
+                                                                       parentModelNode = null;
+                                                               }
+                                                               else {
+                                                                       parentModelNode = otherSideNode4ThisEdge;
+                                                                       String parentInvId = parentModelNode.property("model-invariant-id").value().toString();
+                                                                       Iterator <Edge> pInIter = parentModelNode.edges(Direction.IN);
+                                                                       while( pInIter.hasNext() ){
+                                                                               Edge inE = pInIter.next();
+                                                                               String inELabel = inE.label().toString();
+                                                                               if( ! inELabel.equals("org.onap.relationships.inventory.BelongsTo") ){
+                                                                                       Vertex otherSideNode = inE.outVertex();
+                                                                                       String otherSideNT = otherSideNode.value(AAIProperties.NODE_TYPE);
+                                                                                       // If there are any IN edges still on the parent,
+                                                                                       //   we won't delete this model-ver since once the model-ver
+                                                                                       //   is gone, its hard to know what nodeType the model was
+                                                                                       //   for - so it would be hard to know what valid model-invariant-id
+                                                                                       //   to migrate its edges to.
+                                                                                       okToDelete = false;
+                                                                                       parentPreventIsaEdgeDelHash.put(parentInvId,"");
+                                                                                       logAndPrintInfo("We will not delete old model-ver node because its"
+                                                                                                       + " parent model still has IN edges. The model with model-invariant-id = [" 
+                                                                                                       + parentInvId + "], has an non-belongsTo IN edge, label = [" 
+                                                                                                       + inELabel + "] from a [" + otherSideNT + "] node. ");
+                                                                               }
+                                                                       }
+                                                               }
+                                                       }
+                                               }
+                                       }
+                               }
+                                               
+                               if( okToDelete ){
+                                       logAndPrintInfo(" >>> DELETING model-ver node with model-version-id = [" + oldModVerId + "]" );
+                                       modelVerDelCount++;
+                                       oldNode.remove();
+                               }
+                               
+                               if( parentModelNode != null && okToDelete ){
+                                       // Delete the corresponding parent model IF it now has no 
+                                       //     edges anymore (and is not in our known valid model list)
+                                       //     and we were deleting the model-ver also.
+                                       boolean okToDelParent = true;
+                                       String parentModInvId = parentModelNode.property("model-invariant-id").value().toString();
+                                       
+                                       if( validModelInvariantIdHash.containsValue(parentModInvId) ){
+                                               okToDelParent = false;
+                                               logAndPrintInfo("We will not delete old model node because it is on our valid widget list. "
+                                                               + " model-invariant-id = [" + parentModInvId + "] ");
+                                               parentPreventValidDelCount++;
+                                       }       
+                                       else {
+                                               Iterator <Edge> pInIter = parentModelNode.edges(Direction.IN);
+                                               while( pInIter.hasNext() ){
+                                                       Edge inE = pInIter.next();
+                                                       String inELabel = inE.label().toString();
+                                                       Vertex otherSideNode4ThisEdge = inE.outVertex();
+                                                       String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+                                                       // If there are any IN edges, we won't delete this thing.
+                                                       okToDelParent = false;
+                                                       parentPreventInEdgeIdHash.put(parentModInvId, "");
+                                                       logAndPrintInfo("We will not delete old model node (yet) because it still has IN edges. This model-invariant-id = [" 
+                                                               + parentModInvId + "], has IN edge, label = [" 
+                                                               + inELabel + "] from a [" + otherSideNodeType + "] node. ");
+                                               }
+                                               Iterator <Edge> pOutIter = parentModelNode.edges(Direction.OUT);
+                                               while( pOutIter.hasNext() ){
+                                                       Edge outE = pOutIter.next();
+                                                       String outELabel = outE.label().toString();
+                                                       Vertex otherSideNode4ThisEdge = outE.inVertex();
+                                                       String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);
+                                                       // If there are any OUT edges, we won't delete this thing.
+                                                       okToDelParent = false;
+                                                       parentPreventOutEdgeIdHash.put(parentModInvId, "");
+                                                       logAndPrintInfo("We will not delete old model node because it still has OUT edges. This model-invariant-id = [" 
+                                                               + parentModInvId + "], has OUT edge, label = [" 
+                                                               + outELabel + "]  to a [" + otherSideNodeType + "] node. ");
+                                               }
+                                       }
+                               
+                                       if( okToDelParent ){
+                                               if( parentPreventInEdgeIdHash.containsKey(parentModInvId) ){
+                                                       // This parent had been prevented from being deleted until all its 
+                                                       // child model-ver's were deleted (it must have had more than one).
+                                                       // So we can now remove it from the list of parent guys that
+                                                       // could not be deleted.
+                                                       parentPreventInEdgeIdHash.remove(parentModInvId);
+                                               }
+                                               logAndPrintInfo(" >>> DELETING model node which was the parent of model-ver with model-version-id = [" 
+                                                               + oldModVerId + "]. This model-invariant-id = [" + parentModInvId + "]" );
+                                               modelDelCount++;
+                                               parentModelNode.remove();
+                                       }
+                               }
+                       }
+                       
+                       logAndPrintInfo(" >>> SUMMARY: total number of bad model-ver nodes found = " + badModelVerCount );
+                       logAndPrintInfo(" >>> SUMMARY: number of model-ver nodes deleted = " + modelVerDelCount );
+                       logAndPrintInfo(" >>> SUMMARY: number of model nodes deleted = " + modelDelCount );
+                       logAndPrintInfo(" >>> SUMMARY: number of model-ver nodes not deleted because their PARENT still had IsA edges = " 
+                                       + parentPreventIsaEdgeDelHash.size() );
+                       logAndPrintInfo(" >>> SUMMARY: number of model nodes not deleted because they were valid = " 
+                                       + parentPreventValidDelCount);
+                       logAndPrintInfo(" >>> SUMMARY: number of model nodes not deleted because they had IN edges = " 
+                                       + parentPreventInEdgeIdHash.size() );
+                       logAndPrintInfo(" >>> SUMMARY: number of model nodes not deleted because they had OUT edges = " 
+                                       + parentPreventOutEdgeIdHash.size() );
+                       
+                       
+               } catch (Exception e) {
+                       logger.error("error encountered", e );
+                       success = false;
+               }       
+               
+       }
+       
+       private ArrayList <String> readInValidWidgetInfoFile(){
+               
+               ArrayList <String> fileLines = new ArrayList <String> ();
+               String homeDir = System.getProperty("AJSC_HOME");
+               String configDir = System.getProperty("BUNDLECONFIG_DIR");
+               if (homeDir == null) {
+                       logAndPrintInfo("ERROR: Could not find sys prop AJSC_HOME");
+                       success = false;
+                       return fileLines;
+               }
+               if (configDir == null) {
+                       logAndPrintInfo("ERROR: Could not find sys prop BUNDLECONFIG_DIR");
+                       success = false;
+                       return fileLines;
+               }
+               String fileName = homeDir + "/" + configDir + "/" + "migration-input-files/widget-model-migration-data/widget-model-migration-input.csv";
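+               // Expected file layout (derived from the parsing below): each non-empty line holds
+               //   model-name,model-version-id,model-invariant-id
+               // e.g. (placeholder values, not real data): connector,<model-version-id>,<model-invariant-id>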
+               try (BufferedReader br = new BufferedReader(new FileReader(fileName))) {
+                       String modelInfoLine;
+                       while ((modelInfoLine = br.readLine()) != null) {
+                               modelInfoLine = modelInfoLine.replace("\n", "").replace("\r", "");
+                               if (!modelInfoLine.isEmpty()) {
+                                       fileLines.add(modelInfoLine);
+                               }
+                       }
+               } 
+               catch (FileNotFoundException e) {
+                       logger.error("ERROR: Could not find file " + fileName, e);
+                       success = false;
+               } catch (IOException e) {
+                       logger.error("ERROR: Issue reading file " + fileName, e);
+                       success = false;
+               } catch (Exception e) {
+                       logger.error("encountered exception", e);
+                       e.printStackTrace();
+                       success = false;
+               }
+               return fileLines;
+       }
+       
+       
+       HashMap <String,String> getModelVersionIdHash( ArrayList <String> fileLines ){
+               
+               HashMap <String, String> versionIdHash = new HashMap <String,String> ();
+               
+               if( fileLines == null ){
+                       logAndPrintInfo("ERROR: null fileLines array passed to getModelVersionIdHash");
+                       success = false;
+                       return versionIdHash;
+               }
+               
+               for(int i = 0; i < fileLines.size(); i++ ){
+                       String mLine = fileLines.get(i);
+                       String[] fields = mLine.split("\\,");
+                       if (fields.length != 3) {
+                               logAndPrintInfo("ERROR: row in data file did not contain 3 elements. Each line should have: model-name,model-version-id,model-invariant-id.");
+                               success = false;
+                       }
+                       else {
+                               versionIdHash.put(fields[0],fields[1]);
+                       }
+               }
+               
+               // Because of some bad data in the db, we will manually map the nodeType of "vdc" to what is 
+               //   the correct model info for "virtual-data-center".  Problem is that there is no vdc nodeType, but
+               //   there are named-queries pointing at a bad widget-model for "vdc".
+               String virtDataCenterVerId = versionIdHash.get("virtual-data-center");
+               if( virtDataCenterVerId != null ){
+                       versionIdHash.put("vdc",virtDataCenterVerId );
+               }
+               
+               return versionIdHash;
+       }
+                       
+       
+       HashMap <String,String> getModelInvariantIdHash( ArrayList <String> fileLines ){
+               HashMap <String, String> invIdHash = new HashMap <String,String> ();
+               
+               if( fileLines == null ){
+                       logAndPrintInfo("ERROR: null fileLines array passed to getModelInvariantIdHash");
+                       success = false;
+                       return invIdHash;
+               }
+               
+               for(int i = 0; i < fileLines.size(); i++ ){
+                       String mLine = fileLines.get(i);
+                       String[] fields = mLine.split("\\,");
+                       if (fields.length != 3) {
+                               logAndPrintInfo("ERROR: row in data file did not contain 3 elements. Each line should have: model-name,model-version-id,model-invariant-id.");
+                               success = false;
+                       }
+                       else {
+                               invIdHash.put(fields[0],fields[2]);
+                       }
+               }
+               
+               // Because of some bad data in the db, we will manually map the nodeType of "vdc" to what is 
+               //   the correct model info for "virtual-data-center".  Problem is that there is no vdc nodeType, but
+               //   there are named-queries pointing at a bad widget-model for "vdc".
+               String virtDataCenterInvId = invIdHash.get("virtual-data-center");
+               if( virtDataCenterInvId != null ){
+                       invIdHash.put("vdc",virtDataCenterInvId );
+               }
+               return invIdHash;
+       }
+       
+       /**
+        * Log and print.
+        *
+        * @param msg
+        *            the msg
+        */
+       protected void logAndPrintInfo(String msg) {
+               System.out.println(msg);
+               logger.info(msg);
+       }
+
+               
+                       
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateEdgesBetweenVnfcAndVfModule.java b/src/main/java/org/onap/aai/migration/v13/MigrateEdgesBetweenVnfcAndVfModule.java
new file mode 100644 (file)
index 0000000..3e09c51
--- /dev/null
@@ -0,0 +1,83 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+/*-
+* ============LICENSE_START=======================================================
+* org.openecomp.aai
+* ================================================================================
+* Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+* ================================================================================
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+* 
+*      http://www.apache.org/licenses/LICENSE-2.0
+* 
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+* ============LICENSE_END=========================================================
+* */
+
+package org.onap.aai.migration.v13;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+
+import org.javatuples.Pair;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.EdgeMigrator;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
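+/**
+ * Refreshes the edge properties on existing edges between vf-module and vnfc nodes so that
+ * they match the current edge rules; this description is inferred from the EdgeMigrator base
+ * class and the node pair returned by getAffectedNodePairTypes() below, not from separate docs.
+ */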
+@MigrationPriority(10)
+@MigrationDangerRating(100)
+@Enabled
+public class MigrateEdgesBetweenVnfcAndVfModule extends EdgeMigrator {
+
+       public MigrateEdgesBetweenVnfcAndVfModule(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.empty();
+       }
+
+       @Override
+       public List<Pair<String, String>> getAffectedNodePairTypes() {
+               logger.info("Starting migration to update edge properties between vf-module and vnfc....");
+               List<Pair<String, String>> nodePairList = new ArrayList<Pair<String, String>>();
+               nodePairList.add(new Pair<>("vf-module", "vnfc"));
+               return nodePairList;
+       }
+       
+       @Override
+       public String getMigrationName() {
+               return "migrate-edge-vnfc-and-vf-module";
+       }
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateForwarderEvcCircuitId.java b/src/main/java/org/onap/aai/migration/v13/MigrateForwarderEvcCircuitId.java
new file mode 100644 (file)
index 0000000..3f90934
--- /dev/null
@@ -0,0 +1,297 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+/*-
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+
+@MigrationPriority(26)
+@MigrationDangerRating(100)
+@Enabled
+public class MigrateForwarderEvcCircuitId extends Migrator {
+
+       private final String PNF_NODE_TYPE = "pnf";
+       private final String PROPERTY_PNF_NAME = "pnf-name";
+       private final String PROPERTY_INTERFACE_NAME = "interface-name";
+       private final String PROPERTY_FORWARDER_ROLE = "forwarder-role";
+       private final String VALUE_INGRESS = "ingress";
+       private final String PROPERTY_SEQUENCE = "sequence";
+       private final int VALUE_EXPECTED_SEQUENCE = 1;
+       private final String FORWARDER_EVC_NODE_TYPE = "forwarder-evc";
+       private final String PROPERTY_CIRCUIT_ID = "circuit-id";
+       
+       private static boolean success = true;
+    private static boolean checkLog = false;
+    private static GraphTraversalSource g = null;
+    private int headerLength;
+    private int migrationSuccess = 0;
+    private int migrationFailure = 0;
+    
+    private static List<String> dmaapMsgList = new ArrayList<String>();
+    private static final String homeDir = System.getProperty("AJSC_HOME");
+
+    protected class CircuitIdFileData {
+               String pnfName;
+       String interfaceName;
+
+               String oldCircuitId;
+       String newCircuitId;
+       
+       public String getPnfName() {
+                       return pnfName;
+               }
+               public void setPnfName(String pnfName) {
+                       this.pnfName = pnfName;
+               }
+               public String getInterfaceName() {
+                       return interfaceName;
+               }
+               public void setInterfaceName(String interfaceName) {
+                       this.interfaceName = interfaceName;
+               }
+               
+               public String getOldCircuitId() {
+                       return oldCircuitId;
+               }
+               public void setOldCircuitId(String oldCircuitId) {
+                       this.oldCircuitId = oldCircuitId;
+               }
+               public String getNewCircuitId() {
+                       return newCircuitId;
+               }
+               public void setNewCircuitId(String newCircutId) {
+                       this.newCircuitId = newCircutId;
+               }
+    }
+    
+    private static ArrayList<CircuitIdFileData> circuitIdList = new ArrayList<CircuitIdFileData>();
+       
+    public MigrateForwarderEvcCircuitId(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.g = this.engine.asAdmin().getTraversalSource();
+    }
+
+    @Override
+    public void run() {
+        logger.info("---------- Start migration ----------");
+        String configDir = System.getProperty("BUNDLECONFIG_DIR");
+        if (homeDir == null) {
+            logger.info(this.MIGRATION_ERROR + "ERROR: Could not find sys prop AJSC_HOME");
+            success = false;
+            return;
+        }
+        if (configDir == null) {
+            logger.info(this.MIGRATION_ERROR + "ERROR: Could not find sys prop BUNDLECONFIG_DIR");
+            success = false;
+            return;
+        }
+        
+        String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/";
+        int fileLineCounter = 0;
+        String fileName = feedDir+ "circuitIds.csv";
+        logger.info(fileName);
+        logger.info("---------- Processing Entries from file  ----------");
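+        // circuitIds.csv layout assumed from the parsing below: a 6-column header row followed by
+        // data rows, of which columns 0 (pnf-name), 1 (interface-name), 2 (old circuit-id) and
+        // 4 (new circuit-id) are read; the remaining columns are ignored.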
+        try  {
+               List<String> lines = Files.readAllLines(Paths.get(fileName));
+            Iterator<String> lineItr = lines.iterator();
+            while (lineItr.hasNext()){
+                String line = lineItr.next().replace("\n", "").replace("\r", "");
+                if (!line.isEmpty()) {
+                    if (fileLineCounter != 0) {
+                        String[] colList = line.split("\\s*,\\s*", -1);
+                        CircuitIdFileData lineData = new CircuitIdFileData();
+                        lineData.setPnfName(colList[0].replaceAll("^\"|\"$", "")
+                                       .replaceAll("[\t\n\r]+", "").trim());
+                        lineData.setInterfaceName(colList[1].replaceAll("^\"|\"$", "")
+                                       .replaceAll("[\t\n\r]+", "").trim());
+                        lineData.setOldCircuitId(colList[2].replaceAll("^\"|\"$", "")
+                                       .replaceAll("[\t\n\r]+", "").trim());
+                        lineData.setNewCircuitId(colList[4].replaceAll("^\"|\"$", "")
+                                       .replaceAll("[\t\n\r]+", "").trim());
+                        circuitIdList.add(lineData);
+                       
+                    } else {
+                        this.headerLength = line.split("\\s*,\\s*", -1).length;
+                        logger.info("headerLength: " + headerLength + "\n");
+                        if (this.headerLength != 6){
+                            logger.info(this.MIGRATION_ERROR + "ERROR: Input file should have 6 columns");
+                            this.success = false;
+                            return;
+                        }
+                    }
+                }
+                fileLineCounter++;
+            }
+            updateCircuitIdCount();
+            logger.info ("\n \n ******* Final Summary for Circuit Id Migration ********* \n");
+            logger.info(this.MIGRATION_SUMMARY_COUNT + "CircuitIds processed: "+ migrationSuccess);
+            logger.info(this.MIGRATION_SUMMARY_COUNT + "Total Rows Count: "+(fileLineCounter + 1));
+            logger.info(this.MIGRATION_SUMMARY_COUNT + "Unprocessed CircuitIds : "+ migrationFailure +"\n");
+     
+        } catch (FileNotFoundException e) {
+            logger.info(this.MIGRATION_ERROR + "ERROR: Could not find file " + fileName, e.getMessage());
+            success = false;
+            checkLog = true;
+        } catch (IOException e) {
+            logger.info(this.MIGRATION_ERROR + "ERROR: Issue reading file " + fileName, e);
+            success = false;
+        } catch (Exception e) {
+            logger.info(this.MIGRATION_ERROR + "encountered exception", e);
+            e.printStackTrace();
+            success = false;
+        }
+    }
+       
+       private void updateCircuitIdCount() {
+               int numberOfLines = circuitIdList.size();
+               for(int i = 0; i < numberOfLines; i ++) {
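+                       // As coded, the traversal starts at the pnf, steps to the interface that BindsTo it
+                       // (matched by interface-name), then to the ingress forwarder (sequence 1) that ForwardsTo
+                       // that interface, follows its Uses edge outward and finally collects the nodes that
+                       // BelongsTo that target -- the forwarder-evc vertices whose circuit-id is updated below.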
+                       GraphTraversal<Vertex, Vertex> nodeList = g.V().has(this.PROPERTY_PNF_NAME, circuitIdList.get(i).getPnfName())
+                                       .has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).in("tosca.relationships.network.BindsTo")
+                                       .has(this.PROPERTY_INTERFACE_NAME, circuitIdList.get(i).getInterfaceName()).in("org.onap.relationships.inventory.ForwardsTo")
+                                       .has(this.PROPERTY_FORWARDER_ROLE, this.VALUE_INGRESS).has(this.PROPERTY_SEQUENCE, this.VALUE_EXPECTED_SEQUENCE)
+                                       .out("org.onap.relationships.inventory.Uses").in("org.onap.relationships.inventory.BelongsTo");
+                               if(!nodeList.hasNext()) {
+                                       logger.info(this.MIGRATION_ERROR + "ERROR: Failure to update Circuit Id " + circuitIdList.get(i).getOldCircuitId() + 
+                                                       " to " + circuitIdList.get(i).getNewCircuitId() + " Graph Traversal failed \n");
+                                       migrationFailure++;
+                               }
+                               while (nodeList.hasNext()) {
+                                       Vertex forwarderEvcVtx = nodeList.next();
+                                       boolean updateSuccess = false;
+                                       if (forwarderEvcVtx != null) {
+                                               logger.info("forwarder-evc-id is " + forwarderEvcVtx.value("forwarder-evc-id"));
+                                               if(forwarderEvcVtx.property(PROPERTY_CIRCUIT_ID).isPresent() && 
+                                                  forwarderEvcVtx.value(PROPERTY_CIRCUIT_ID).equals(circuitIdList.get(i).getNewCircuitId())) {
+                                                       logger.info("Skipping Record: Old Collector CircuitId " + forwarderEvcVtx.value(PROPERTY_CIRCUIT_ID) + 
+                                                                       " is the same as New Collector CircuitId " + circuitIdList.get(i).getNewCircuitId() + "\n");
+                                                       migrationFailure++;
+                                               }
+                                               else if(!circuitIdList.get(i).getNewCircuitId().isEmpty() &&
+                                                       forwarderEvcVtx.property(PROPERTY_CIRCUIT_ID).isPresent() && 
+                                                       circuitIdList.get(i).getOldCircuitId().equals(forwarderEvcVtx.value(PROPERTY_CIRCUIT_ID)))
+                                               {
+                                                       try {
+                                                               forwarderEvcVtx.property(PROPERTY_CIRCUIT_ID, circuitIdList.get(i).getNewCircuitId());
+                                                               this.touchVertexProperties(forwarderEvcVtx, false);
+                                                               updateSuccess = true;
+                                                               
+                                                       } catch (Exception e) {
+                                                               logger.info(e.toString());
+                                                               logger.info(this.MIGRATION_ERROR + "ERROR: Failure to update Circuit Id " + circuitIdList.get(i).getOldCircuitId() + 
+                                                                               " to " + circuitIdList.get(i).getNewCircuitId() + "\n");
+                                                               migrationFailure++;
+                                                               
+                                                       }
+                                                       if(updateSuccess) {
+                                                               String dmaapMsg = System.nanoTime() + "_" + forwarderEvcVtx.id().toString() + "_"       + 
+                                                                               forwarderEvcVtx.value("resource-version").toString();
+                                                               dmaapMsgList.add(dmaapMsg);
+                                                               logger.info("Update of Circuit Id " + circuitIdList.get(i).getOldCircuitId() + " to " +
+                                                                               circuitIdList.get(i).getNewCircuitId() + " successful \n");
+                                                               migrationSuccess++;
+                                                       }
+                                               }
+                                               else if(!forwarderEvcVtx.property(PROPERTY_CIRCUIT_ID).isPresent())     
+                                               {
+                                                       logger.info(this.MIGRATION_ERROR + "ERROR: Old Collector Circuit Id not found " + circuitIdList.get(i).getOldCircuitId() + 
+                                                                       " was not updated to " + circuitIdList.get(i).getNewCircuitId() + "\n");
+                                                       migrationFailure++;
+                                               }
+                                               else {
+                                                       logger.info(this.MIGRATION_ERROR + "ERROR: Failure to update Circuit Id " + circuitIdList.get(i).getOldCircuitId() + 
+                                                                       " to " + circuitIdList.get(i).getNewCircuitId() + "\n");
+                                                       migrationFailure++;
+                                               }
+                                       }
+                               }
+               }
+               
+       }
+       
+    @Override
+    public Status getStatus() {
+        if (checkLog) {
+            return Status.CHECK_LOGS;
+        }
+        else if (success) {
+            return Status.SUCCESS;
+        }
+        else {
+            return Status.FAILURE;
+        }
+    }
+    
+    @Override
+       public void commit() {
+               engine.commit();
+               createDmaapFiles(dmaapMsgList);
+       }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{this.FORWARDER_EVC_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateForwarderEvcCircuitId";
+    }
+}
diff --git a/src/main/java/org/onap/aai/migration/v14/MigrateGenericVnfMgmtOptions.java b/src/main/java/org/onap/aai/migration/v14/MigrateGenericVnfMgmtOptions.java
new file mode 100644 (file)
index 0000000..d32ce81
--- /dev/null
@@ -0,0 +1,103 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v14;\r
+\r
+import java.util.ArrayList;\r
+import java.util.HashMap;\r
+import java.util.List;\r
+import java.util.Map;\r
+import java.util.Optional;\r
+\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.migration.MigrationDangerRating;\r
+import org.onap.aai.migration.MigrationPriority;\r
+import org.onap.aai.migration.Status;\r
+import org.onap.aai.migration.ValueMigrator;\r
+import org.onap.aai.migration.Enabled;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+import org.onap.aai.setup.SchemaVersions;\r
+\r
+\r
+@MigrationPriority(1)\r
+@MigrationDangerRating(1)\r
+@Enabled\r
+public class MigrateGenericVnfMgmtOptions extends ValueMigrator {\r
+\r
+       protected static final String VNF_NODE_TYPE = "generic-vnf";\r
+       \r
+       \r
+       private static Map<String, Map> map;\r
+    private static Map<String, String> pair1;\r
+    private static Map<String, List<String>> conditionsMap;\r
+    \r
+       public MigrateGenericVnfMgmtOptions(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {\r
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, setMgmtOptions(), setConditionsMap(), false);\r
+               \r
+       }\r
+       \r
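+       // ValueMigrator configuration (inferred from the constructor arguments below): sets\r
+       // management-option to "AT&T Managed-Basic" on generic-vnf nodes whose vnf-type is one of\r
+       // the values returned by setConditionsMap() (HN, HP, HG).\r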
+       private static Map<String, Map> setMgmtOptions(){\r
+               map = new HashMap<>();\r
+        pair1 = new HashMap<>();      \r
+\r
+               pair1.put("management-option", "AT&T Managed-Basic");           \r
+               map.put("generic-vnf", pair1);\r
+               \r
+        return map;\r
+       }\r
+       \r
+       \r
+\r
+       public static Map<String, List<String>> setConditionsMap() {\r
+               List<String> conditionsList = new ArrayList<String>();\r
+               conditionsMap = new HashMap<>();\r
+        \r
+               conditionsList.add("HN");\r
+        conditionsList.add("HP");\r
+        conditionsList.add("HG");\r
+        \r
+        conditionsMap.put("vnf-type", conditionsList);\r
+        \r
+        return conditionsMap;\r
+       }\r
+\r
+       @Override\r
+       public Status getStatus() {\r
+               return Status.SUCCESS;\r
+       }\r
+\r
+       @Override\r
+       public Optional<String[]> getAffectedNodeTypes() {\r
+               return Optional.of(new String[]{VNF_NODE_TYPE});\r
+       }\r
+\r
+       @Override\r
+       public String getMigrationName() {\r
+               return "MigrateGenericVnfMgmtOptions";\r
+       }\r
+       \r
+       @Override\r
+       public boolean isUpdateDmaap(){\r
+               return true;\r
+       }\r
+       \r
+\r
+}\r
diff --git a/src/main/java/org/onap/aai/migration/v14/MigrateMissingFqdnOnPservers.java b/src/main/java/org/onap/aai/migration/v14/MigrateMissingFqdnOnPservers.java
new file mode 100644 (file)
index 0000000..77ecc7e
--- /dev/null
@@ -0,0 +1,142 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v14;
+
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateMissingFqdnOnPservers extends Migrator {
+
+       protected static final String PSERVER_NODE_TYPE = "pserver";
+       protected static final String PSERVER_FQDN = "fqdn";
+       protected static final String PSERVER_HOSTNAME = "hostname";
+       protected static final String PSERVER_SOURCEOFTRUTH = "source-of-truth";
+       
+       private boolean success = true;
+       private GraphTraversalSource g = null;
+       
+       protected final AtomicInteger falloutRowsCount = new AtomicInteger(0);
+
+       public MigrateMissingFqdnOnPservers(TransactionalGraphEngine engine, LoaderFactory loaderFactory,
+                       EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+       }
+
+       @Override
+       public void run() {
+               logger.info("---------- Start Updating fqdn for pserver  ----------");
+
+               try {
+                       int pserverCount = 0;
+                       int pserverUpdatedCount = 0;
+                       int pserverSkippedCount = 0;
+                       int pserverErrorCount = 0;
+                       int pserverWithMissingSOTCount = 0;
+
+                       GraphTraversal<Vertex, Vertex> pserverList = this.engine.asAdmin().getTraversalSource().V()
+                                       .has(AAIProperties.NODE_TYPE, PSERVER_NODE_TYPE).union(__.hasNot(PSERVER_FQDN),__.has(PSERVER_FQDN,"")); // gets the list of pservers with a missing or empty fqdn
+                       
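+                       // For each matching pserver, the hostname is copied into fqdn as-is; a hostname without
+                       // a '.' (e.g. the placeholder "myhost" rather than "myhost.mydomain.com") is treated as
+                       // an invalid format and skipped.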
+                        while (pserverList.hasNext()) {
+                               pserverCount++;
+                       Vertex vertex = pserverList.next();
+                               String hostname = null;
+                               String sourceOfTruth = null;
+                               hostname = vertex.property(PSERVER_HOSTNAME).value().toString();
+                               
+                               if(vertex.property(PSERVER_SOURCEOFTRUTH).isPresent()) {
+                                       sourceOfTruth = vertex.property(PSERVER_SOURCEOFTRUTH).value().toString();
+                               }else {
+                                       logger.info("Missing source of truth for hostname : " + hostname);
+                                       pserverWithMissingSOTCount++;
+                               }
+                               
+                               if (!hostname.contains(".")) {
+                                       logger.info("Invalid format hostname :" + hostname + " and its source of truth is : " + sourceOfTruth);
+                                       pserverSkippedCount++;
+                                       continue;
+                               }
+
+                               try {
+                                       vertex.property(PSERVER_FQDN, hostname);
+                                       this.touchVertexProperties(vertex, false);
+                                       logger.info("Updated fqdn from hostname : " + hostname + " and its source of truth is : " + sourceOfTruth);
+                                       pserverUpdatedCount++;
+                               } catch (Exception e) {
+                                       success = false;
+                                       pserverErrorCount++;
+                                       logger.error(MIGRATION_ERROR + "encountered exception for fqdn update for pserver with hostname :" + hostname
+                                                       + " and source of truth : " + sourceOfTruth, e);
+                               }
+                       }
+                       
+                       logger.info("\n \n ******* Final Summary of Updated fqdn for pserver  Migration ********* \n");
+                       logger.info(MIGRATION_SUMMARY_COUNT + "Total Number of pservers with missing or empty fqdn : "+pserverCount + "\n");
+                       logger.info(MIGRATION_SUMMARY_COUNT + "Number of pservers updated: " + pserverUpdatedCount + "\n");
+                       logger.info(MIGRATION_SUMMARY_COUNT + "Number of pservers invalid: " + pserverSkippedCount + "\n");
+                       logger.info(MIGRATION_SUMMARY_COUNT + "Number of pservers failed to update due to error : " + pserverErrorCount + "\n");
+                       logger.info(MIGRATION_SUMMARY_COUNT + "Number of pservers with missing source of truth: " + pserverWithMissingSOTCount + "\n");
+                       
+               } catch (Exception e) {
+                       logger.info("encountered exception", e);
+                       success = false;
+               }
+       }
+
+       @Override
+       public Status getStatus() {
+               if (success) {
+                       return Status.SUCCESS;
+               } else {
+                       return Status.FAILURE;
+               }
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.of(new String[] { PSERVER_NODE_TYPE });
+       }
+
+       @Override
+       public String getMigrationName() {
+               return "MigrateMissingFqdnOnPserver";
+       }
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/migration/v14/MigrateNetworkTechToCloudRegion.java b/src/main/java/org/onap/aai/migration/v14/MigrateNetworkTechToCloudRegion.java
new file mode 100644 (file)
index 0000000..afdea57
--- /dev/null
@@ -0,0 +1,172 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v14;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateNetworkTechToCloudRegion extends Migrator{
+
+    protected static final String CLOUDREGION_NODETYPE = "cloud-region";
+    protected static final String CLOUD_OWNER = "cloud-owner";
+    protected static final String NETWORK_TECHNOLOGY_NODETYPE = "network-technology";
+    protected static final String NETWORK_TECHNOLOGY_ID = "network-technology-id";
+    protected static final String NETWORK_TECHNOLOGY_NAME = "network-technology-name";
+    
+
+    private boolean success = true;
+    
+    private static List<String> dmaapMsgList = new ArrayList<String>();
+    
+     
+    public MigrateNetworkTechToCloudRegion(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+    }
+
+    @Override
+    public void run() {
+
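+       // Cloud-owner to network-technology mapping applied below: att-aic regions get edges to the
+       // CONTRAIL and AIC_SR_IOV network-technology nodes, att-nc regions to OVS and STANDARD-SR-IOV.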
+       List<Vertex> cloudRegionVertexList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, CLOUDREGION_NODETYPE).has(CLOUD_OWNER,"att-aic").toList();
+       logger.info("Number of cloud-region with cloud-owner att-aic : " + cloudRegionVertexList.size());
+       createEdges(cloudRegionVertexList, "CONTRAIL");
+        createEdges(cloudRegionVertexList, "AIC_SR_IOV");
+        
+        cloudRegionVertexList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, CLOUDREGION_NODETYPE).has(CLOUD_OWNER,"att-nc").toList();
+        logger.info("Number of cloud-region with cloud-owner att-nc : " + cloudRegionVertexList.size());
+       createEdges(cloudRegionVertexList, "OVS");
+        createEdges(cloudRegionVertexList, "STANDARD-SR-IOV");
+
+        }
+
+    private void createEdges(List<Vertex> sourceVertexList, String networkTechName)
+       {
+               int networkTechEdgeCount = 0;
+               int networkTechEdgeErrorCount = 0;
+
+               List<Vertex> networkTechVertexList = this.engine.asAdmin().getTraversalSource().V()
+                               .has(AAIProperties.NODE_TYPE, NETWORK_TECHNOLOGY_NODETYPE).has(NETWORK_TECHNOLOGY_NAME, networkTechName)
+                               .toList();
+               
+               logger.info("---------- Start Creating an Edge from cloud-region to network-technology nodes with network-technology-name " + networkTechName + "  ----------");
+
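+               // For every cloud-region passed in, look for an existing out-edge to the matching
+               // network-technology node (by name and id); only when none is found is a new cousin
+               // edge created and the cloud-region queued for a DMaaP event.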
+               for (Vertex cloudRegionVertex : sourceVertexList) {
+
+                       try {
+
+                               for (Vertex networkVertex : networkTechVertexList) {
+                                       if (networkVertex != null) {
+                                               boolean edgePresent = false;
+                                               // Check if edge already exists for each of the source vertex
+                                               List<Vertex> outVertexList = this.engine.asAdmin().getTraversalSource().V(cloudRegionVertex)
+                                                               .out().has(AAIProperties.NODE_TYPE, NETWORK_TECHNOLOGY_NODETYPE)
+                                                               .has(NETWORK_TECHNOLOGY_NAME, networkTechName).has(NETWORK_TECHNOLOGY_ID,
+                                                                               networkVertex.property(NETWORK_TECHNOLOGY_ID).value().toString())
+                                                               .toList();
+                                               Iterator<Vertex> vertexItr = outVertexList.iterator();
+                                               if (outVertexList != null && !outVertexList.isEmpty() && vertexItr.hasNext()) {
+                                                       logger.info("\t Edge already exists from " + CLOUDREGION_NODETYPE + " with " + CLOUD_OWNER
+                                                                       + " and cloud-region-id "
+                                                                       + cloudRegionVertex.property("cloud-region-id").value().toString() + " to "
+                                                                       + NETWORK_TECHNOLOGY_NODETYPE + " nodes with " + NETWORK_TECHNOLOGY_NAME + " "
+                                                                       + networkTechName);
+                                                       edgePresent = true;
+                                                       continue;
+                                               }
+                                               // No existing edge found: build a cousin edge from the cloud-region vertex to this network-technology vertex
+                                               if (!edgePresent) {
+                                                       this.createCousinEdge(cloudRegionVertex, networkVertex);
+                                                       updateDmaapList(cloudRegionVertex);
+                                                       networkTechEdgeCount++;
+                                               }
+                                       } else {
+                                               networkTechEdgeErrorCount++;
+                                               logger.info("\t" + MIGRATION_ERROR + "Unable to create edge from " + CLOUDREGION_NODETYPE
+                                                               + " with " + CLOUD_OWNER + " to " + NETWORK_TECHNOLOGY_NODETYPE + " nodes with "
+                                                               + NETWORK_TECHNOLOGY_NAME + " " + networkTechName);
+
+                                       }
+                               }
+                       } catch (Exception e) {
+                               success = false;
+                               networkTechEdgeErrorCount++;
+                               logger.error("\t" + MIGRATION_ERROR + "encountered exception from " + NETWORK_TECHNOLOGY_NODETYPE
+                                               + " node when trying to create edge to " + CLOUDREGION_NODETYPE, e);
+                       }
+               }
+
+               logger.info("\n \n ******* Summary " + NETWORK_TECHNOLOGY_NODETYPE + " Nodes: Finished creating an Edge from "
+                               + CLOUDREGION_NODETYPE + " with " + CLOUD_OWNER + " to " + NETWORK_TECHNOLOGY_NODETYPE + " nodes with "
+                               + NETWORK_TECHNOLOGY_NAME + " " + networkTechName + "  ********* \n");
+               logger.info(MIGRATION_SUMMARY_COUNT + "Number of edges created from cloud-region to "+networkTechName +" network-technology : " + networkTechEdgeCount + "\n");
+               logger.info(MIGRATION_SUMMARY_COUNT + "Number of edges failed from cloud-region to "+networkTechName +" network-technology : " + networkTechEdgeErrorCount + "\n");
+
+       }
+    
+    @Override
+    public Status getStatus() {
+        if (success) {
+            return Status.SUCCESS;
+        } else {
+            return Status.FAILURE;
+        }
+    }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{NETWORK_TECHNOLOGY_NODETYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateNetworkTech";
+    }
+    
+    private void updateDmaapList(Vertex v){
+       String dmaapMsg = System.nanoTime() + "_" + v.id().toString() + "_"     + v.value("resource-version").toString();
+        dmaapMsgList.add(dmaapMsg);
+        logger.info("\tAdding Updated "+ CLOUDREGION_NODETYPE +" Vertex " + v.id().toString() + " to dmaapMsgList....");
+    }
+       
+    @Override
+       public void commit() {
+               engine.commit();
+               createDmaapFiles(dmaapMsgList);
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v14/MigrateSameSourcedRCTROPserverData.java b/src/main/java/org/onap/aai/migration/v14/MigrateSameSourcedRCTROPserverData.java
new file mode 100644 (file)
index 0000000..d0c1e15
--- /dev/null
@@ -0,0 +1,576 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v14;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.*;
+import org.javatuples.Pair;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.enums.AAIDirection;
+import org.onap.aai.edges.enums.EdgeProperty;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.*;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.apache.tinkerpop.gremlin.process.traversal.P;
+import org.onap.aai.setup.SchemaVersions;
+import org.springframework.web.util.UriUtils;
+
+import javax.ws.rs.core.UriBuilder;
+
+import java.io.UnsupportedEncodingException;
+import java.util.*;
+import java.util.stream.Collectors;
+
+@Enabled
+@MigrationPriority(5)
+@MigrationDangerRating(100)
+public class MigrateSameSourcedRCTROPserverData extends EdgeSwingMigrator {
+    /**
+     * Instantiates a new migrator.
+     *
+     * @param engine
+     */
+    private final String PARENT_NODE_TYPE = "pserver";
+    private boolean success = true;
+    protected Set<Object> seen = new HashSet<>();
+    private Map<String, UriBuilder> nodeTypeToUri;
+    private Map<String, Set<String>> nodeTypeToKeys;
+    private static List<String> dmaapMsgList = new ArrayList<String>();
+    private static List<Introspector> dmaapDeleteList = new ArrayList<Introspector>();
+    Vertex complexFromOld;
+    private static int dupROCount = 0;
+    private static int roPserversUpdatedCount = 0;
+    private static int roPserversDeletedCount = 0;
+    private static int dupRctCount = 0;
+    private static int rctPserversUpdatedCount = 0;
+    private static int rctPserversDeletedCount = 0;
+    
+    public MigrateSameSourcedRCTROPserverData(TransactionalGraphEngine engine , LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+    }
+
+    @Override
+    public Status getStatus() {
+        if (success) {
+            return Status.SUCCESS;
+        } else {
+            return Status.FAILURE;
+        }
+    }
+
+    @Override
+    public void commit() {
+        engine.commit();
+        createDmaapFiles(dmaapMsgList);
+        createDmaapFilesForDelete(dmaapDeleteList);
+    }
+
+    @Override
+    public List<Pair<Vertex, Vertex>> getAffectedNodePairs() {
+        return null;
+    }
+
+    @Override
+    public String getNodeTypeRestriction() {
+        return null;
+    }
+
+    @Override
+    public String getEdgeLabelRestriction() {
+        return null;
+    }
+
+    @Override
+    public String getEdgeDirRestriction() {
+        return null;
+    }
+
+    @Override
+    public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) {
+
+    }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{"lag-interface", "l-interface", "l3-interface-ipv4-address", "l3-interface-ipv6-address", "sriov-vf", "vlan", "p-interface", "sriov-pf"});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateCorrectRCTSourcedPserverData";
+    }
+
+    @Override
+    public void run() {
+
+
+        nodeTypeToUri = loader.getAllObjects().entrySet().stream().filter(e -> e.getValue().getGenericURI().contains("{")).collect(
+                Collectors.toMap(
+                        e -> e.getKey(),
+                        e -> UriBuilder.fromPath(e.getValue().getFullGenericURI().replaceAll("\\{"+ e.getKey() + "-", "{"))
+                ));
+
+        nodeTypeToKeys = loader.getAllObjects().entrySet().stream().filter(e -> e.getValue().getGenericURI().contains("{")).collect(
+                Collectors.toMap(
+                        e -> e.getKey(),
+                        e -> e.getValue().getKeys()
+                ));
+
+        List<Vertex> pserverTraversalRCT = graphTraversalSource().V().has("aai-node-type", "pserver").has("source-of-truth", P.within("RCT", "AAIRctFeed")).toList();
+        int rctCount = pserverTraversalRCT.size();
+        
+        try {
+               logger.info("RCT pserver count: "+rctCount);
+            updateToLatestRCT(pserverTraversalRCT);
+        } catch (UnsupportedEncodingException e) {
+            e.printStackTrace();
+        } catch (AAIException e) {
+            e.printStackTrace();
+        }
+
+        List<Vertex>  pserverTraversalRO = graphTraversalSource().V().has("aai-node-type", "pserver").has("source-of-truth", P.within("RO", "AAI-EXTENSIONS")).toList();
+        int roCount = pserverTraversalRO.size();
+        try {
+               logger.info("RO pserver count: "+roCount);
+            updateToLatestRO(pserverTraversalRO);
+        } catch (UnsupportedEncodingException e) {
+            e.printStackTrace();
+        } catch (AAIException e) {
+            e.printStackTrace();
+        }
+
+        logger.info ("\n \n ******* Migration Summary Counts for RCT and RO sourced pservers in A&AI ********* \n");
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Total number of RCT pservers: " +rctCount);
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Duplicate RCT pserver count: "+ dupRctCount);
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RCT updated: "+ rctPserversUpdatedCount);
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RCT deleted: "+ rctPserversDeletedCount +"\n");
+        
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Total number of RO pservers: " +roCount);
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Duplicate RO pserver count: "+ dupROCount);
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RO updated: "+ roPserversUpdatedCount);
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RO deleted: "+ roPserversDeletedCount +"\n");
+    }
+
+    public void updateToLatestRO(List<Vertex> list)  throws UnsupportedEncodingException, AAIException {
+        List<Vertex> removeROList = new ArrayList<>();
+
+        Vertex latestV = null;
+
+        for(int i=0;i<list.size();i++){
+            Vertex currV = list.get(i);
+            
+            if (removeROList.contains(currV)){
+               logger.info("RO Pserver: "+currV.property("hostname").value().toString() + " was already added to the delete list. No further processing needed for this pserver.");
+               continue;
+            }
+            logger.info("RO Pserver: "+currV.property("hostname").value().toString());
+
+            for(int j=i+1; j<list.size();j++) {
+
+                Vertex temp = list.get(j);
+
+                String[] currentVHostname = currV.property("hostname").value().toString().split("\\.");
+                String[] tempHostname = temp.property("hostname").value().toString().split("\\.");
+                
+                if (currentVHostname.length >0 && tempHostname.length > 0){
+                       if (!currentVHostname[0].isEmpty() && !tempHostname[0].isEmpty() && currentVHostname[0].equals(tempHostname[0])) {
+                               dupROCount++;
+                               logger.info("\tTemp RO Pserver: "+temp.property("hostname").value().toString());
+                           if (temp.property("hostname").value().toString().length() > currV.property("hostname").value().toString().length()) {
+                               //temp is the latest vertex swing everything from currV to temp
+                               latestV = temp;
+                               movePlink(currV, latestV);
+                               moveLagInterfaces(currV, latestV);
+                               swingEdges(currV, latestV, null, null, "BOTH");
+                               modifyChildrenUri(latestV);
+                               String dmaapMsg = System.nanoTime() + "_" + temp.id().toString() + "_"  + temp.value("resource-version").toString();
+                               dmaapMsgList.add(dmaapMsg);
+                               roPserversUpdatedCount++;
+                               logger.info("\tAdding pserver "+latestV.property("hostname").value().toString() + " to updated list");
+                               if (!removeROList.contains(list.get(i))) {
+                                       removeROList.add(list.get(i));
+                                       Introspector obj = serializer.getLatestVersionView(currV);//currV
+                                       logger.info("\tAdding pserver "+currV.property("hostname").value().toString() + " to delete list");
+                                       dmaapDeleteList.add(obj);
+                                       roPserversDeletedCount++;
+                               }
+                               currV = latestV;
+                           } else {
+                               //currV is the latest temp is the old vertex swing everything from temp to currV
+                               latestV = currV;
+                               movePlink(temp, latestV);
+                               moveLagInterfaces(temp, latestV);
+                               swingEdges(temp, latestV, null, null, "BOTH");
+                               modifyChildrenUri(latestV);
+                               String dmaapMsg = System.nanoTime() + "_" + currV.id().toString() + "_" + currV.value("resource-version").toString();
+                               dmaapMsgList.add(dmaapMsg);
+                               logger.info("\tAdding pserver "+latestV.property("hostname").value().toString() + " to updated list");
+                               roPserversUpdatedCount++;
+                               
+                               if (!removeROList.contains(list.get(j))) {
+                                       removeROList.add(list.get(j));
+                                       Introspector obj = serializer.getLatestVersionView(temp);//temp
+                                       logger.info("\tAdding pserver "+temp.property("hostname").value().toString() + " to delete list");
+                                       dmaapDeleteList.add(obj);
+                                       roPserversDeletedCount++;
+                               }
+                           }
+                       }
+                   }
+            }
+        }
+        logger.info("\tCount of RO Pservers removed = "+removeROList.size()+"\n");
+        removeROList.forEach(v ->v.remove());
+
+    }
+
+//    public void addComplexEdge(Vertex Latest) throws AAIException {
+//
+//        if(!(graphTraversalSource().V(Latest).has("aai-node-type", "pserver").out("org.onap.relationships.inventory.LocatedIn").has("aai-node-type","complex").hasNext())){
+//             if (complexFromOld != null)
+//            createCousinEdge(Latest,complexFromOld);
+//
+//        }
+//    }
+
+
+//    public void dropComplexEdge(Vertex old){
+//     List<Vertex> locatedInEdgeVertexList = graphTraversalSource().V(old).has("aai-node-type", "pserver").out("org.onap.relationships.inventory.LocatedIn").has("aai-node-type","complex").toList();
+//     if (locatedInEdgeVertexList != null && !locatedInEdgeVertexList.isEmpty()){
+//             Iterator<Vertex> locatedInEdgeVertexListItr = locatedInEdgeVertexList.iterator();
+//             while (locatedInEdgeVertexListItr.hasNext()){
+//                     complexFromOld = locatedInEdgeVertexListItr.next();
+//                     if ("complex".equalsIgnoreCase(complexFromOld.property("aai-node-type").value().toString())){
+//                             Edge pserverToComplexEdge = complexFromOld.edges(Direction.IN, "org.onap.relationships.inventory.LocatedIn").next();
+//                             pserverToComplexEdge.remove();
+//                     }
+//             }
+//     }
+//    }
+
+
+    private GraphTraversalSource graphTraversalSource() {
+               return this.engine.asAdmin().getTraversalSource();
+       }
+
+       public void updateToLatestRCT(List<Vertex> list) throws UnsupportedEncodingException, AAIException {
+        List<Vertex>removeRCTList = new ArrayList<>();
+
+        Vertex latestV = null;
+        for(int i=0;i<list.size();i++) {
+            Vertex currV = list.get(i);
+            if (!currV.property("fqdn").isPresent()){
+               continue;
+            }
+            
+            if (removeRCTList.contains(currV)){
+               logger.info("RCT Pserver: "+currV.property("hostname").value().toString() + " was already added to the delete list. No further processing needed for this pserver.");
+               continue;
+            }
+            logger.info("RCT Pserver: "+currV.property("hostname").value().toString());
+            for(int j=i+1;j<list.size();j++) {
+
+                Vertex temp = list.get(j);
+                if (temp.property("fqdn").isPresent()) {
+                    String[] currentVFqdn = currV.property("fqdn").value().toString().split("\\.");
+                    String[] tempFqdn = temp.property("fqdn").value().toString().split("\\.");
+                    if (currentVFqdn.length >0 && tempFqdn.length > 0){
+                           String currentFqdnFirstToken = currentVFqdn[0];
+                           String tempFqdnFirstToken = tempFqdn[0];
+                           if (!currentFqdnFirstToken.isEmpty() && !tempFqdnFirstToken.isEmpty() && currentFqdnFirstToken.equals(tempFqdnFirstToken)) {
+                               dupRctCount++;
+                               logger.info("\tMatching Temp RCT Pserver: "+temp.property("hostname").value().toString());
+                               long tempRV = Long.parseLong(temp.value("resource-version"));
+                               long currRV = Long.parseLong(currV.value("resource-version"));
+                               logger.info("\tcurrRV: "+currRV+ ", tempRV: "+tempRV);
+                               if (tempRV > currRV) {
+                                   //currv is old, temp vertex found in traversal is the latest
+                                   latestV = temp;
+                                   movePlink(currV, latestV);
+                                   moveLagInterfaces(currV, latestV);
+                                   swingEdges(currV, latestV, null, null, "BOTH");
+                                   modifyChildrenUri(latestV);
+                                   String dmaapMsg = System.nanoTime() + "_" + temp.id().toString() + "_"      + temp.value("resource-version").toString();
+                                   dmaapMsgList.add(dmaapMsg);
+                                   rctPserversUpdatedCount++;
+                                   logger.info("\tAdding pserver "+latestV.property("hostname").value().toString() + " to updated list");
+                                   if (!removeRCTList.contains(list.get(i))) {
+                                       removeRCTList.add(list.get(i));
+                                       Introspector obj = serializer.getLatestVersionView(currV);
+                                       logger.info("\tAdding pserver "+currV.property("hostname").value().toString() + " to delete list");
+                                           dmaapDeleteList.add(obj);
+                                           rctPserversDeletedCount++;
+                                   }
+                                   currV = latestV;
+                               } else {
+                                   //currv Is the latest, temp vertex found is an older version
+                                   latestV = currV;
+                                   movePlink(temp, latestV);
+                                   moveLagInterfaces(temp, latestV);
+                                   swingEdges(temp, latestV, null, null, "BOTH");
+                                   modifyChildrenUri(latestV);
+                                   String dmaapMsg = System.nanoTime() + "_" + currV.id().toString() + "_"     + currV.value("resource-version").toString();
+                                   dmaapMsgList.add(dmaapMsg);
+                                   rctPserversUpdatedCount++;
+                                   logger.info("\tAdding pserver "+latestV.property("hostname").value().toString() + " to updated list");
+                                   if (!removeRCTList.contains(list.get(j))) {
+                                       removeRCTList.add(list.get(j));
+                                       Introspector obj = serializer.getLatestVersionView(temp);
+                                       logger.info("\tAdding pserver "+temp.property("hostname").value().toString() + " to delete list");
+                                           dmaapDeleteList.add(obj);
+                                           rctPserversDeletedCount++;
+                                   }
+                               }
+       
+                           }
+                    }
+                }
+            }
+        }
+        logger.info("\tCount of RCT Pservers removed = "+removeRCTList.size() +"\n");
+        removeRCTList.forEach((r)-> r.remove());
+
+    }
+
+
+    public void movePlink(Vertex old, Vertex latest) throws AAIException {
+
+        List<Vertex> pInterfacesOnOldPserver = graphTraversalSource().V(old).has("aai-node-type","pserver").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").toList();
+        List<Vertex> pInterfacesOnLatestPserver = graphTraversalSource().V(latest).has("aai-node-type","pserver").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").toList();
+        //                SCENARIO 1 = no match found move everything from pserver old to new in swing edges call outside this fcn
+
+        if(pInterfacesOnLatestPserver.size() == 0){
+               logger.info("\tNo P-interfaces found on "+latest.property("hostname").value().toString()+ "...");
+            if(pInterfacesOnOldPserver.size() != 0) {
+               logger.info("\tP-interfaces found on "+old.property("hostname").value().toString()+ ". Update plink name and move the p-interfaces to latest pserver.");
+                for (int i = 0; i < pInterfacesOnOldPserver.size(); i++) {
+                    if (graphTraversalSource().V(pInterfacesOnOldPserver.get(i)).has("aai-node-type", "p-interface").out("tosca.relationships.network.LinksTo").hasNext()) {
+                        Vertex oldPlink = graphTraversalSource().V(pInterfacesOnOldPserver.get(i)).has("aai-node-type", "p-interface").out("tosca.relationships.network.LinksTo").next();
+                        String linkName = oldPlink.property("link-name").value().toString();
+                        logger.info("\tPhysical-link "+linkName+ " found on "+pInterfacesOnOldPserver.get(i).property("interface-name").value().toString());
+                        linkName = linkName.replaceAll(old.property("hostname").value().toString(), latest.property("hostname").value().toString());
+                        String[] PlinkBarSplit = linkName.split("\\|");
+                        if (PlinkBarSplit.length > 1) {
+                            modifyPlinkName(oldPlink, linkName, old);
+                        }
+                    }
+                }
+            }
+
+            return;
+        }
+
+        for(int i=0; i<pInterfacesOnOldPserver.size();i++){
+            for(int j=0; j<pInterfacesOnLatestPserver.size(); j++){
+               Vertex oldPinterface = graphTraversalSource().V(pInterfacesOnOldPserver.get(i)).has("aai-node-type","p-interface").next();
+                //pinterfaces are the same
+                if(pInterfacesOnOldPserver.get(i).property("interface-name").value().toString().equals(pInterfacesOnLatestPserver.get(j).property("interface-name").value().toString())){
+                    Vertex newPinterface = graphTraversalSource().V(pInterfacesOnLatestPserver.get(j)).has("aai-node-type","p-interface").next();
+                    logger.info("\tMatching P-interface "+newPinterface.property("interface-name").value().toString()+ " found on pservers");
+//                  SCENARIO 3 there already exists a plink in the new pinterface need to move all other pinterfaces and nodes in swing edges after the fcn no need for plink name change
+                    List<Vertex> oldPlinkList = graphTraversalSource().V(pInterfacesOnOldPserver.get(i)).has("aai-node-type","p-interface").out("tosca.relationships.network.LinksTo").toList();
+                    if(graphTraversalSource().V(pInterfacesOnLatestPserver.get(j)).has("aai-node-type","p-interface").out("tosca.relationships.network.LinksTo").hasNext()){
+                       logger.info("\tPhysical-link exists on new pserver's p-interface also... So, don't move this p-interface to new pserver...");
+                       if (!oldPlinkList.isEmpty()) {  
+                               //drop edge b/w oldPInterface and oldPlink
+                               String oldPlinkName = ""; 
+                               Edge oldPIntToPlinkEdge = oldPinterface.edges(Direction.OUT, "tosca.relationships.network.LinksTo").next();
+                               oldPIntToPlinkEdge.remove();
+       
+                               //remove physical link vertex also
+                               Vertex oldPlink = null;
+                            
+                               oldPlink = oldPlinkList.get(0);
+                               oldPlinkName = oldPlink.property("link-name").value().toString();
+                               oldPlink.remove();
+                               logger.info("\tDropped edge b/w old P-interface and Physical-link, and deleted old physical-link "+oldPlinkName);
+                            }
+                       moveChildrenOfMatchingPInterfaceToNewPserver(pInterfacesOnOldPserver, i, oldPinterface, newPinterface);
+                    }
+//                  SCENARIO 2 = there is no  plink in new  pinterface and move old plink to new
+                    else{
+                       logger.info("\tNo Physical-link exists on new pserver's p-interface... Move old plink to new pserver's p-interface");
+                        Vertex oldPlink = null;
+                        if (!oldPlinkList.isEmpty()) {
+                               oldPlink = oldPlinkList.get(0);
+                               String linkName = oldPlink.property("link-name").value().toString();
+                               createCousinEdge(newPinterface,oldPlink);
+                               logger.info("\tCreated edge b/w new P-interface and old physical-link "+linkName);
+                               //drop edge b/w oldPInterface and oldPlink
+                               Edge oldPIntToPlinkEdge = oldPinterface.edges(Direction.OUT, "tosca.relationships.network.LinksTo").next();
+                               oldPIntToPlinkEdge.remove();
+                               logger.info("\tDropped edge b/w old P-interface and Physical-link "+linkName);
+                               linkName =  linkName.replaceAll(old.property("hostname").value().toString(),latest.property("hostname").value().toString());
+       
+                               String[] PlinkBarSplit = linkName.split("\\|");
+                               if(PlinkBarSplit.length>1) {
+                                   modifyPlinkName(oldPlink,linkName,old);
+                               }
+                               else{
+                                   logger.info("\t" +oldPlink.property("link-name").value().toString()+ " does not comply with naming conventions related to pserver hostname: " + old.property("hostname").value().toString());
+                               }
+                               moveChildrenOfMatchingPInterfaceToNewPserver(pInterfacesOnOldPserver, i, oldPinterface, newPinterface);
+                        } else {
+                               moveChildrenOfMatchingPInterfaceToNewPserver(pInterfacesOnOldPserver, i, oldPinterface, newPinterface);
+                        }
+                    }
+                  //delete the oldPInterface
+                    oldPinterface.remove();
+                    break;
+                }
+            }
+        }
+    }
+
+       private void moveChildrenOfMatchingPInterfaceToNewPserver(List<Vertex> pInterfacesOnOldPserver, int i, Vertex oldPinterface, Vertex newPinterface) {
+               // Check if there are children under old pserver's p-int and move them to new pserver's matching p-int
+               List<Vertex> oldPIntChildren = graphTraversalSource().V(pInterfacesOnOldPserver.get(i)).has("aai-node-type","p-interface").in().has("aai-node-type", P.within("l-interface","sriov-pf")).toList();
+               if (oldPIntChildren != null && !oldPIntChildren.isEmpty()){
+                       oldPIntChildren.forEach((c)-> { swingEdges(oldPinterface, newPinterface, null, null, "IN");
+//                                                                                     c.remove();
+                       });
+                       logger.info("\t"+"Child vertices of p-interface on old pserver have been moved to p-interface on new pserver");
+                       
+               }
+       }
+
+       public void modifyPlinkName(Vertex oldPlink,String linkName,Vertex old ){
+
+        String[] PlinkBarSplit = linkName.split("\\|");
+        if(PlinkBarSplit.length>1) {
+            String[] pserv1Connection = PlinkBarSplit[0].split(":");
+            String[] pserv2Connection = PlinkBarSplit[1].split(":");
+
+            HashMap<String, String> map = new HashMap<>();
+            map.put(pserv1Connection[0], pserv1Connection[1]);
+            map.put(pserv2Connection[0], pserv2Connection[1]);
+
+            String[] temp = new String[2];
+            temp[0] = pserv1Connection[0];
+            temp[1] = pserv2Connection[0];
+            Arrays.sort(temp);
+            String linkNameNew = temp[0] + ":" + map.get(temp[0]).toString() + "|" + temp[1] + ":" + map.get(temp[1]).toString();
+            oldPlink.property("link-name", linkNameNew);
+            logger.info("\tUpdate physical-link name from "+linkName+ " to "+linkNameNew);
+        }
+        else{
+            logger.info("\t" +oldPlink.property("link-name").value().toString()+ " does not comply with naming conventions related to pserver hostname: " + old.property("hostname").value().toString());
+
+        }
+    }
+
+    public void moveLagInterfaces(Vertex old, Vertex latest) throws AAIException {
+
+        List<Vertex> lagInterfacesOnOldPserver = graphTraversalSource().V(old).has("aai-node-type","pserver").in("tosca.relationships.network.BindsTo").has("aai-node-type","lag-interface").toList();
+        List<Vertex> lagInterfacesOnLatestPserver = graphTraversalSource().V(latest).has("aai-node-type","pserver").in("tosca.relationships.network.BindsTo").has("aai-node-type","lag-interface").toList();
+        //                SCENARIO 1 = no match found move everything from pserver old to new in swing edges call outside this fcn
+
+        if(lagInterfacesOnLatestPserver.size() == 0){
+            return;
+        }
+
+        for(int i=0; i<lagInterfacesOnOldPserver.size();i++){
+
+            for(int j=0; j<lagInterfacesOnLatestPserver.size(); j++){
+                //lag interface-name matches on both
+                if(lagInterfacesOnOldPserver.get(i).property("interface-name").value().toString().equals(lagInterfacesOnLatestPserver.get(j).property("interface-name").value().toString())){
+                    Vertex oldLaginterface = graphTraversalSource().V(lagInterfacesOnOldPserver.get(i)).has("aai-node-type","lag-interface").next();
+                    Vertex newLaginterface = graphTraversalSource().V(lagInterfacesOnLatestPserver.get(j)).has("aai-node-type","lag-interface").next();
+                    // Check if there are children under the old pserver's lag-interface and move them to the new pserver's matching lag-interface
+                       List<Vertex> oldPIntChildren = graphTraversalSource().V(lagInterfacesOnOldPserver.get(i)).has("aai-node-type","lag-interface").in().has("aai-node-type", P.within("l-interface")).toList();
+                       if (oldPIntChildren != null && !oldPIntChildren.isEmpty()){
+                               oldPIntChildren.forEach((c)-> swingEdges(oldLaginterface, newLaginterface, null, null, "BOTH"));
+                       }
+                       logger.info("\t"+"Child vertices of lag-interface on old pserver have been moved to lag-interface on new pserver");
+                    //delete the oldLagInterface
+                    oldLaginterface.remove();
+                    break;
+                }
+            }
+        }
+    }
+
+
+    private void modifyChildrenUri(Vertex v) throws UnsupportedEncodingException, AAIException {
+       logger.info("\tModifying children uri for all levels.....");
+        Set<Vertex> parentSet = new HashSet<>();
+        parentSet.add(v);
+        verifyOrAddUri("", parentSet);
+    }
+
+
+    protected void verifyOrAddUri(String parentUri, Set<Vertex> vertexSet) throws UnsupportedEncodingException, AAIException {
+
+
+        String correctUri;
+        for (Vertex v : vertexSet) {
+            seen.add(v.id());
+            // if there is an issue generating the uri, catch it, log it, and move on
+            try {
+                correctUri = parentUri + this.getUriForVertex(v);
+            } catch (Exception e) {
+                logger.error("\tVertex has issue generating uri " + e.getMessage() + "\n\t" + this.asString(v));
+                continue;
+            }
+            try {
+                v.property(AAIProperties.AAI_URI, correctUri);
+            } catch (Exception e) {
+                logger.info(e.getMessage() + "\n\t" + this.asString(v));
+            }
+            if (!v.property(AAIProperties.AAI_UUID).isPresent()) {
+                v.property(AAIProperties.AAI_UUID, UUID.randomUUID().toString());
+            }
+            this.verifyOrAddUri(correctUri, getChildren(v));
+        }
+    }
+
+    protected Set<Vertex> getChildren(Vertex v) {
+
+        Set<Vertex> children = graphTraversalSource().V(v).bothE().not(__.has(EdgeProperty.CONTAINS.toString(), AAIDirection.NONE.toString())).otherV().toSet();
+
+        return children.stream().filter(child -> !seen.contains(child.id())).collect(Collectors.toSet());
+    }
+
+    protected String getUriForVertex(Vertex v) {
+        String aaiNodeType = v.property(AAIProperties.NODE_TYPE).value().toString();
+
+
+        Map<String, String> parameters = this.nodeTypeToKeys.get(aaiNodeType).stream().collect(Collectors.toMap(
+                key -> key,
+                key -> encodeProp(v.property(key).value().toString())
+        ));
+
+        return this.nodeTypeToUri.get(aaiNodeType).buildFromEncodedMap(parameters).toString();
+    }
+    private static String encodeProp(String s) {
+        try {
+            return UriUtils.encode(s, "UTF-8");
+        } catch (UnsupportedEncodingException e) {
+            return "";
+        }
+    }
+
+}
\ No newline at end of file
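
The two update methods above pair pservers by comparing the first dot-delimited token of their identifiers (hostname for RO-sourced nodes, fqdn for RCT-sourced nodes), and modifyPlinkName rebuilds a physical-link name by sorting its two hostname:interface halves after swapping in the surviving hostname. Below is a minimal standalone sketch of that matching and renaming logic; the class and method names are hypothetical and the code is illustrative only, not part of this change.

    // Sketch only (not part of this change): mirrors the first-token matching and the
    // physical-link renaming done by updateToLatestRCT/updateToLatestRO and modifyPlinkName.
    // Class and method names are hypothetical.
    import java.util.Arrays;

    public class PserverDedupSketch {

        // Two pservers are treated as duplicates when the first dot-delimited token of
        // their identifiers matches (hostname for RO-sourced nodes, fqdn for RCT-sourced).
        static boolean isDuplicate(String idA, String idB) {
            String[] a = idA.split("\\.");
            String[] b = idB.split("\\.");
            return a.length > 0 && b.length > 0
                    && !a[0].isEmpty() && !b[0].isEmpty() && a[0].equals(b[0]);
        }

        // Physical-link names follow "<host1>:<intf1>|<host2>:<intf2>". After the old
        // hostname is swapped for the surviving one (the migration also uses replaceAll),
        // the two halves are re-ordered by hostname so the rebuilt name is deterministic.
        static String normalizeLinkName(String linkName, String oldHost, String newHost) {
            String renamed = linkName.replaceAll(oldHost, newHost);
            String[] halves = renamed.split("\\|");
            if (halves.length <= 1) {
                return renamed; // name does not follow the convention; left untouched
            }
            String[] first = halves[0].split(":");
            String[] second = halves[1].split(":");
            String[] hosts = {first[0], second[0]};
            Arrays.sort(hosts);
            String intfA = hosts[0].equals(first[0]) ? first[1] : second[1];
            String intfB = hosts[1].equals(second[0]) ? second[1] : first[1];
            return hosts[0] + ":" + intfA + "|" + hosts[1] + ":" + intfB;
        }

        public static void main(String[] args) {
            System.out.println(isDuplicate("host1.sub.example.com", "host1"));            // true
            System.out.println(normalizeLinkName("hostB:eth0|hostA:eth1", "hostB", "hostC"));
            // prints hostA:eth1|hostC:eth0
        }
    }
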
diff --git a/src/main/java/org/onap/aai/migration/v14/MigrateSdnaIvlanData.java b/src/main/java/org/onap/aai/migration/v14/MigrateSdnaIvlanData.java
new file mode 100644 (file)
index 0000000..6f759fb
--- /dev/null
@@ -0,0 +1,443 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v14;\r
+\r
+import java.io.FileNotFoundException;\r
+import java.io.IOException;\r
+import java.nio.file.Files;\r
+import java.nio.file.NoSuchFileException;\r
+import java.nio.file.Paths;\r
+import java.util.ArrayList;\r
+import java.util.HashMap;\r
+import java.util.Iterator;\r
+import java.util.List;\r
+import java.util.Map;\r
+import java.util.Optional;\r
+\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;\r
+import org.apache.tinkerpop.gremlin.structure.Vertex;\r
+import org.onap.aai.db.props.AAIProperties;\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.migration.Enabled;\r
+import org.onap.aai.migration.MigrationDangerRating;\r
+import org.onap.aai.migration.MigrationPriority;\r
+import org.onap.aai.migration.Migrator;\r
+import org.onap.aai.migration.Status;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+import org.onap.aai.setup.SchemaVersions;\r
+\r
+@MigrationPriority(100)\r
+@MigrationDangerRating(1)\r
+@Enabled\r
+public class MigrateSdnaIvlanData extends Migrator {\r
+       \r
+       private final String CONFIGURATION_NODE_TYPE = "configuration";\r
+       private final String EVC_NODE_TYPE = "evc";\r
+       private final String FORWARDER_NODE_TYPE = "forwarder";\r
+       private final String FORWRDER_EVC_NODE_TYPE = "forwarder-evc";  \r
+       private final String FORWARDING_PATH_NODE_TYPE = "forwarding-path";\r
+       private final String PNF_NODE_TYPE = "pnf";\r
+       private final String  P_INTERFACE_NODE_TYPE = "p-interface";\r
+       private final String  LAG_INTERFACE_NODE_TYPE = "lag-interface";\r
+       private final String SAREA_GLOBAL_CUSTOMER_ID = "8a00890a-e6ae-446b-9dbe-b828dbeb38bd";\r
+       \r
+       GraphTraversal<Vertex, Vertex> serviceSubscriptionGt;\r
+       \r
+       private static GraphTraversalSource g = null;\r
+       private static boolean success = true;\r
+    private static boolean checkLog = false;\r
+    private int headerLength;\r
+    private int migrationSuccess = 0;\r
+    private int migrationFailure = 0;\r
+    private int invalidPInterfaceCount = 0;\r
+    private int invalidLagInterfaceCount = 0;\r
+    \r
+    \r
+    private static List<String> dmaapMsgList = new ArrayList<String>();\r
+    private static final String homeDir = System.getProperty("AJSC_HOME");\r
+    \r
+    private static List<String> validPnfList = new ArrayList<String>();\r
+    private static List<String> invalidPnfList = new ArrayList<String>();\r
+   \r
+    private static Map<String, List<String>> validInterfaceMap =  new HashMap<String, List<String>>();\r
+    private static Map<String, List<String>> invalidInterfaceMap =  new HashMap<String, List<String>>();\r
+       \r
+    protected class SdnaIvlanFileData{\r
+       String evcName;\r
+       String pnfName;\r
+               String interfaceAID;\r
+       int ivlanValue;\r
+       \r
+       public String getEvcName() {\r
+                       return evcName;\r
+               }\r
+               public void setEvcName(String evcName) {\r
+                       this.evcName = evcName;\r
+               }\r
+               \r
+               public String getPnfName() {\r
+                       return pnfName;\r
+               }\r
+               public void setPnfName(String pnfName) {\r
+                       this.pnfName = pnfName;\r
+               }\r
+               public String getInterfaceAID() {\r
+                       return interfaceAID;\r
+               }\r
+               public void setInterfaceAID(String interfaceAID) {\r
+                       this.interfaceAID = interfaceAID;\r
+               }\r
+               \r
+               public int getIvlanValue() {\r
+                       return ivlanValue;\r
+               }\r
+               public void setIvlanValue(int ivlanValue) {\r
+                       this.ivlanValue = ivlanValue;\r
+               }\r
+               \r
+    }\r
+    \r
+    private static ArrayList<SdnaIvlanFileData> ivlanList = new ArrayList<SdnaIvlanFileData>();\r
+   \r
+       public MigrateSdnaIvlanData(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {\r
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+               \r
+               this.g = this.engine.asAdmin().getTraversalSource();\r
+               this.serviceSubscriptionGt = g.V().has("global-customer-id", SAREA_GLOBAL_CUSTOMER_ID).in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA");\r
+       }\r
+\r
+       @Override\r
+       public void run() {\r
+               logger.info("---------- Start migration ----------");\r
+        String configDir = System.getProperty("BUNDLECONFIG_DIR");\r
+        if (homeDir == null) {\r
+            logger.info(this.MIGRATION_ERROR + "ERROR: Could not find sys prop AJSC_HOME");\r
+            success = false;\r
+            return;\r
+        }\r
+        if (configDir == null) {\r
+            success = false;\r
+            return;\r
+        }\r
+        \r
+        String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/sarea-inventory/";\r
+\r
+        int fileLineCounter = 0;\r
+\r
+        String fileName = feedDir+ "ivlanData.csv";\r
+        logger.info(fileName);\r
+        logger.info("---------- Processing Entries from file  ----------");\r
+        \r
+                               \r
+        try  {\r
+               List<String> lines = Files.readAllLines(Paths.get(fileName));\r
+            Iterator<String> lineItr = lines.iterator();\r
+            while (lineItr.hasNext()){\r
+                String line = lineItr.next().trim();\r
+                if (!line.isEmpty()) {\r
+                    if (fileLineCounter != 0) {\r
+                       \r
+                       try{\r
+                               String[] colList = line.split(",", -1);                      \r
+                               SdnaIvlanFileData lineData = new SdnaIvlanFileData();\r
+                               lineData.setEvcName(colList[0].trim());\r
+                               lineData.setPnfName(colList[1].trim());\r
+                               lineData.setInterfaceAID(colList[2].trim());\r
+                               lineData.setIvlanValue(Integer.valueOf(colList[3].trim()));\r
+                               ivlanList.add(lineData);\r
+                               \r
+                       } catch (Exception e){\r
+                               logger.info(this.MIGRATION_ERROR + " ERROR: Record format is invalid. Expecting a numeric value for Ivlan_Value. Skipping record:  "  + line);\r
+                               this.migrationFailure++;\r
+                       }\r
+               \r
+                    } else {\r
+                        this.headerLength = line.split(",", -1).length;\r
+                        if (this.headerLength < 4){\r
+                            logger.info(this.MIGRATION_ERROR + "ERROR: Input file should have at least 4 columns");\r
+                            this.success = false;\r
+                            return;\r
+                        }\r
+                    }\r
+                }\r
+                fileLineCounter++;\r
+            }\r
+            \r
+            processSdnaIvlan();\r
+            \r
+            int invalidInterfacesCount = getInvalidInterfaceCount();\r
+            \r
+            logger.info ("\n \n ******* Final Summary for SDN-A IVLAN Migration ********* \n");\r
+            logger.info(this.MIGRATION_SUMMARY_COUNT + "SDN-A forward-evcs: IVLANs updated: "+ migrationSuccess);\r
+            logger.info(this.MIGRATION_SUMMARY_COUNT + "Total File Record Count: "+(fileLineCounter - 1));\r
+            logger.info(this.MIGRATION_SUMMARY_COUNT + "Unprocessed SDNA File Records : "+ migrationFailure);\r
+            logger.info(this.MIGRATION_SUMMARY_COUNT + "PNFs from Input File not found : "+ Integer.toString(invalidPnfList.size()) + "\n");\r
+            \r
+           \r
+            logger.info(this.MIGRATION_SUMMARY_COUNT + "Total PNF + P-INTERFACEs from Input File not found : " + Integer.toString(invalidPInterfaceCount));\r
+            logger.info(this.MIGRATION_SUMMARY_COUNT + "Total PNF + LAG-INTERFACEs from Input File not found : " + Integer.toString(invalidLagInterfaceCount));\r
+            logger.info(this.MIGRATION_SUMMARY_COUNT + "Total PNF/INTERFACEs from Input File not found : " + Integer.toString(invalidInterfacesCount));\r
+\r
+        } catch (FileNotFoundException e) {\r
+            logger.info(this.MIGRATION_ERROR + "ERROR: Could not find file " + fileName, e.getMessage());\r
+            success = false;\r
+            checkLog = true;     \r
+        }  catch (NoSuchFileException e) {\r
+            logger.info(this.MIGRATION_ERROR + "ERROR: Could not find file " + fileName, e.getMessage());\r
+            success = false;\r
+            checkLog = true; \r
+        } catch (IOException e) {\r
+            logger.info(this.MIGRATION_ERROR + "ERROR: Issue reading file " + fileName, e);\r
+            success = false;\r
+        } catch (Exception e) {\r
+            logger.info(this.MIGRATION_ERROR + "encountered exception", e);\r
+            e.printStackTrace();\r
+            success = false;\r
+        }        \r
+\r
+       }\r
+       private void processSdnaIvlan() {\r
+\r
+               for(int i = 0; i < ivlanList.size(); i ++) {\r
+                       String evc = ivlanList.get(i).getEvcName();\r
+                       String pnf = ivlanList.get(i).getPnfName();\r
+                       String interfaceId = ivlanList.get(i).getInterfaceAID();\r
+                       String ivlanValue =  Integer.toString(ivlanList.get(i).getIvlanValue());\r
+                       \r
+                       Boolean pnfExists = pnfExists(pnf);\r
+                       GraphTraversal<Vertex, Vertex> forwarderEvcGT;\r
+                       Vertex forwarderEvcVtx = null;\r
+                       String interfaceNodeType;\r
+                       String forwarderEvcId = null;\r
+                       \r
+                       if (!pnfExists){\r
+                               migrationFailure++;\r
+                       }else{\r
+                               \r
+                               if (interfaceId.contains(".")){\r
+                                       interfaceNodeType = P_INTERFACE_NODE_TYPE;                                      \r
+                               }else{\r
+                                       interfaceNodeType = LAG_INTERFACE_NODE_TYPE;                                    \r
+                               }\r
+                               \r
+                               validateInterface(pnf, interfaceNodeType, interfaceId);\r
+\r
+                               forwarderEvcGT = g.V()\r
+                                               .has("pnf-name", pnf).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE)\r
+                                               .in("tosca.relationships.network.BindsTo")\r
+                                               .has(AAIProperties.NODE_TYPE, interfaceNodeType).has("interface-name", interfaceId)\r
+                                               .in("org.onap.relationships.inventory.ForwardsTo")\r
+                                               .where(__.out("org.onap.relationships.inventory.BelongsTo").has("forwarding-path-id", evc))\r
+                                               .out("org.onap.relationships.inventory.Uses")\r
+                                               .in("org.onap.relationships.inventory.BelongsTo"); \r
+                               \r
+                               // fwd-evc not found for pnf + interface\r
+                               if(!forwarderEvcGT.hasNext()){\r
+                                       forwarderEvcId = pnf + " " + evc;\r
+                                       migrationError(PNF_NODE_TYPE + "/" + EVC_NODE_TYPE, forwarderEvcId, "ivlan", ivlanValue);\r
+                                       \r
+                               }\r
+                               \r
+                               while(forwarderEvcGT.hasNext()){\r
+                                       forwarderEvcVtx = forwarderEvcGT.next();\r
+                                       \r
+                                       // fwd-evc vertex is null \r
+                                       if(forwarderEvcVtx == null){\r
+                                               forwarderEvcId = pnf + " " + evc;\r
+                                               migrationError(PNF_NODE_TYPE + "/" + EVC_NODE_TYPE, forwarderEvcId, "ivlan", ivlanValue);\r
+                                       }\r
+                                       // update fwd-evc with ivlan value\r
+                                       else{\r
+                                                                                               \r
+                                               forwarderEvcId = forwarderEvcVtx.property("forwarder-evc-id").value().toString();\r
+                                               try{\r
+                                                       forwarderEvcVtx.property("ivlan", ivlanValue);\r
+                                                       logger.info(String.format("Updating Node Type forwarder-evc Property ivlan value %s", ivlanValue.toString()));\r
+                                                       this.touchVertexProperties(forwarderEvcVtx, false);\r
+                                                       updateDmaapList(forwarderEvcVtx);\r
+                                                       migrationSuccess++;     \r
+                                                       \r
+                                               }catch (Exception e){\r
+                                                       logger.info(e.toString());\r
+                                                       migrationError(FORWRDER_EVC_NODE_TYPE, forwarderEvcId, "ivlan", ivlanValue);\r
+                                               }                                                       \r
+                                       }\r
+                               }       \r
+                       }\r
+               \r
+               }\r
+       }\r
+       \r
+       /** \r
+        * Description: Validate if pnf node exists in Graph\r
+        * @param pnf \r
+        * @return boolean\r
+        */\r
+       private boolean pnfExists(String pnf){\r
+               if (invalidPnfList.contains(pnf)){\r
+                       logger.info(this.MIGRATION_ERROR + "ERROR: PNF value " + pnf + " does not exist.");\r
+                       return false;\r
+               }\r
+               if (validPnfList.contains(pnf)){\r
+                       return true;\r
+               }\r
+               \r
+               GraphTraversal<Vertex, Vertex> pnfGT = g.V()\r
+                               .has("pnf-name", pnf).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE);\r
+               \r
+               if(pnfGT.hasNext()){\r
+                       validPnfList.add(pnf);\r
+                       return true;\r
+               }\r
+               else{\r
+                       logger.info(this.MIGRATION_ERROR + "ERROR: PNF value " + pnf + " does not exist.");\r
+                       invalidPnfList.add(pnf);\r
+                       return false;\r
+               }\r
+\r
+       }\r
+       \r
+       /**\r
+        * Description: Validate if p-interface or lag-interface node exists in Graph\r
+        * @param pnf\r
+        * @param interfaceNodeType\r
+        * @param interfaceName\r
+        */\r
+       private void validateInterface(String pnf, String interfaceNodeType, String interfaceName){\r
+               \r
+               List <String> validInterfaceList;\r
+               List <String> invalidInterfaceList;\r
+               \r
+               if(!validInterfaceMap.containsKey(pnf) ){\r
+                       validInterfaceList = new ArrayList<String>();\r
+               }else{\r
+                       validInterfaceList = validInterfaceMap.get(pnf);                        \r
+               }\r
+               \r
+               if(!invalidInterfaceMap.containsKey(pnf)){\r
+                       invalidInterfaceList = new ArrayList<String>();\r
+               }else{\r
+                       invalidInterfaceList = invalidInterfaceMap.get(pnf);                    \r
+               }\r
+               \r
+               if(invalidInterfaceList.contains(interfaceName)){\r
+                       logger.info(this.MIGRATION_ERROR + "ERROR PNF " + pnf  + " with a " + interfaceNodeType + " of " + interfaceName + " does not exist.");\r
+                       return;\r
+               }\r
+               if(validInterfaceList.contains(interfaceName)){\r
+                       return;\r
+               }\r
+               \r
+               GraphTraversal<Vertex, Vertex> interfaceGT = g.V()\r
+                               .has("pnf-name", pnf).has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE)\r
+                               .in("tosca.relationships.network.BindsTo")\r
+                               .has("interface-name", interfaceName).has(AAIProperties.NODE_TYPE, interfaceNodeType);\r
+               \r
+               if(interfaceGT.hasNext()){\r
+                       validInterfaceList.add(interfaceName);  \r
+                       validInterfaceMap.put(pnf, validInterfaceList);\r
+               }\r
+               else{\r
+                       logger.info(this.MIGRATION_ERROR + "ERROR PNF " + pnf  + " with a " + interfaceNodeType + " of " + interfaceName + " does not exist.");\r
+                       invalidInterfaceList.add(interfaceName);        \r
+                       invalidInterfaceMap.put(pnf, invalidInterfaceList);\r
+               }\r
+       }\r
+       \r
+       \r
+       /**\r
+        * Description: Error Routine if graph is not updated by input file record\r
+        * @param nodeType\r
+        * @param nodeId\r
+        * @param property\r
+        * @param propertyValue\r
+        */\r
+       private void migrationError(String nodeType, String nodeId, String property, String propertyValue){\r
+               logger.info(this.MIGRATION_ERROR + "ERROR: Failure to update " \r
+                               + nodeType + " ID " + nodeId + ", " + property + " to value " + propertyValue \r
+                               + ".  Node Not Found \n");\r
+               migrationFailure++;\r
+       }\r
+       \r
+       private int getInvalidInterfaceCount(){\r
+               int interfaceCount = 0;\r
+\r
+               for (Map.Entry<String, List<String>> entry: invalidInterfaceMap.entrySet()){\r
+               String key = entry.getKey();\r
+               List <String> invalidList = invalidInterfaceMap.get(key);\r
+                               \r
+               for (int i = 0; i < invalidList.size(); i++){\r
+                       if(invalidList.get(i).contains(".")){\r
+                               invalidPInterfaceCount++;\r
+                       }else{\r
+                               invalidLagInterfaceCount++;\r
+                       }\r
+                       \r
+               }                               \r
+               interfaceCount = interfaceCount + invalidInterfaceMap.get(key).size();                          \r
+       } \r
+               return interfaceCount;\r
+       }\r
+       \r
+       /**\r
+        * Description: Dmaap Routine\r
+        * @param v\r
+        */\r
+       private void updateDmaapList(Vertex v){\r
+       String dmaapMsg = System.nanoTime() + "_" + v.id().toString() + "_"     + v.value("resource-version").toString();\r
+        dmaapMsgList.add(dmaapMsg);\r
+        logger.info("\tAdding Updated Vertex " + v.id().toString() + " to dmaapMsgList....");\r
+    }\r
+               \r
+       @Override\r
+         public Status getStatus() {\r
+        if (checkLog) {\r
+            return Status.CHECK_LOGS;\r
+        }\r
+        else if (success) {\r
+            return Status.SUCCESS;\r
+        }\r
+        else {\r
+            return Status.FAILURE;\r
+        }\r
+    }\r
+       \r
+       @Override\r
+       public void commit() {\r
+               engine.commit();\r
+               createDmaapFiles(dmaapMsgList);\r
+       }\r
+\r
+       @Override\r
+    public Optional<String[]> getAffectedNodeTypes() {\r
+        return Optional.of(new String[]{this.FORWRDER_EVC_NODE_TYPE});\r
+    }\r
+\r
+\r
+       @Override\r
+       public String getMigrationName() {\r
+               return "MigrateSdnaIvlanData";\r
+       }\r
+\r
+}\r
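
MigrateSdnaIvlanData above consumes migration-input-files/sarea-inventory/ivlanData.csv: a header row followed by records with at least four columns (EVC name, PNF name, interface A-side id, ivlan value), where an interface id containing a "." is treated as a p-interface and anything else as a lag-interface. The following is a minimal sketch of that row format and parsing, with made-up sample rows and a hypothetical class name, purely for illustration and not part of this change.

    // Sketch only (not part of this change): illustrates the ivlanData.csv rows the
    // migration above expects -- a header line followed by at least four columns
    // (EVC name, PNF name, interface A-side id, ivlan value). Sample rows are made up.
    import java.util.Arrays;
    import java.util.List;

    public class IvlanCsvSketch {
        public static void main(String[] args) {
            List<String> lines = Arrays.asList(
                    "EVC_Name,PNF_Name,Interface_A_Id,Ivlan_Value",   // header, skipped
                    "evc-0001,pnf-example-1,1.7,2003",                // "." in interface id => p-interface
                    "evc-0002,pnf-example-2,lag-21,2104");            // otherwise           => lag-interface

            for (int i = 1; i < lines.size(); i++) {                  // skip the header row
                String[] cols = lines.get(i).split(",", -1);
                if (cols.length < 4) {
                    continue;                                         // the migration requires >= 4 columns
                }
                String interfaceNodeType = cols[2].trim().contains(".") ? "p-interface" : "lag-interface";
                int ivlan = Integer.parseInt(cols[3].trim());         // non-numeric rows are skipped and counted as failures
                System.out.println(cols[0] + " -> " + interfaceNodeType + " " + cols[2] + " ivlan=" + ivlan);
            }
        }
    }
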
diff --git a/src/main/java/org/onap/aai/migration/v14/PserverDedupWithDifferentSourcesOfTruth.java b/src/main/java/org/onap/aai/migration/v14/PserverDedupWithDifferentSourcesOfTruth.java
new file mode 100644 (file)
index 0000000..80944ff
--- /dev/null
@@ -0,0 +1,358 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v14;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.process.traversal.P;
+import org.apache.tinkerpop.gremlin.structure.*;
+import org.janusgraph.core.attribute.Text;
+import org.javatuples.Pair;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.enums.AAIDirection;
+import org.onap.aai.edges.enums.EdgeProperty;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.*;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.springframework.web.util.UriUtils;
+
+import javax.ws.rs.core.UriBuilder;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URI;
+import java.util.*;
+import java.util.stream.Collectors;
+
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.introspection.Introspector;
+
+@Enabled
+@MigrationPriority(10)
+@MigrationDangerRating(100)
+public class PserverDedupWithDifferentSourcesOfTruth extends EdgeSwingMigrator {
+    /**
+     * Instantiates a new migrator.
+     *
+     * @param engine
+     */
+    private final String PARENT_NODE_TYPE = "pserver";
+    private boolean success = true;
+    protected Set<Object> seen = new HashSet<>();
+    private Map<String, UriBuilder> nodeTypeToUri;
+    private Map<String, Set<String>> nodeTypeToKeys;
+    private static List<String> dmaapMsgList = new ArrayList<String>();
+    private static List<Introspector> dmaapDeleteList = new ArrayList<Introspector>();
+    private static int pserversUpdatedCount = 0;
+    private static int pserversDeletedCount = 0;
+    
+    
+    private static String[] rctSourceOfTruth = new String[]{"AAIRctFeed", "RCT"};
+    private static String[] roSourceOfTruth = new String[]{"AAI-EXTENSIONS", "RO"};
+
+    List<Vertex> RemoveROList = new ArrayList<>();
+
+    public PserverDedupWithDifferentSourcesOfTruth(TransactionalGraphEngine engine , LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+    }
+    @Override
+    public void commit() {
+        engine.commit();
+        createDmaapFiles(dmaapMsgList);
+        createDmaapFilesForDelete(dmaapDeleteList);
+
+    }
+
+    @Override
+    public Status getStatus() {
+       if (success) {
+            return Status.SUCCESS;
+        }
+        else {
+            return Status.FAILURE;
+        }
+    }
+
+    @Override
+    public List<Pair<Vertex, Vertex>> getAffectedNodePairs() {
+        return null;
+    }
+
+    @Override
+    public String getNodeTypeRestriction() {
+        return null;
+    }
+
+    @Override
+    public String getEdgeLabelRestriction() {
+        return null;
+    }
+
+    @Override
+    public String getEdgeDirRestriction() {
+        return null;
+    }
+
+    @Override
+    public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) {
+
+    }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return null;
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "PserverDedupWithDifferentSourcesOfTruth";
+    }
+
+    @Override
+    public void run() {
+       
+       int dupCount = 0;
+        nodeTypeToUri = loader.getAllObjects().entrySet().stream().filter(e -> e.getValue().getGenericURI().contains("{")).collect(
+                Collectors.toMap(
+                        e -> e.getKey(),
+                        e -> UriBuilder.fromPath(e.getValue().getFullGenericURI().replaceAll("\\{"+ e.getKey() + "-", "{"))
+                ));
+
+        nodeTypeToKeys = loader.getAllObjects().entrySet().stream().filter(e -> e.getValue().getGenericURI().contains("{")).collect(
+                Collectors.toMap(
+                        e -> e.getKey(),
+                        e -> e.getValue().getKeys()
+                ));
+
+        List<Vertex> rctList = graphTraversalSource().V().has("aai-node-type", "pserver").has("source-of-truth", P.within(rctSourceOfTruth)).toList();
+        List<Vertex> roList =  graphTraversalSource().V().has("aai-node-type", "pserver").has("source-of-truth", P.within(roSourceOfTruth)).toList();
+        
+        logger.info("Total number of RCT sourced pservers in A&AI :" +rctList.size());
+        logger.info("Total number of RO sourced pservers in A&AI :" +roList.size());
+        
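+        // For every RCT-sourced pserver that has an fqdn, scan the RO-sourced pservers; when the
+        // first dot-delimited token of the RCT fqdn equals the first token of an RO hostname, the
+        // two vertices are treated as the same physical server and merged (RCT kept, RO removed).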
+        for(int i=0;i<rctList.size();i++){
+            Vertex currRct = rctList.get(i);
+            Object currRctFqdn = null;
+            if (currRct.property("fqdn").isPresent() && (currRct.property("fqdn").value() != null)){
+               currRctFqdn = currRct.property("fqdn").value();
+               logger.info("\n");
+               logger.info("Current RCT Pserver hostname: " + currRct.property("hostname").value().toString() + " fqdn: " +currRct.property("fqdn").value().toString());
+                   for(int j=0;j<roList.size();j++){
+                       Vertex currRo = roList.get(j);
+                       Object currRoHostname = null;
+                       if (currRo.property("hostname").isPresent()){
+                               currRoHostname = currRo.property("hostname").value();
+                       }
+                       if (currRoHostname != null){
+                               String[] rctFqdnSplit = currRctFqdn.toString().split("\\.");
+                               String[] roHostnameSplit = currRoHostname.toString().split("\\.");
+                               if (rctFqdnSplit.length >0 && roHostnameSplit.length > 0){
+                                       if(!rctFqdnSplit[0].isEmpty() && !roHostnameSplit[0].isEmpty() && rctFqdnSplit[0].equals(roHostnameSplit[0])){
+                                               logger.info("\tPserver match found - RO Pserver with hostname: "+currRo.property("hostname").value().toString());
+                                               dupCount++;
+                                           try {
+                                               mergePservers(currRct,currRo);
+                                               break;
+                                           } catch (UnsupportedEncodingException e) {
+                                               success = false;
+                                           } catch (AAIException e) {
+                                               success = false;
+                                           }
+                                       }
+                               }
+                       }
+                   }
+            }
+        }
+        RemoveROList.forEach(v -> v.remove());
+        logger.info("\n \n ******* Migration Summary Counts for Dedup of RCT and RO sourced pservers ********* \n");
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Total number of RCT: " +rctList.size());
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Total number of RO: " +roList.size());
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Duplicate pserver count: "+ dupCount);
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RCT updated: "+pserversUpdatedCount);
+        logger.info(this.MIGRATION_SUMMARY_COUNT + "Number of RO deleted: "+ pserversDeletedCount +"\n");
+    }
+       private GraphTraversalSource graphTraversalSource() {
+               return this.engine.asAdmin().getTraversalSource();
+       }
+
+
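+    // Merges an RO-sourced pserver into its RCT counterpart: captures the RO view for the delete
+    // notification, copies the RO hostname into the RCT fqdn, drops the RO complex edge and any
+    // interfaces that already exist on the RCT side, swings the remaining edges onto the RCT
+    // vertex, regenerates child aai-uri values, and queues the RO vertex for removal.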
+    public void mergePservers(Vertex rct, Vertex ro) throws UnsupportedEncodingException, AAIException {
+        Introspector obj = serializer.getLatestVersionView(ro);
+        dmaapDeleteList.add(obj);
+        rct.property("fqdn",ro.property("hostname").value().toString());
+        dropComplexEdge(ro);
+        dropMatchingROPInterfaces(ro, rct);
+        dropMatchingROLagInterfaces(ro, rct);
+        swingEdges(ro, rct, null, null, "BOTH");
+        modifyChildrenUri(rct);
+        if(!(rct.property("pserver-id").isPresent())){
+            rct.property("pserver-id",UUID.randomUUID().toString());
+        }
+        String dmaapMsg = System.nanoTime() + "_" + rct.id().toString() + "_"  + rct.value("resource-version").toString();
+        dmaapMsgList.add(dmaapMsg);
+        pserversUpdatedCount++;
+        logger.info("\tAdding RO pserver to the delete list....");
+        RemoveROList.add(ro);
+        pserversDeletedCount++;
+    }
+
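+    // Deletes p-interfaces under the RO pserver whose interface-name already exists under the RCT
+    // pserver, so the later edge swing does not attach duplicate p-interfaces to the RCT vertex.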
+    private void dropMatchingROPInterfaces(Vertex ro, Vertex rct) {
+        Map<String, Vertex> removeROPIntMap = new HashMap<String, Vertex>();
+       List<Vertex> pIntList = graphTraversalSource().V(ro).in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").toList();
+       if (pIntList != null && !pIntList.isEmpty()) {
+               Iterator<Vertex> pIntListItr = pIntList.iterator();
+               while(pIntListItr.hasNext()){
+                       Vertex pInt = pIntListItr.next();
+                       
+                       removeROPIntMap.put(pInt.property("interface-name").value().toString(), pInt);
+               }
+               Set<String> interfaceNameSet = removeROPIntMap.keySet();
+               List<Vertex> rctPIntList = graphTraversalSource().V(rct).in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").toList();
+               if (rctPIntList != null && !rctPIntList.isEmpty()){
+                       Iterator<Vertex> rctPIntListItr = rctPIntList.iterator();
+                       while(rctPIntListItr.hasNext()){
+                               Vertex rctPInt = rctPIntListItr.next();
+                               String rctIntfName = rctPInt.property("interface-name").value().toString();
+                               if (interfaceNameSet.contains(rctIntfName)){
+                                       Vertex pIntToRemoveFromROPserver = removeROPIntMap.get(rctIntfName);
+                                       String roPIntUri = "roPIntUri";
+                                       if (pIntToRemoveFromROPserver.property("aai-uri").isPresent()){
+                                               roPIntUri = pIntToRemoveFromROPserver.property("aai-uri").value().toString();
+                                       }
+                                       Edge roPIntToPserverEdge = pIntToRemoveFromROPserver.edges(Direction.OUT, "tosca.relationships.network.BindsTo").next();
+                                       roPIntToPserverEdge.remove();
+                                       pIntToRemoveFromROPserver.remove();
+                                       logger.info("\tRemoved p-interface "+roPIntUri + " and its edge to RO pserver, not swinging the p-interface to RCT pserver");
+                               }
+                       }
+               }
+       } 
+       }
+    
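+    // Same de-duplication as above, applied to lag-interfaces under the RO pserver.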
+    private void dropMatchingROLagInterfaces(Vertex ro, Vertex rct) {
+        Map<String, Vertex> removeROLagIntMap = new HashMap<String, Vertex>();
+       List<Vertex> lagIntList = graphTraversalSource().V(ro).in("tosca.relationships.network.BindsTo").has("aai-node-type","lag-interface").toList();
+       if (lagIntList != null && !lagIntList.isEmpty()) {
+               Iterator<Vertex> lagIntListItr = lagIntList.iterator();
+               while(lagIntListItr.hasNext()){
+                       Vertex lagInt = lagIntListItr.next();
+                       
+                       removeROLagIntMap.put(lagInt.property("interface-name").value().toString(), lagInt);
+               }
+               Set<String> interfaceNameSet = removeROLagIntMap.keySet();
+               List<Vertex> rctLagIntList = graphTraversalSource().V(rct).in("tosca.relationships.network.BindsTo").has("aai-node-type","lag-interface").toList();
+               if (rctLagIntList != null && !rctLagIntList.isEmpty()){
+                       Iterator<Vertex> rctLagIntListItr = rctLagIntList.iterator();
+                       while(rctLagIntListItr.hasNext()){
+                               Vertex rctPInt = rctLagIntListItr.next();
+                               String rctIntfName = rctPInt.property("interface-name").value().toString();
+                               if (interfaceNameSet.contains(rctIntfName)){
+                                       Vertex lagIntToRemoveFromROPserver = removeROLagIntMap.get(rctIntfName);
+                                       String roLagIntUri = "roLagIntUri";
+                                       if (lagIntToRemoveFromROPserver.property("aai-uri").isPresent()){
+                                               roLagIntUri = lagIntToRemoveFromROPserver.property("aai-uri").value().toString();
+                                       }
+                                       Edge roLagIntToPserverEdge = lagIntToRemoveFromROPserver.edges(Direction.OUT, "tosca.relationships.network.BindsTo").next();
+                                       roLagIntToPserverEdge.remove();
+                                       lagIntToRemoveFromROPserver.remove();
+                                       logger.info("\tRemoved lag-interface "+roLagIntUri + " and its edge to RO pserver, not swinging the lag-interface to RCT pserver");
+                               }
+                       }
+               }
+       } 
+       }
+    
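+       // Drops the LocatedIn edge between the RO pserver and its complex so that this edge is not
+       // swung onto the RCT pserver during the merge.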
+       public void dropComplexEdge(Vertex ro){
+       List<Vertex> locatedInEdgeVertexList = graphTraversalSource().V(ro).has("aai-node-type", "pserver").out("org.onap.relationships.inventory.LocatedIn").has("aai-node-type","complex").toList();
+       if (locatedInEdgeVertexList != null && !locatedInEdgeVertexList.isEmpty()){
+               Iterator<Vertex> locatedInEdgeVertexListItr = locatedInEdgeVertexList.iterator();
+               while (locatedInEdgeVertexListItr.hasNext()){
+                       Vertex v = locatedInEdgeVertexListItr.next();
+                       if ("complex".equalsIgnoreCase(v.property("aai-node-type").value().toString())){
+                               Edge pserverToComplexEdge = v.edges(Direction.IN, "org.onap.relationships.inventory.LocatedIn").next();
+                               pserverToComplexEdge.remove();
+                       }
+               }
+       }
+    }
+
+
+    private void modifyChildrenUri(Vertex v) throws UnsupportedEncodingException, AAIException {
+        Set<Vertex> parentSet = new HashSet<>();
+        parentSet.add(v);
+        verifyOrAddUri("", parentSet);
+    }
+
+
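+    // Recursively regenerates aai-uri (and aai-uuid where missing) for the given vertices and all
+    // of their contained children, prefixing each child's uri with its parent's uri.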
+    protected void verifyOrAddUri(String parentUri, Set<Vertex> vertexSet) throws UnsupportedEncodingException, AAIException {
+
+
+        String correctUri;
+        for (Vertex v : vertexSet) {
+            seen.add(v.id());
+            //if there is an issue generating the uri catch, log and move on;
+            try {
+                correctUri = parentUri + this.getUriForVertex(v);
+            } catch (Exception e) {
+                logger.error("Vertex has issue generating uri " + e.getMessage() + "\n\t" + this.asString(v));
+                continue;
+            }
+            try {
+                v.property(AAIProperties.AAI_URI, correctUri);
+            } catch (Exception e) {
+                logger.info("\t" + e.getMessage() + "\n\t" + this.asString(v));
+            }
+            if (!v.property(AAIProperties.AAI_UUID).isPresent()) {
+                v.property(AAIProperties.AAI_UUID, UUID.randomUUID().toString());
+            }
+            this.verifyOrAddUri(correctUri, getChildren(v));
+        }
+    }
+
+    protected Set<Vertex> getChildren(Vertex v) {
+
+        Set<Vertex> children = graphTraversalSource().V(v).bothE().not(__.has(EdgeProperty.CONTAINS.toString(), AAIDirection.NONE.toString())).otherV().toSet();
+
+        return children.stream().filter(child -> !seen.contains(child.id())).collect(Collectors.toSet());
+    }
+
+    protected String getUriForVertex(Vertex v) {
+        String aaiNodeType = v.property(AAIProperties.NODE_TYPE).value().toString();
+
+
+        Map<String, String> parameters = this.nodeTypeToKeys.get(aaiNodeType).stream().collect(Collectors.toMap(
+                key -> key,
+                key -> encodeProp(v.property(key).value().toString())
+        ));
+
+        return this.nodeTypeToUri.get(aaiNodeType).buildFromEncodedMap(parameters).toString();
+    }
+    private static String encodeProp(String s) {
+        try {
+            return UriUtils.encode(s, "UTF-8");
+        } catch (UnsupportedEncodingException e) {
+            return "";
+        }
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v15/MigrateBooleanDefaultsToFalse.java b/src/main/java/org/onap/aai/migration/v15/MigrateBooleanDefaultsToFalse.java
new file mode 100644 (file)
index 0000000..3152436
--- /dev/null
@@ -0,0 +1,115 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v15;\r
+\r
+import java.util.HashMap;\r
+import java.util.Map;\r
+import java.util.Optional;\r
+\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+import org.onap.aai.migration.Enabled;\r
+import org.onap.aai.migration.MigrationDangerRating;\r
+import org.onap.aai.migration.MigrationPriority;\r
+import org.onap.aai.migration.Status;\r
+import org.onap.aai.migration.ValueMigrator;\r
+import org.onap.aai.setup.SchemaVersions;\r
+\r
+\r
+@MigrationPriority(20)\r
+@MigrationDangerRating(2)\r
+@Enabled\r
+public class MigrateBooleanDefaultsToFalse extends ValueMigrator {\r
+       protected static final String VNF_NODE_TYPE = "generic-vnf";\r
+       protected static final String VSERVER_NODE_TYPE = "vserver";\r
+       protected static final String VNFC_NODE_TYPE = "vnfc";\r
+       protected static final String L3NETWORK_NODE_TYPE = "l3-network";\r
+       protected static final String SUBNET_NODE_TYPE = "subnet";\r
+       protected static final String LINTERFACE_NODE_TYPE = "l-interface";\r
+       protected static final String VFMODULE_NODE_TYPE = "vf-module";\r
+       \r
+       private static Map<String, Map> map;\r
+    private static Map<String, Boolean> pair1;\r
+    private static Map<String, Boolean> pair2;\r
+    private static Map<String, Boolean> pair3;\r
+    private static Map<String, Boolean> pair4;\r
+    private static Map<String, Boolean> pair5;\r
+    private static Map<String, Boolean> pair6;\r
\r
+       public MigrateBooleanDefaultsToFalse(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {\r
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, setBooleanDefaultsToFalse(), false);\r
+               \r
+       }\r
+       \r
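+       // Builds the node-type -> (property-name -> default-value) map handed to ValueMigrator:\r
+       // each listed boolean property is defaulted to false on the corresponding node types.\r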
+       private static Map<String, Map> setBooleanDefaultsToFalse(){\r
+               map = new HashMap<>();\r
+        pair1 = new HashMap<>();\r
+        pair2 = new HashMap<>();\r
+        pair3 = new HashMap<>();\r
+        pair4 = new HashMap<>();\r
+        pair5 = new HashMap<>();\r
+        pair6 = new HashMap<>();\r
+\r
+\r
+               pair1.put("is-closed-loop-disabled", false);            \r
+               map.put("generic-vnf", pair1);\r
+               map.put("vnfc", pair1);\r
+               map.put("vserver", pair1);\r
+               \r
+               pair2.put("is-bound-to-vpn", false);\r
+               pair2.put("is-provider-network", false);\r
+               pair2.put("is-shared-network", false);\r
+               pair2.put("is-external-network", false);\r
+               map.put("l3-network", pair2);\r
+               \r
+               pair3.put("dhcp-enabled", false);\r
+               map.put("subnet", pair3);\r
+               \r
+               pair4.put("is-port-mirrored", false);\r
+               pair4.put("is-ip-unnumbered", false);\r
+               map.put("l-interface", pair4);\r
+               \r
+               pair5.put("is-base-vf-module", false);\r
+               map.put("vf-module", pair5);\r
+               \r
+               pair6.put("is-ip-unnumbered", false);\r
+               map.put("vlan", pair6);\r
+        \r
+        return map;\r
+       }\r
+\r
+       @Override\r
+       public Status getStatus() {\r
+               return Status.SUCCESS;\r
+       }\r
+\r
+       @Override\r
+       public Optional<String[]> getAffectedNodeTypes() {\r
+               return Optional.of(new String[]{VNF_NODE_TYPE,VSERVER_NODE_TYPE,VNFC_NODE_TYPE,L3NETWORK_NODE_TYPE,SUBNET_NODE_TYPE,LINTERFACE_NODE_TYPE,VFMODULE_NODE_TYPE});\r
+       }\r
+\r
+       @Override\r
+       public String getMigrationName() {\r
+               return "MigrateBooleanDefaultsToFalse";\r
+       }\r
+\r
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/migration/v15/MigrateCloudRegionUpgradeCycle.java b/src/main/java/org/onap/aai/migration/v15/MigrateCloudRegionUpgradeCycle.java
new file mode 100644 (file)
index 0000000..6104d9d
--- /dev/null
@@ -0,0 +1,361 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v15;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.javatuples.Pair;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateCloudRegionUpgradeCycle extends Migrator {
+
+       protected static final String CLOUD_REGION_NODE_TYPE = "cloud-region";
+       protected static final String CLOUD_OWNER = "cloud-owner";
+       protected static final String CLOUD_REGION_ID = "cloud-region-id";
+       protected static final String UPGRADE_CYCLE = "upgrade-cycle";
+       private static final String homeDir = System.getProperty("AJSC_HOME");
+
+       protected final AtomicInteger skippedRowsCount = new AtomicInteger(0);
+       protected final AtomicInteger processedRowsCount = new AtomicInteger(0);
+
+       private boolean success = true;
+       private GraphTraversalSource g = null;
+       protected int headerLength;
+
+       protected final AtomicInteger falloutRowsCount = new AtomicInteger(0);
+
+       public MigrateCloudRegionUpgradeCycle(TransactionalGraphEngine engine, LoaderFactory loaderFactory,
+                       EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+       }
+
+       @Override
+       public void run() {
+               logger.info("---------- Start Updating upgrade-cycle for cloud-region  ----------");
+
+               String configDir = System.getProperty("BUNDLECONFIG_DIR");
+        if (homeDir == null) {
+            logger.info("ERROR: Could not find sys prop AJSC_HOME");
+            success = false;
+            return;
+        }
+        if (configDir == null) {
+            success = false;
+            return;
+        }
+        
+        String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/CloudRegion-ART-migration-data/";
+               String fileName = feedDir + "CloudRegion-ART-migration-data.csv";
+               logger.info(fileName);
+               logger.info("---------- Processing Region Entries from file  ----------");
+
+               Map<String, Vertex> cloudRegionVertexMap = new HashMap<>();
+
+               try {
+                       int cloudRegionCount = 0;
+                       int cloudRegionErrorCount = 0;
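+                       // loadFile returns two maps built from the CSV: index 0 maps cloud-region (column 1)
+                       // to upgrade-cycle (column 4), index 1 maps cloud-region (column 1) to its alias (column 2).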
+                       ArrayList data = loadFile(fileName);
+
+                       Map<String, String> cloudRegionMapFromART = (Map) data.get(0);
+                       Map<String, String> cloudAliasMapFromART = (Map) data.get(1);
+
+                       List<Vertex> cloudRegionList = this.engine.asAdmin().getTraversalSource().V()
+                                       .has(AAIProperties.NODE_TYPE, CLOUD_REGION_NODE_TYPE).has(CLOUD_OWNER, "att-aic").toList();
+
+                       for (Vertex vertex : cloudRegionList) {
+                               String cloudRegionId = null;
+                               cloudRegionId = getCloudRegionIdNodeValue(vertex);
+                               cloudRegionVertexMap.put(cloudRegionId, vertex);
+                       }
+
+                       for (Map.Entry<String, String> entry : cloudRegionMapFromART.entrySet()) {
+                               boolean regionFound = false;
+                               String regionFromART = "";
+                               String aliasFromART = "";
+                               String vertexKey = "";
+                               
+                               regionFromART = (String) entry.getKey();
+
+                               if (cloudRegionVertexMap.containsKey(regionFromART)) {
+                                       regionFound = true;
+                                       vertexKey = regionFromART;
+                               } else {
+                                       aliasFromART = cloudAliasMapFromART.get(regionFromART);
+                                       if (aliasFromART != null && !"".equals(aliasFromART)
+                                                       && cloudRegionVertexMap.containsKey(aliasFromART)) {
+                                               regionFound = true;
+                                               vertexKey = aliasFromART;
+                                       }
+                               }
+
+                               if (regionFound) {
+                                       String upgradeCycle = "";
+                                       try {
+                                               upgradeCycle = (String) entry.getValue();
+
+                                               if (upgradeCycle != null && !"".equals(upgradeCycle)) {
+                                                       Vertex vertex = (Vertex) cloudRegionVertexMap.get(vertexKey);
+                                                       vertex.property(UPGRADE_CYCLE, upgradeCycle);
+                                                       this.touchVertexProperties(vertex, false);
+                                                       logger.info("Updated cloud-region, upgrade-cycle to " + upgradeCycle
+                                                                       + " having cloud-region-id : " + vertexKey);
+                                                       cloudRegionCount++;
+                                               } else {
+                                                       logger.info("upgrade-cycle value from ART is null or empty for the cloud-region-id : "
+                                                                       + vertexKey);
+                                               }
+                                       } catch (Exception e) {
+                                               success = false;
+                                               cloudRegionErrorCount++;
+                                               logger.error(MIGRATION_ERROR
+                                                               + "encountered exception for upgrade-cycle update having cloud-region-id :" + vertexKey,
+                                                               e);
+                                       }
+                               } else {
+                                       logger.info("Region "+regionFromART+" from ART is not found in A&AI");
+                               }
+
+                       }
+
+                       logger.info("\n \n ******* Final Summary of Updated upgrade-cycle for cloud-region  Migration ********* \n");
+                       logger.info(MIGRATION_SUMMARY_COUNT + "Number of cloud-region updated: " + cloudRegionCount + "\n");
+                       logger.info(MIGRATION_SUMMARY_COUNT + "Number of cloud-region failed to update due to error : "
+                                       + cloudRegionErrorCount + "\n");
+
+               } catch (FileNotFoundException e) {
+                       logger.info("ERROR: Could not find file " + fileName, e.getMessage());
+                       success = false;
+               } catch (IOException e) {
+                       logger.info("ERROR: Issue reading file " + fileName, e);
+                       success = false;
+               } catch (Exception e) {
+                       logger.info("encountered exception", e);
+                       e.printStackTrace();
+                       success = false;
+               }
+       }
+
+       /**
+        * Load file to the map for processing
+        * 
+        * @param fileName
+        * @return
+        * @throws Exception
+        */
+       protected ArrayList loadFile(String fileName) throws Exception {
+               List<String> lines = Files.readAllLines(Paths.get(fileName));
+               return this.getFileContents(lines);
+       }
+
+       /**
+        * Build the region-to-upgrade-cycle and region-to-alias maps from the file lines.
+        * 
+        * @param lines
+        * @return
+        * @throws Exception
+        */
+       protected ArrayList getFileContents(List<String> lines) throws Exception {
+
+               final Map<String, String> regionMap = new ConcurrentHashMap<>();
+               final Map<String, String> aliasMap = new ConcurrentHashMap<>();
+               final ArrayList fileContent = new ArrayList();
+
+               processAndRemoveHeader(lines);
+
+               logger.info("Total rows count excluding header: " + lines.size());
+
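+               // First pass: build the region -> upgrade-cycle map; second pass below builds the
+               // region -> alias map.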
+               lines.stream().filter(line -> !line.isEmpty()).map(line -> Arrays.asList(line.split("\\s*,\\s*", -1)))
+                               .map(this::processRegionUpgradeCycle).filter(Optional::isPresent).map(Optional::get).forEach(p -> {
+                                       processedRowsCount.getAndIncrement();
+                                       String regionName = p.getValue0();
+                                       if (!regionMap.containsKey(regionName)) {
+                                               regionMap.put(p.getValue0(), p.getValue1());
+                                       }
+                               });
+
+               fileContent.add(regionMap);
+
+               lines.stream().filter(line -> !line.isEmpty()).map(line -> Arrays.asList(line.split("\\s*,\\s*", -1)))
+                               .map(this::processRegionAlias).filter(Optional::isPresent).map(Optional::get).forEach(p -> {
+                                       processedRowsCount.getAndIncrement();
+                                       String regionName = p.getValue0();
+                                       if (!aliasMap.containsKey(regionName)) {
+                                               aliasMap.put(p.getValue0(), p.getValue1());
+                                       }
+                               });
+               fileContent.add(aliasMap);
+               return fileContent;
+
+       }
+
+       /**
+        * Verify line has the necessary details.
+        * 
+        * @param line
+        * @return
+        */
+       protected boolean verifyLine(List<String> line) {
+               if (line.size() != headerLength) {
+                       logger.info("ERROR: Input line should contain " + headerLength + " columns, contains " + line.size()
+                                       + " instead.");
+                       this.skippedRowsCount.getAndIncrement();
+                       return false;
+               }
+               return true;
+       }
+
+       /**
+        * Get the cloud-region and its alias from the line.
+        * 
+        * @param line
+        * @return
+        */
+       protected Optional<Pair<String, String>> processRegionAlias(List<String> line) {
+               //logger.info("Processing line... " + line.toString());
+               int lineSize = line.size();
+               if (lineSize < 4) {
+                       logger.info("Skipping line, does not contain region and/or upgrade-cycle columns");
+                       skippedRowsCount.getAndIncrement();
+                       return Optional.empty();
+               }
+
+               String cloudRegion = line.get(0);
+               String upgradeCycle = line.get(1).replaceAll("^\"|\"$", "").replaceAll("\\s+", "");
+
+               if (cloudRegion.isEmpty()) {
+                       logger.info("Line missing cloudRegion name: " + line);
+                       falloutRowsCount.getAndIncrement();
+                       return Optional.empty();
+               }
+
+               return Optional.of(Pair.with(cloudRegion, upgradeCycle));
+       }
+
+       /**
+        * Get the cloud-region and its upgrade-cycle from the line.
+        * 
+        * @param line
+        * @return
+        */
+       protected Optional<Pair<String, String>> processRegionUpgradeCycle(List<String> line) {
+               //logger.info("Processing line... " + line.toString());
+               int lineSize = line.size();
+               if (lineSize < 4) {
+                       logger.info("Skipping line, does not contain region and/or upgrade-cycle columns");
+                       skippedRowsCount.getAndIncrement();
+                       return Optional.empty();
+               }
+
+               String cloudRegion = line.get(0);
+               String upgradeCycle = line.get(3).replaceAll("^\"|\"$", "").replaceAll("\\s+", "");
+
+               if (cloudRegion.isEmpty()) {
+                       logger.info("Line missing cloudRegion name: " + line);
+                       falloutRowsCount.getAndIncrement();
+                       return Optional.empty();
+               }
+
+               return Optional.of(Pair.with(cloudRegion, upgradeCycle));
+       }
+
+       /**
+        * Verify header of the csv and remove it from the list.
+        * 
+        * @param lines
+        * @throws Exception
+        */
+       protected String processAndRemoveHeader(List<String> lines) throws Exception {
+               String firstLine;
+               if (lines.isEmpty()) {
+                       String msg = "ERROR: Missing Header in file";
+                       success = false;
+                       logger.error(msg);
+                       throw new Exception(msg);
+               } else {
+                       firstLine = lines.get(0);
+               }
+
+               this.headerLength = firstLine.split("\\s*,\\s*", -1).length;
+               logger.info("headerLength: " + headerLength);
+               if (this.headerLength < 4) {
+                       String msg = "ERROR: Input file should have 4 columns";
+                       success = false;
+                       logger.error(msg);
+                       throw new Exception(msg);
+               }
+
+               return lines.remove(0);
+       }
+
+       private String getCloudRegionIdNodeValue(Vertex vertex) {
+               String propertyValue = "";
+               if (vertex != null && vertex.property(CLOUD_REGION_ID).isPresent()) {
+                       propertyValue = vertex.property(CLOUD_REGION_ID).value().toString();
+               }
+               return propertyValue;
+       }
+
+       @Override
+       public Status getStatus() {
+               if (success) {
+                       return Status.SUCCESS;
+               } else {
+                       return Status.FAILURE;
+               }
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.of(new String[] { CLOUD_REGION_NODE_TYPE });
+       }
+
+       @Override
+       public String getMigrationName() {
+               return "MigrateCloudRegionUpgradeCycle";
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v15/MigrateInMaintDefaultToFalse.java b/src/main/java/org/onap/aai/migration/v15/MigrateInMaintDefaultToFalse.java
new file mode 100644 (file)
index 0000000..d00d4f1
--- /dev/null
@@ -0,0 +1,99 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v15;\r
+\r
+import java.util.HashMap;\r
+import java.util.List;\r
+import java.util.Map;\r
+import java.util.Optional;\r
+\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+import org.onap.aai.migration.Enabled;\r
+import org.onap.aai.migration.MigrationDangerRating;\r
+import org.onap.aai.migration.MigrationPriority;\r
+import org.onap.aai.migration.Status;\r
+import org.onap.aai.migration.ValueMigrator;\r
+import org.onap.aai.setup.SchemaVersions;\r
+\r
+\r
+@MigrationPriority(20)\r
+@MigrationDangerRating(2)\r
+@Enabled\r
+public class MigrateInMaintDefaultToFalse extends ValueMigrator {\r
+       \r
+       protected static final String VNF_NODE_TYPE = "generic-vnf";\r
+       protected static final String LINTERFACE_NODE_TYPE = "l-interface";\r
+       protected static final String LAG_INTERFACE_NODE_TYPE = "lag-interface";\r
+       protected static final String LOGICAL_LINK_NODE_TYPE = "logical-link";\r
+       protected static final String PINTERFACE_NODE_TYPE = "p-interface";\r
+       protected static final String VLAN_NODE_TYPE = "vlan";\r
+       protected static final String VNFC_NODE_TYPE = "vnfc";\r
+       protected static final String VSERVER_NODE_TYPE = "vserver";\r
+       protected static final String PSERVER_NODE_TYPE = "pserver";\r
+       protected static final String PNF_NODE_TYPE = "pnf";\r
+       protected static final String NOS_SERVER_NODE_TYPE = "nos-server";\r
+               \r
+       private static Map<String, Map> map;\r
+    private static Map<String, Boolean> pair;\r
\r
+       public MigrateInMaintDefaultToFalse(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {\r
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, setInMaintToFalse(), false);\r
+       }       \r
+               \r
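+       // Builds the ValueMigrator property map that defaults in-maint to false on every listed node type.\r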
+       private static Map<String, Map> setInMaintToFalse(){\r
+               map = new HashMap<>();\r
+        pair = new HashMap<>();\r
+\r
+               pair.put("in-maint", false);\r
+               \r
+               map.put("generic-vnf", pair);\r
+               map.put("l-interface", pair);\r
+               map.put("lag-interface", pair);\r
+               map.put("logical-link", pair);\r
+               map.put("p-interface", pair);\r
+               map.put("vlan", pair);\r
+               map.put("vnfc", pair);\r
+               map.put("vserver", pair);\r
+               map.put("pserver", pair);\r
+        map.put("pnf", pair);\r
+        map.put("nos-server", pair);\r
+        \r
+        return map;\r
+       }       \r
+\r
+       @Override\r
+       public Status getStatus() {\r
+               return Status.SUCCESS;\r
+       }\r
+\r
+       @Override\r
+       public Optional<String[]> getAffectedNodeTypes() {\r
+               return Optional.of(new String[]{VNF_NODE_TYPE,LINTERFACE_NODE_TYPE,LAG_INTERFACE_NODE_TYPE,LOGICAL_LINK_NODE_TYPE,PINTERFACE_NODE_TYPE,VLAN_NODE_TYPE,VNFC_NODE_TYPE,VSERVER_NODE_TYPE,PSERVER_NODE_TYPE,PNF_NODE_TYPE,NOS_SERVER_NODE_TYPE});\r
+       }\r
+\r
+       @Override\r
+       public String getMigrationName() {\r
+               return "MigrateInMaintDefaultToFalse";\r
+       }\r
+\r
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/migration/v15/MigrateRadcomChanges.java b/src/main/java/org/onap/aai/migration/v15/MigrateRadcomChanges.java
new file mode 100644 (file)
index 0000000..3ae17bd
--- /dev/null
@@ -0,0 +1,733 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v15;
+
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.javatuples.Pair;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.EdgeSwingMigrator;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+
+@MigrationPriority(26)
+@MigrationDangerRating(100)
+@Enabled
+public class MigrateRadcomChanges extends EdgeSwingMigrator {
+
+       private final String SERVICE_MODEL_TYPE = "Service";
+       private final String RESOURCE_MODEL_TYPE = "VNF-Resource";
+       private final String MODEL_INVARIANT_ID = "model-invariant-id";
+       private final String MODEL_INVARIANT_ID_LOCAL = "model-invariant-id-local";
+       private final String MODEL_VERSION_ID = "model-version-id";
+       private final String MODEL_VERSION_ID_LOCAL = "model-version-id-local";
+       private final String MODEL_CUSTOMIZATION_ID = "model-customization-id";
+       private final String PERSONA_MODEL_VERSION = "persona-model-version";
+       private final String GENERIC_VNF = "generic-vnf";
+       private final String VNF_NAME = "vnf-name";
+       private final String VNF_TYPE = "vnf-type";
+       private final String SERVICE_INSTANCE = "service-instance";
+       private final String SERVICE_INSTANCE_ID = "service-instance-id";
+       private final String VF_MODULE = "vf-module";
+       private final String VF_MODULE_ID = "vf-module-id";
+       private final String MODEL = "model";
+       private final String MODEL_VER = "model-ver";
+       private final String MODEL_NAME = "model-name";
+       private final String MODEL_VERSION = "model-version";
+       private final String MODEL_ELEMENT = "model-element";
+       private final String VSERVER = "vserver";
+       private final String VSERVER_ID = "vserver-id";
+       private final String IMAGE = "image";
+       private final String IMAGE_NAME = "image-name";
+       private final String TENANT = "tenant";
+       private final String CLOUD_REGION = "cloud-region";
+       
+       private static boolean success = true;
+    private static boolean checkLog = false;
+    private static GraphTraversalSource g = null;
+    private int headerLength;
+    private int genericVnfMigrationSuccess = 0;
+    private int genericVnfMigrationFailure = 0;
+    private int serviceInstanceMigrationSuccess = 0;
+    private int serviceInstanceMigrationFailure = 0;
+    private int vfModuleMigrationSuccess = 0;
+    private int vfModuleMigrationFailure = 0;
+    private int imageMigrationSuccess = 0;
+    private int imageMigrationFailure = 0;
+    
+    private static List<String> dmaapMsgList = new ArrayList<String>();
+    private static final String homeDir = System.getProperty("AJSC_HOME");
+
+    protected class VfModuleFileData {
+       String vfModuleId;
+               String vfModuleModelName;
+       String imageName;       
+       
+       public VfModuleFileData(String vfModuleId, String vfModuleModelName, String imageName) {
+               this.vfModuleId = vfModuleId;
+               this.vfModuleModelName = vfModuleModelName;
+               this.imageName = imageName;
+       }
+       
+       public String getVfModuleId() {
+                       return vfModuleId;
+               }
+               public void setVfModuleId(String vfModuleId) {
+                       this.vfModuleId = vfModuleId;
+               }
+               public String getVfModuleModelName() {
+                       return vfModuleModelName;
+               }
+               public void setVfModuleModelName(String vfModuleModelName) {
+                       this.vfModuleModelName = vfModuleModelName;
+               }
+               public String getImageName() {
+                       return imageName;
+               }
+               public void setImageName(String imageName) {
+                       this.imageName = imageName;
+               }
+    }
+    
+    
+    
+    public MigrateRadcomChanges(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        g = this.engine.asAdmin().getTraversalSource();
+    }
+    
+    @Override   
+    public void executeModifyOperation() {
+        logger.info("---------- Start migration ----------");
+        String configDir = System.getProperty("BUNDLECONFIG_DIR");
+        if (homeDir == null) {
+            logger.info(MIGRATION_ERROR + "ERROR: Could not find sys prop AJSC_HOME");
+            success = false;
+            return;
+        }
+        if (configDir == null) {
+            success = false;
+            return;
+        }
+        
+        ArrayList<VfModuleFileData> vfModuleFileLineList = new ArrayList<VfModuleFileData>();
+       
+        String feedDir = homeDir + "/" + configDir + "/" + "migration-input-files/radcom-changes/";
+        String fileName = feedDir+ "INPUT-MODEL.csv";
+        int genericVnfFileLineCounter = 0;
+      
+        logger.info(fileName);
+        logger.info("---------- Reading all file types and vf-modules ----------");
+        ArrayList<String> fileTypeList = new ArrayList<String>();
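+        // INPUT-MODEL.csv layout (7 columns): model type or vf-module-id, vnf-type, model-invariant-id,
+        // model-version-id, model-customization-id, vf-module model-name, image-name. This first pass
+        // collects the distinct vnf-types and the vf-module rows.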
+        try  {
+               List<String> lines = Files.readAllLines(Paths.get(fileName));
+            Iterator<String> lineItr = lines.iterator();
+            int typeFileLineCounter = 0;
+            while (lineItr.hasNext()){
+               String line = lineItr.next().replace("\n", "").replace("\r", "");
+               if (!line.isEmpty()) {
+                    if (typeFileLineCounter != 0) {
+                        String[] colList = line.split("\\s*,\\s*", -1);
+                        if(!colList[0].equalsIgnoreCase(SERVICE_MODEL_TYPE) && !colList[0].equalsIgnoreCase(RESOURCE_MODEL_TYPE)) {
+                                               vfModuleFileLineList.add(new VfModuleFileData(colList[0], colList[5], colList[6]));
+                                       }
+                        if(!colList[1].isEmpty() && !fileTypeList.contains(colList[1])) {
+                               fileTypeList.add(colList[1]);
+                        }
+                    } else {
+                        this.headerLength = line.split("\\s*,\\s*", -1).length;
+                        logger.info("headerLength: " + headerLength + "\n");
+                        if (this.headerLength != 7 ){
+                            logger.info(MIGRATION_ERROR + "ERROR: Input file should have 7 columns");
+                            success = false;
+                            return;
+                        }
+                    }
+                }
+                typeFileLineCounter++;
+            }
+        } catch (FileNotFoundException e) {
+            logger.info(MIGRATION_ERROR + "ERROR: Could not find file " + fileName, e.getMessage());
+            success = false;
+            checkLog = true;
+        } catch (IOException e) {
+            logger.info(MIGRATION_ERROR + "ERROR: Issue reading file " + fileName, e);
+            success = false;
+        } catch (Exception e) {
+            logger.info(MIGRATION_ERROR + "encountered exception", e);
+            e.printStackTrace();
+            success = false;
+        }
+        
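+        // For each distinct vnf-type: re-read INPUT-MODEL.csv to pick up its new service and resource
+        // model ids, read INPUT-VNF.csv (2 columns: vnf-name, vnf-type) to get the generic-vnfs to
+        // update, then apply the new model identifiers to those generic-vnfs.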
+        int numberOfFileTypes = fileTypeList.size();
+        for(int i = 0; i < numberOfFileTypes; i++) {
+               String newServiceModelInvariantId = "";
+               String newServiceModelVersionId = "";
+               String newResourceModelInvariantId = "";
+               String newResourceModelVersionId = "";
+               String newResourceModelCustomizationId = "";
+               ArrayList<String> genericVnfList = new ArrayList<String>();
+               
+
+            int modelFileLineCounter = 0;
+            genericVnfFileLineCounter = 0;
+            fileName = feedDir+ "INPUT-MODEL.csv";
+               
+               logger.info(fileName);
+               logger.info("---------- Processing Entries from file  ----------");
+               try  {
+                       List<String> lines = Files.readAllLines(Paths.get(fileName));
+                       Iterator<String> lineItr = lines.iterator();
+                       while (lineItr.hasNext()){
+                               String line = lineItr.next().replace("\n", "").replace("\r", "");
+                               if (!line.isEmpty()) {
+                                       if (modelFileLineCounter != 0) {
+                                               String[] colList = line.split("\\s*,\\s*", -1);
+                                               if(colList[1].equals(fileTypeList.get(i)) && colList[0].equalsIgnoreCase(SERVICE_MODEL_TYPE)) {
+                                                       newServiceModelInvariantId = colList[2];
+                                                       newServiceModelVersionId = colList[3];
+                                               }
+                                               else if(colList[1].equals(fileTypeList.get(i)) && colList[0].equalsIgnoreCase(RESOURCE_MODEL_TYPE)) {
+                                                       newResourceModelInvariantId = colList[2];
+                                                       newResourceModelVersionId = colList[3];
+                                                       newResourceModelCustomizationId = colList[4];
+                                               }
+                                       }
+                               }
+                               modelFileLineCounter++;
+                       }
+                       fileName = feedDir+ "INPUT-VNF.csv";
+                       logger.info(fileName);
+                       logger.info("---------- Processing Entries from file  ----------");
+                       lines = Files.readAllLines(Paths.get(fileName));
+                       lineItr = lines.iterator();
+                       while (lineItr.hasNext()){
+                               String line = lineItr.next().replace("\n", "").replace("\r", "");
+                               if (!line.isEmpty()) {
+                                       if (genericVnfFileLineCounter != 0) {
+                                               String[] colList = line.split("\\s*,\\s*", -1);
+                                               if(colList[1].equals(fileTypeList.get(i))) {
+                                                       genericVnfList.add(colList[0]);
+                                               }
+                                       } else {
+                                               this.headerLength = line.split("\\s*,\\s*", -1).length;
+                                               logger.info("headerLength: " + headerLength + "\n");
+                                               if (this.headerLength != 2){
+                                                       logger.info(MIGRATION_ERROR + "ERROR: Input file should have 2 columns");
+                                                       success = false;
+                                                       return;
+                                               }
+                                       }
+                               }
+                               genericVnfFileLineCounter++;
+                       }       
+                       updateGenericVnfs(fileTypeList.get(i), genericVnfList, newServiceModelInvariantId, newServiceModelVersionId,
+                       newResourceModelInvariantId, newResourceModelVersionId, newResourceModelCustomizationId, vfModuleFileLineList);
+               } catch (FileNotFoundException e) {
+                logger.info(MIGRATION_ERROR + "ERROR: Could not find file " + fileName, e.getMessage());
+                success = false;
+                checkLog = true;
+            } catch (IOException e) {
+                logger.info(MIGRATION_ERROR + "ERROR: Issue reading file " + fileName, e);
+                success = false;
+            } catch (Exception e) {
+                logger.info(MIGRATION_ERROR + "encountered exception", e);
+                e.printStackTrace();
+                success = false;
+            }
+        }
+        logger.info ("\n \n ******* Final Summary for RADCOM Change Migration ********* \n");
+        logger.info(MIGRATION_SUMMARY_COUNT + "Total generic-vnfs in File: "+(genericVnfFileLineCounter + 1));
+        logger.info(MIGRATION_SUMMARY_COUNT + " generic-vnfs processed: "+ genericVnfMigrationSuccess);
+        logger.info(MIGRATION_SUMMARY_COUNT + " generic-vnfs failed to process: "+ genericVnfMigrationFailure);
+        logger.info(MIGRATION_SUMMARY_COUNT + " service-instances processed: "+ serviceInstanceMigrationSuccess);
+        logger.info(MIGRATION_SUMMARY_COUNT + " service-instances failed to process: "+ serviceInstanceMigrationFailure);
+        logger.info(MIGRATION_SUMMARY_COUNT + " vf-modules processed: "+ vfModuleMigrationSuccess);
+        logger.info(MIGRATION_SUMMARY_COUNT + " vf-modules failed to process: "+ vfModuleMigrationFailure);
+        logger.info(MIGRATION_SUMMARY_COUNT + " images processed: "+ imageMigrationSuccess);
+        logger.info(MIGRATION_SUMMARY_COUNT + " images failed to process: "+ imageMigrationFailure +"\n");
+    }
+       
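+       // Looks up the new resource model/model-ver; if it does not exist, every vnf in the list is
+       // counted as a failure, otherwise each generic-vnf matching the given vnf-type is repointed
+       // at the new model identifiers (related service-instances, vf-modules and images are tracked
+       // by the summary counters above).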
+       private void updateGenericVnfs(String vnfType, ArrayList<String> genericVnfList, String newServiceModelInvariantId,
+                       String newServiceModelVersionId, String newResourceModelInvariantId, String newResourceModelVersionId, 
+                       String newResourceModelCustomizationId, ArrayList<VfModuleFileData> vfModuleFileLineList) {
+               int numberOfNames = genericVnfList.size();
+               Vertex newModelVerNode = null;
+               GraphTraversal<Vertex, Vertex> modelVerNodeList = g.V().has(AAIProperties.NODE_TYPE, MODEL).
+                               has(MODEL_INVARIANT_ID, newResourceModelInvariantId).in("org.onap.relationships.inventory.BelongsTo").
+                               has(AAIProperties.NODE_TYPE, MODEL_VER).has(MODEL_VERSION_ID, newResourceModelVersionId);
+               if(!modelVerNodeList.hasNext()) {
+                       logger.info(MIGRATION_ERROR + "ERROR: Model " + newResourceModelInvariantId + " with model-ver "
+                                        + newResourceModelVersionId + " does not exist in database \n");
+                       for(int i = 0; i < numberOfNames; i++) {
+                               genericVnfMigrationFailure++;
+                       }
+               }
+               else {
+                       newModelVerNode = modelVerNodeList.next();
+                       for(int i = 0; i < numberOfNames; i++) {
+                               GraphTraversal<Vertex, Vertex> genericVnfNodeList = g.V().has(AAIProperties.NODE_TYPE, GENERIC_VNF).
+                                               has(VNF_NAME, genericVnfList.get(i)).has(VNF_TYPE, vnfType);
+                               if(!genericVnfNodeList.hasNext()) {
+                                       logger.info(MIGRATION_ERROR + "ERROR: Failure to update generic-vnf " + genericVnfList.get(i) + 
+                                                       " Graph Traversal failed \n");
+                                       genericVnfMigrationFailure++;
+                               }
+                               while (genericVnfNodeList.hasNext()) {
+                                       Vertex genericVnfVtx = genericVnfNodeList.next();
+                                       boolean updateSuccess = false;
+                                       if (genericVnfVtx != null) {
+                                               logger.info("Updating generic-vnf " + genericVnfVtx.value(VNF_NAME) + " with "
+                                                               + "current model-invariant-id "
+                                                               + (genericVnfVtx.property(MODEL_INVARIANT_ID).isPresent()
+                                                                               ? genericVnfVtx.value(MODEL_INVARIANT_ID) : "null")
+                                                                               + ", current model-version-id " 
+                                                                               + (genericVnfVtx.property(MODEL_VERSION_ID).isPresent()
+                                                                                               ? genericVnfVtx.value(MODEL_VERSION_ID) : "null")
+                                                                                               + ", and current model-customization-id " 
+                                                                                               + (genericVnfVtx.property(MODEL_CUSTOMIZATION_ID).isPresent()
+                                                                                                               ? genericVnfVtx.value(MODEL_CUSTOMIZATION_ID) : "null")
+                                                                                                               + " to use model-invariant-id " + newResourceModelInvariantId + ","
+                                                                                                               + " model-version-id " + newResourceModelVersionId + " and model-customization-id "
+                                                                                                               + newResourceModelCustomizationId);
+                                               try {
+                                                       Vertex oldModelVerNode = null;
+                                                       GraphTraversal<Vertex, Vertex> modelVerQuery= g.V(genericVnfVtx).out("org.onap.relationships.inventory.IsA")
+                                                                       .has(AAIProperties.NODE_TYPE, MODEL_VER);
+                                                       if(modelVerQuery.hasNext()) {
+                                                               oldModelVerNode = modelVerQuery.next();
+                                                       }
+                                                       genericVnfVtx.property(MODEL_INVARIANT_ID_LOCAL, newResourceModelInvariantId);
+                                                       genericVnfVtx.property(MODEL_VERSION_ID_LOCAL, newResourceModelVersionId);
+                                                       genericVnfVtx.property(MODEL_CUSTOMIZATION_ID, newResourceModelCustomizationId);
+                                                       if(newModelVerNode.property(MODEL_VERSION).isPresent()) {
+                                                               genericVnfVtx.property(PERSONA_MODEL_VERSION, newModelVerNode.value(MODEL_VERSION));
+                                                       }
+                                                       this.touchVertexProperties(genericVnfVtx, false);
+                                                       if(oldModelVerNode != null) {
+                                                               this.swingEdges(oldModelVerNode, newModelVerNode, GENERIC_VNF, "org.onap.relationships.inventory.IsA", "IN");
+                                                       }
+                                                       else {
+                                                               this.createPrivateEdge(newModelVerNode, genericVnfVtx);
+                                                       }
+                                                       updateSuccess = true;   
+                                               } catch (Exception e) {
+                                                       logger.info(e.toString());
+                                                       logger.info(MIGRATION_ERROR + "ERROR: Failure to update generic-vnf " + genericVnfList.get(i) + "\n");
+                                                       genericVnfMigrationFailure++;                                                           
+                                               }
+                                               if(updateSuccess) {
+                                                       String dmaapMsg = System.nanoTime() + "_" + genericVnfVtx.id().toString() + "_" + 
+                                                                       genericVnfVtx.value("resource-version").toString();
+                                                       dmaapMsgList.add(dmaapMsg);
+                                                       logger.info("Update of generic-vnf " + genericVnfList.get(i) + " successful \n");
+                                                       genericVnfMigrationSuccess++;
+                                                       updateServiceInstances(vnfType, genericVnfList.get(i), newServiceModelInvariantId,
+                                                                       newServiceModelVersionId);
+                                                       updateVfModules(vnfType, genericVnfList.get(i), newResourceModelInvariantId, newResourceModelVersionId,
+                                                                       vfModuleFileLineList);
+                                               }
+                                       }
+                                       else {
+                                               logger.info(MIGRATION_ERROR + "ERROR: Failure to update generic-vnf " + genericVnfList.get(i) + 
+                                                               " Graph Traversal returned an empty vertex \n");
+                                               genericVnfMigrationFailure++;
+                                       }
+                               }
+                       }
+               }
+       }
+       
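+	/**
+	 * Finds the service-instances composed of the given generic-vnf and updates their
+	 * model-invariant-id and model-version-id to the new service model, swinging (or creating)
+	 * the IsA edge to the new model-ver and queuing a DMaaP event for each updated node.
+	 */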
+       private void updateServiceInstances(String vnfType, String vnfName, String newServiceModelInvariantId,
+                       String newServiceModelVersionId) {
+               GraphTraversal<Vertex, Vertex> serviceInstanceNodeList = g.V().
+                       has(AAIProperties.NODE_TYPE, GENERIC_VNF).has(VNF_NAME, vnfName).has(VNF_TYPE, vnfType).
+                       in("org.onap.relationships.inventory.ComposedOf").has(AAIProperties.NODE_TYPE, SERVICE_INSTANCE);
+               Vertex newModelVerNode = null;
+               GraphTraversal<Vertex, Vertex> modelVerNodeList = g.V().has(AAIProperties.NODE_TYPE, MODEL).
+                               has(MODEL_INVARIANT_ID, newServiceModelInvariantId).in("org.onap.relationships.inventory.BelongsTo").
+                               has(AAIProperties.NODE_TYPE, MODEL_VER).has(MODEL_VERSION_ID, newServiceModelVersionId);
+               if(!modelVerNodeList.hasNext()) {
+                       logger.info(MIGRATION_ERROR + "ERROR: Model " + newServiceModelInvariantId + " with model-ver "
+                                        + newServiceModelVersionId + " does not exist in database \n");
+                       while(serviceInstanceNodeList.hasNext()) { 
+                               serviceInstanceNodeList.next();
+                               serviceInstanceMigrationFailure++;
+                       }
+               }
+               else {
+                       newModelVerNode = modelVerNodeList.next();
+                       while (serviceInstanceNodeList.hasNext()) {
+                               Vertex serviceInstanceVtx = serviceInstanceNodeList.next();
+                               boolean updateSuccess = false;
+                               if (serviceInstanceVtx != null) {
+                                       logger.info("Updating service-instance " + serviceInstanceVtx.value(SERVICE_INSTANCE_ID)
+                                                       + " with current model-invariant-id " 
+                                                       + (serviceInstanceVtx.property(MODEL_INVARIANT_ID).isPresent()
+                                                                       ? serviceInstanceVtx.value(MODEL_INVARIANT_ID) : "null")
+                                                                       + " and current model-version-id "  
+                                                                       + (serviceInstanceVtx.property(MODEL_VERSION_ID).isPresent()
+                                                                                       ? serviceInstanceVtx.value(MODEL_VERSION_ID) : "null")
+                                                                                       + " to use model-invariant-id " + newServiceModelInvariantId + " and"
+                                                                                       + " model-version-id " + newServiceModelVersionId);
+                                       try {
+                                               Vertex oldModelVerNode = null;
+                                               GraphTraversal<Vertex, Vertex> modelVerQuery= g.V(serviceInstanceVtx).out("org.onap.relationships.inventory.IsA")
+                                                               .has(AAIProperties.NODE_TYPE, MODEL_VER);
+                                               if(modelVerQuery.hasNext()) {
+                                                       oldModelVerNode = modelVerQuery.next();
+                                               }
+                                               serviceInstanceVtx.property(MODEL_INVARIANT_ID_LOCAL, newServiceModelInvariantId);
+                                               serviceInstanceVtx.property(MODEL_VERSION_ID_LOCAL, newServiceModelVersionId);
+                                               if(newModelVerNode.property(MODEL_VERSION).isPresent()) {
+                                                       serviceInstanceVtx.property(PERSONA_MODEL_VERSION, newModelVerNode.value(MODEL_VERSION));
+                                               }
+                                               this.touchVertexProperties(serviceInstanceVtx, false);
+                                               if(oldModelVerNode != null) {
+                                                       this.swingEdges(oldModelVerNode, newModelVerNode, SERVICE_INSTANCE, "org.onap.relationships.inventory.IsA", "IN");
+                                               }
+                                               else {
+                                                       this.createPrivateEdge(newModelVerNode, serviceInstanceVtx);
+                                               }
+                                               updateSuccess = true;
+                                       } catch (Exception e) {
+                                               logger.info(e.toString());
+                                               logger.info(MIGRATION_ERROR + "ERROR: Failure to update service-instance " 
+                                                               + serviceInstanceVtx.value(SERVICE_INSTANCE_ID) + "\n");
+                                               serviceInstanceMigrationFailure++;                                                              
+                                       }
+                                       if(updateSuccess) {
+                                               String dmaapMsg = System.nanoTime() + "_" + serviceInstanceVtx.id().toString() + "_"    + 
+                                                               serviceInstanceVtx.value("resource-version").toString();
+                                               dmaapMsgList.add(dmaapMsg);
+                                               logger.info("Update of service-instance " 
+                                                               + serviceInstanceVtx.value(SERVICE_INSTANCE_ID) + " successful \n");
+                                               serviceInstanceMigrationSuccess++;         
+                                       }
+                               }
+                       }
+               }
+       }
+       
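+	/**
+	 * For each vf-module line from the input file that belongs to the given generic-vnf, resolves the
+	 * vf-module's model-invariant-id and model-version-id by model name under the new resource model
+	 * (searching model-elements recursively), applies the new ids, swings (or creates) the IsA edge,
+	 * queues a DMaaP event, and updates the related vserver and image relationships.
+	 */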
+       private void updateVfModules(String vnfType, String vnfName, String newResourceModelInvariantId,
+                       String newResourceModelVersionId, ArrayList<VfModuleFileData> vfModuleFileLineList) {
+               int numberOfLines = vfModuleFileLineList.size();
+               ArrayList<Integer> processedNodes = new ArrayList<Integer>();
+               for(int i = 0; i < numberOfLines; i++) {
+                       VfModuleFileData currentLine = vfModuleFileLineList.get(i);
+                       String vfModuleId = currentLine.getVfModuleId();
+                       String vfModuleModelName = currentLine.getVfModuleModelName();
+                       String imageName = currentLine.getImageName();
+                       String vfModuleInvariantId = "";
+                       String vfModuleVersionId = "";
+                       GraphTraversal<Vertex, Vertex> vfModuleNodeList = g.V().
+                                       has(AAIProperties.NODE_TYPE, GENERIC_VNF).has(VNF_NAME, vnfName).has(VNF_TYPE, vnfType).
+                                       in("org.onap.relationships.inventory.BelongsTo").has(AAIProperties.NODE_TYPE, VF_MODULE).
+                                       has(VF_MODULE_ID, vfModuleId);
+                       if(vfModuleNodeList.hasNext()) {
+                               GraphTraversal<Vertex, Vertex> modelElementNodeList = g.V().
+                                               has(AAIProperties.NODE_TYPE, MODEL).has(MODEL_INVARIANT_ID, newResourceModelInvariantId).
+                                               in("org.onap.relationships.inventory.BelongsTo").has(AAIProperties.NODE_TYPE, MODEL_VER).
+                                               has(MODEL_VERSION_ID, newResourceModelVersionId).in("org.onap.relationships.inventory.BelongsTo").
+                                               has(AAIProperties.NODE_TYPE, MODEL_ELEMENT);
+                               while(modelElementNodeList.hasNext()) {
+                                       Vertex modelElement = modelElementNodeList.next();
+                                       GraphTraversal<Vertex, Vertex> modelVersionLookup = g.V(modelElement).out("org.onap.relationships.inventory.IsA").
+                                                       has(AAIProperties.NODE_TYPE, MODEL_VER);
+                                       while(modelVersionLookup.hasNext()) {
+                                               Vertex modelVersionVertex = modelVersionLookup.next();
+                                               if(modelVersionVertex.value(MODEL_NAME).equals(vfModuleModelName)) {
+                                                       vfModuleVersionId = modelVersionVertex.value(MODEL_VERSION_ID);
+                                                       vfModuleInvariantId = g.V(modelVersionVertex).out("org.onap.relationships.inventory.BelongsTo").
+                                                                       has(AAIProperties.NODE_TYPE, MODEL).next().value(MODEL_INVARIANT_ID);
+                                                       break;
+                                               }
+                                       }
+                                       if(!vfModuleVersionId.isEmpty() && !vfModuleInvariantId.isEmpty()) {
+                                               break;
+                                       }
+                                       GraphTraversal<Vertex, Vertex> modelElementLookup = g.V(modelElement).in("org.onap.relationships.inventory.BelongsTo").
+                                                       has(AAIProperties.NODE_TYPE, MODEL_ELEMENT);
+                                       while(modelElementLookup.hasNext()) {
+                                               ArrayList<String> returnedValues = recursiveSearchForModelName(vfModuleModelName, modelElementLookup.next());
+                                               if(!returnedValues.isEmpty()) {
+                                                       vfModuleInvariantId = returnedValues.get(0);
+                                                       vfModuleVersionId = returnedValues.get(1);
+                                                       break;
+                                               }
+                                       }       
+                                       if(!vfModuleVersionId.isEmpty() && !vfModuleInvariantId.isEmpty()) {
+                                               break;
+                                       }
+                               }       
+                               while (vfModuleNodeList.hasNext()) {
+                                       Vertex vfModuleVtx = vfModuleNodeList.next();
+                                       boolean updateSuccess = false;
+                                       if (vfModuleVtx != null) {
+                                               if(vfModuleInvariantId.isEmpty() && vfModuleVersionId.isEmpty()) {
+                                                       logger.info(MIGRATION_ERROR + "ERROR: Failure to update vf-module " +vfModuleVtx.value(VF_MODULE_ID) + 
+                                                                       ". model-invariant-id and model-version-id not found \n");
+                                                       vfModuleMigrationFailure++;
+                                               }
+                                               else if(vfModuleInvariantId.isEmpty()) {
+                                                       logger.info(MIGRATION_ERROR + "ERROR: Failure to update vf-module " +vfModuleVtx.value(VF_MODULE_ID) + 
+                                                                       ". model-invariant-id not found \n");
+                                                       vfModuleMigrationFailure++;
+                                               }
+                                               else if(vfModuleVersionId.isEmpty()) {
+                                                       logger.info(MIGRATION_ERROR + "ERROR: Failure to update vf-module " +vfModuleVtx.value(VF_MODULE_ID) + 
+                                                                       ". model-version-id not found \n");
+                                                       vfModuleMigrationFailure++;
+                                               }
+                                               else {
+                                                       logger.info("Updating vf-module " + vfModuleVtx.value(VF_MODULE_ID)
+                                                                       + " with current model-invariant-id " 
+                                                                       + (vfModuleVtx.property(MODEL_INVARIANT_ID).isPresent()
+                                                                       ? vfModuleVtx.value(MODEL_INVARIANT_ID) : "null")
+                                                                       + " and current model-version-id "  
+                                                                       + (vfModuleVtx.property(MODEL_VERSION_ID).isPresent()
+                                                                   ? vfModuleVtx.value(MODEL_VERSION_ID) : "null")
+                                                                   + " to use model-invariant-id " + vfModuleInvariantId + " and"
+                                                                   + " model-version-id " + vfModuleVersionId);
+                                                       Vertex newModelVerNode = null;
+                                                       GraphTraversal<Vertex, Vertex> modelVerNodeList = g.V().has(AAIProperties.NODE_TYPE, MODEL).
+                                                                       has(MODEL_INVARIANT_ID, vfModuleInvariantId).in("org.onap.relationships.inventory.BelongsTo").
+                                                                       has(AAIProperties.NODE_TYPE, MODEL_VER).has(MODEL_VERSION_ID, vfModuleVersionId);
+                                                       if(!modelVerNodeList.hasNext()) {
+                                                               logger.info(MIGRATION_ERROR + "ERROR: Model " + vfModuleInvariantId + " with model-ver "
+                                                                                + vfModuleVersionId + " could not be found in traversal, error in finding vf-module model \n");
+                                                               vfModuleMigrationFailure++;
+                                                       }
+                                                       else {
+                                                               newModelVerNode = modelVerNodeList.next();
+                                                               try {
+                                                                       Vertex oldModelVerNode = null;
+                                                                       GraphTraversal<Vertex, Vertex> modelVerQuery= g.V(vfModuleVtx).out("org.onap.relationships.inventory.IsA")
+                                                                                       .has(AAIProperties.NODE_TYPE, MODEL_VER);
+                                                                       if(modelVerQuery.hasNext()) {
+                                                                               oldModelVerNode = modelVerQuery.next();
+                                                                       }
+                                                                       vfModuleVtx.property(MODEL_INVARIANT_ID_LOCAL, vfModuleInvariantId);
+                                                                       vfModuleVtx.property(MODEL_VERSION_ID_LOCAL, vfModuleVersionId);
+                                                                       if(newModelVerNode.property(MODEL_VERSION).isPresent()) {
+                                                                               vfModuleVtx.property(PERSONA_MODEL_VERSION, newModelVerNode.value(MODEL_VERSION));
+                                                                       }
+                                                                       this.touchVertexProperties(vfModuleVtx, false);
+                                                                       if(oldModelVerNode != null) {
+                                                                               this.swingEdges(oldModelVerNode, newModelVerNode, VF_MODULE, "org.onap.relationships.inventory.IsA", "IN");
+                                                                       }
+                                                                       else {
+                                                                               this.createPrivateEdge(newModelVerNode, vfModuleVtx);
+                                                                       }
+                                                                       updateSuccess = true;   
+                                                               } catch (Exception e) {
+                                                                       logger.info(e.toString());
+                                                                       logger.info(MIGRATION_ERROR + "ERROR: Failure to update vf-module " 
+                                                                                       + vfModuleVtx.value(VF_MODULE_ID) + "\n");
+                                                                       vfModuleMigrationFailure++;                                                             
+                                                               }
+                                                       }
+                                               }
+                                       }
+                                       if(updateSuccess) {
+                                               String dmaapMsg = System.nanoTime() + "_" + vfModuleVtx.id().toString() + "_"   + 
+                                                               vfModuleVtx.value("resource-version").toString();
+                                               dmaapMsgList.add(dmaapMsg);
+                                               logger.info("Update of vf-module " 
+                                                               + vfModuleVtx.value(VF_MODULE_ID) + " successful \n");
+                                               vfModuleMigrationSuccess++;
+                                               if(!processedNodes.contains(i)) {
+                                                       processedNodes.add(i);
+                                               }
+                                               updateVserverAndImage(vfModuleId, imageName);
+                                       }
+                               }
+                       }       
+               }
+		int processedNodesNum = processedNodes.size();
+		// remove processed lines by their recorded indices, iterating in reverse so removals do not shift the remaining indices
+		for (int i = processedNodesNum - 1; i >= 0; i--) {
+			vfModuleFileLineList.remove(processedNodes.get(i).intValue());
+		}
+       }               
+       
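+	/**
+	 * Recursively walks model-elements looking for a model-ver whose model-name matches the given
+	 * vf-module model name; returns [model-invariant-id, model-version-id] when found, otherwise an empty list.
+	 */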
+       private ArrayList<String> recursiveSearchForModelName(String vfModuleModelName, Vertex modelElement) {
+               ArrayList<String> returnedValues = new ArrayList<String>();
+               GraphTraversal<Vertex, Vertex> modelVersionLookup = g.V(modelElement).out("org.onap.relationships.inventory.IsA").
+                               has(AAIProperties.NODE_TYPE, MODEL_VER);
+               while(modelVersionLookup.hasNext()) {
+                       Vertex modelVersionVertex = modelVersionLookup.next();
+                       if(modelVersionVertex.value(MODEL_NAME).equals(vfModuleModelName)) {
+                               returnedValues.add(modelVersionVertex.value(MODEL_VERSION_ID));
+                               returnedValues.add(0, g.V(modelVersionVertex).out("org.onap.relationships.inventory.BelongsTo")
+                                               .next().value(MODEL_INVARIANT_ID));
+                               return returnedValues;
+                       }
+               }
+               GraphTraversal<Vertex, Vertex> modelElementLookup = g.V(modelElement).in("org.onap.relationships.inventory.BelongsTo").
+                               has(AAIProperties.NODE_TYPE, MODEL_ELEMENT);
+               while(modelElementLookup.hasNext()) {
+                       returnedValues = recursiveSearchForModelName(vfModuleModelName, modelElementLookup.next());
+                       if(!returnedValues.isEmpty()) {
+                               return returnedValues;
+                       }
+               }
+               return returnedValues;
+       }
+       
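+	/**
+	 * For each vserver used by the given vf-module, replaces its current image relationship with a
+	 * relationship to the image of the given name in the same cloud-region, swinging the existing
+	 * Uses edge when one is present and otherwise creating a new cousin edge.
+	 */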
+       private void updateVserverAndImage(String vfModuleId, String imageName) {
+               GraphTraversal<Vertex, Vertex> vserverNodeList = g.V().
+                       has(AAIProperties.NODE_TYPE, VF_MODULE).has(VF_MODULE_ID, vfModuleId).
+                       out("org.onap.relationships.inventory.Uses").has(AAIProperties.NODE_TYPE, VSERVER);                             
+               while (vserverNodeList.hasNext()) {
+                       Vertex vserverVtx = vserverNodeList.next();
+                       boolean updateSuccess = false;
+                       GraphTraversal<Vertex, Vertex> oldImageLookup = g.V(vserverVtx).out("org.onap.relationships.inventory.Uses").
+                                       has(AAIProperties.NODE_TYPE, IMAGE);
+                       Vertex oldImageVtx = null;
+                       if(oldImageLookup.hasNext()) {
+                               oldImageVtx = oldImageLookup.next();
+                       }
+                       GraphTraversal<Vertex, Vertex> newImageLookup = g.V(vserverVtx).out("org.onap.relationships.inventory.BelongsTo").
+                                       has(AAIProperties.NODE_TYPE, TENANT).out("org.onap.relationships.inventory.BelongsTo").
+                                       has(AAIProperties.NODE_TYPE, CLOUD_REGION).in("org.onap.relationships.inventory.BelongsTo").
+                                       has(AAIProperties.NODE_TYPE, IMAGE).has(IMAGE_NAME, imageName);
+                       Vertex newImageVtx = null;
+                       if(newImageLookup.hasNext()) {
+                               newImageVtx = newImageLookup.next();
+                       }
+                       if (vserverVtx != null && newImageVtx!= null) {
+                               logger.info("Updating vserver " + vserverVtx.value(VSERVER_ID)
+                                       + " to replace all current image relationships with relationship to new image " + imageName);
+                               try {
+                                       if(oldImageVtx != null) {
+                                               this.swingEdges(oldImageVtx, newImageVtx, VSERVER, "org.onap.relationships.inventory.Uses", "IN");
+                                       }
+                                       else {
+                                               this.createEdgeIfPossible(EdgeType.COUSIN, vserverVtx, newImageVtx);
+                                       }
+                                       updateSuccess = true;   
+                               } catch (Exception e) {
+                                               logger.info(e.toString());
+                                               logger.info(MIGRATION_ERROR + "ERROR: Failure to update vserver " 
+                                               + vserverVtx.value(VSERVER_ID) + " with image " + imageName + "\n");
+                                               imageMigrationFailure++;                                                                
+                               }
+                               if(updateSuccess) {
+                                       logger.info("Update of vserver " 
+                                                       + vserverVtx.value(VSERVER_ID) + " with image " + newImageVtx.value(IMAGE_NAME) 
+                                                       + " successful \n");
+                                       imageMigrationSuccess++;         
+                               }
+                       }
+               }
+       }
+       
+    @Override
+    public Status getStatus() {
+        if (checkLog) {
+            return Status.CHECK_LOGS;
+        }
+        else if (success) {
+            return Status.SUCCESS;
+        }
+        else {
+            return Status.FAILURE;
+        }
+    }
+    
+    @Override
+       public void commit() {
+               engine.commit();
+               createDmaapFiles(dmaapMsgList);
+       }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{GENERIC_VNF, SERVICE_INSTANCE, VF_MODULE, VSERVER, IMAGE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateRadcomChanges";
+    }
+
+       @Override
+       public List<Pair<Vertex, Vertex>> getAffectedNodePairs() {
+               return null;
+       }
+
+       @Override
+       public String getNodeTypeRestriction() {
+               return VSERVER;
+       }
+
+       @Override
+       public String getEdgeLabelRestriction() {
+               return "org.onap.relationships.inventory.Uses";
+       }
+
+       @Override
+       public String getEdgeDirRestriction() {
+               return "IN";
+       }
+
+       @Override
+       public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) {        
+       }
+}
index 2431d11..9bcd843 100644 (file)
@@ -27,7 +27,7 @@ import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.javatuples.Pair;
 import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.rest.dsl.DslQueryProcessor;
-import org.onap.aai.restcore.search.GroovyQueryBuilderSingleton;
+import org.onap.aai.restcore.search.GroovyQueryBuilder;
 import org.onap.aai.restcore.util.URITools;
 import org.onap.aai.serialization.engines.TransactionalGraphEngine;
 import org.onap.aai.serialization.queryformats.SubGraphStyle;
@@ -37,7 +37,6 @@ import javax.ws.rs.core.MultivaluedMap;
 import java.io.FileNotFoundException;
 import java.net.URI;
 import java.util.*;
-import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 public abstract class GenericQueryProcessor {
@@ -48,7 +47,7 @@ public abstract class GenericQueryProcessor {
        protected static Pattern p = Pattern.compile("query/(.*+)");
        protected Optional<String> gremlin;
        protected final TransactionalGraphEngine dbEngine;
-       protected static GroovyQueryBuilderSingleton queryBuilderSingleton = GroovyQueryBuilderSingleton.getInstance();
+       protected static GroovyQueryBuilder groovyQueryBuilder = new GroovyQueryBuilder();
        protected final boolean isGremlin;
        protected Optional<DslQueryProcessor> dslQueryProcessorOptional;
        /* dsl parameters to store dsl query and to check
@@ -122,7 +121,7 @@ public abstract class GenericQueryProcessor {
                        String dslUserQuery = dsl.get();
                        if(dslQueryProcessorOptional.isPresent()){
                                String dslQuery = dslQueryProcessorOptional.get().parseAaiQuery(dslUserQuery);
-                               query = queryBuilderSingleton.executeTraversal(dbEngine, dslQuery, params);
+                               query = groovyQueryBuilder.executeTraversal(dbEngine, dslQuery, params);
                                String startPrefix = "g.V()";
                                query = startPrefix + query;
                        }
index 3db4301..8f83751 100644 (file)
@@ -20,7 +20,7 @@
 package org.onap.aai.rest.search;
 
 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
-import org.onap.aai.restcore.search.GremlinGroovyShellSingleton;
+import org.onap.aai.restcore.search.GremlinGroovyShell;
 
 import java.util.Map;
 
@@ -35,7 +35,7 @@ public class GroovyShellImpl extends GenericQueryProcessor {
 
                params.put("g", this.dbEngine.asAdmin().getTraversalSource());
                
-               GremlinGroovyShellSingleton shell = GremlinGroovyShellSingleton.getInstance();
+               GremlinGroovyShell shell = new GremlinGroovyShell();
                
                return shell.executeTraversal(query, params);
        }
index 924009d..f35cece 100644 (file)
@@ -24,13 +24,14 @@ import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
 import org.janusgraph.core.JanusGraph;
 import org.janusgraph.core.schema.JanusGraphManagement;
+import org.onap.aai.config.PropertyPasswordConfiguration;
 import org.onap.aai.dbgen.SchemaGenerator;
 import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.logging.ErrorLogHelper;
 import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.logging.LoggingContext.StatusCode;
-import org.onap.aai.util.AAIConfig;
-import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.*;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 
 import java.util.Properties;
@@ -46,7 +47,7 @@ public class GenTester {
         *
         * @param args the arguments
         */
-       public static void main(String[] args) {
+       public static void main(String[] args) throws AAIException{
           
                JanusGraph graph = null;
                System.setProperty("aai.service.name", GenTester.class.getSimpleName());
@@ -67,11 +68,23 @@ public class GenTester {
                LoggingContext.statusCode(StatusCode.COMPLETE);
                LoggingContext.responseCode(LoggingContext.SUCCESS);
 
-               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
-                               "org.onap.aai.config",
-                               "org.onap.aai.setup"
-               );
-
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+               PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
+               initializer.initialize(ctx);
+               try {
+                       ctx.scan(
+                                       "org.onap.aai.config",
+                                       "org.onap.aai.setup"
+                       );
+                       ctx.refresh();
+               } catch (Exception e) {
+                       AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
+                       LOGGER.error("Problems running the tool "+aai.getMessage());
+                       LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
+                       throw aai;
+               }
                try {
             LOGGER.info("GenTester uses either cql jar or Cassandra jar");
 
@@ -158,6 +171,5 @@ public class GenTester {
            LOGGER.auditEvent("-- all done, if program does not exit, please kill.");
            System.exit(0);
     }
-
 }
 
diff --git a/src/main/java/org/onap/aai/util/ExceptionTranslator.java b/src/main/java/org/onap/aai/util/ExceptionTranslator.java
new file mode 100644 (file)
index 0000000..a395c23
--- /dev/null
@@ -0,0 +1,47 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright Â© 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.onap.aai.GraphAdminApp;
+import org.onap.aai.exceptions.AAIException;
+
+public class ExceptionTranslator {
+    private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(ExceptionTranslator.class);
+    public static AAIException schemaServiceExceptionTranslator(Exception ex) {
+        AAIException aai = null;
+        LOGGER.info("Exception is " + ExceptionUtils.getRootCause(ex).getMessage() + ". Root cause is " + ExceptionUtils.getRootCause(ex).toString());
+        if(ExceptionUtils.getRootCause(ex).getMessage().contains("NodeIngestor")){
+            aai = new  AAIException("AAI_3026","Error reading OXM from SchemaService - Investigate");
+        }
+        else if(ExceptionUtils.getRootCause(ex).getMessage().contains("EdgeIngestor")){
+            aai = new  AAIException("AAI_3027","Error reading EdgeRules from SchemaService - Investigate");
+        }
+        else if(ExceptionUtils.getRootCause(ex).getMessage().contains("Connection refused")){
+            aai = new  AAIException("AAI_3025","Error connecting to SchemaService - Investigate");
+        }else {
+            aai = new  AAIException("AAI_3025","Error connecting to SchemaService - Please Investigate");
+        }
+
+        return aai;
+    }
+}
diff --git a/src/main/java/org/onap/aai/util/GraphAdminConstants.java b/src/main/java/org/onap/aai/util/GraphAdminConstants.java
new file mode 100644 (file)
index 0000000..017d92e
--- /dev/null
@@ -0,0 +1,49 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright Â© 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+public final class GraphAdminConstants {
+    
+    public static final int AAI_SNAPSHOT_DEFAULT_THREADS_FOR_CREATE = 15;
+    public static final int AAI_SNAPSHOT_DEFAULT_MAX_ERRORS_PER_THREAD = 25;
+    public static final Long AAI_SNAPSHOT_DEFAULT_VERTEX_ADD_DELAY_MS = 1L;
+    public static final Long AAI_SNAPSHOT_DEFAULT_EDGE_ADD_DELAY_MS = 1L;
+    public static final Long AAI_SNAPSHOT_DEFAULT_FAILURE_DELAY_MS = 50L;
+    public static final Long AAI_SNAPSHOT_DEFAULT_RETRY_DELAY_MS = 1500L;
+    public static final Long AAI_SNAPSHOT_DEFAULT_VERTEX_TO_EDGE_PROC_DELAY_MS = 9000L;
+    public static final Long AAI_SNAPSHOT_DEFAULT_STAGGER_THREAD_DELAY_MS = 5000L;
+
+    public static final int AAI_GROOMING_DEFAULT_MAX_FIX = 150;
+    public static final int AAI_GROOMING_DEFAULT_SLEEP_MINUTES = 7;
+
+    public static final int AAI_DUPETOOL_DEFAULT_MAX_FIX = 25;
+    public static final int AAI_DUPETOOL_DEFAULT_SLEEP_MINUTES = 7;
+
+    
+    
+    
+    /**
+     * Instantiates a new GraphAdmin constants.
+     */
+    private GraphAdminConstants() {
+        // prevent instantiation
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/util/GraphAdminDBUtils.java b/src/main/java/org/onap/aai/util/GraphAdminDBUtils.java
new file mode 100644 (file)
index 0000000..992223e
--- /dev/null
@@ -0,0 +1,40 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright Â© 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+import java.util.Iterator;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+public class GraphAdminDBUtils {
+
+       private static EELFLogger LOGGER = EELFManager.getInstance().getLogger(GraphAdminDBUtils.class);
+
+       public static void logConfigs(org.apache.commons.configuration.Configuration configuration) {
+
+               if (configuration != null && configuration.getKeys() != null) {
+                       Iterator<String> keys = configuration.getKeys();
+                       keys.forEachRemaining(
+                                       key -> LOGGER.info("Key is " + key + ", Value is " + configuration.getProperty(key).toString()));
+               }
+
+       }
+}
index ad96efe..e559a78 100644 (file)
@@ -21,8 +21,11 @@ package org.onap.aai.util;
 
 import com.beust.jcommander.JCommander;
 import com.beust.jcommander.Parameter;
+import org.onap.aai.config.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.logging.ErrorLogHelper;
 import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.migration.EventAction;
 import org.onap.aai.setup.SchemaVersions;
@@ -32,7 +35,7 @@ import java.util.*;
 
 public class SendDeleteMigrationNotificationsMain {
 
-       public static void main(String[] args) {
+       public static void main(String[] args) throws AAIException {
 
                Arrays.asList(args).stream().forEach(System.out::println);
 
@@ -47,11 +50,22 @@ public class SendDeleteMigrationNotificationsMain {
                LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
                LoggingContext.responseCode(LoggingContext.SUCCESS);
 
-               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
-                               "org.onap.aai.config",
-                               "org.onap.aai.setup"
-               );
-
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+               PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
+               initializer.initialize(ctx);
+               try {
+                       ctx.scan(
+                                       "org.onap.aai.config",
+                                       "org.onap.aai.setup"
+                       );
+                       ctx.refresh();
+               } catch (Exception e) {
+                       AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
+                       LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
+                       throw aai;
+               }
                LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
                SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
                String basePath = ctx.getEnvironment().getProperty("schema.uri.base.path");
index 29eb1da..6ae2bb4 100644 (file)
@@ -21,8 +21,11 @@ package org.onap.aai.util;
 
 import com.beust.jcommander.JCommander;
 import com.beust.jcommander.Parameter;
+import org.onap.aai.config.PropertyPasswordConfiguration;
 import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
 import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.logging.ErrorLogHelper;
 import org.onap.aai.logging.LoggingContext;
 import org.onap.aai.migration.EventAction;
 import org.onap.aai.setup.SchemaVersions;
@@ -32,7 +35,7 @@ import java.util.*;
 
 public class SendMigrationNotificationsMain {
 
-       public static void main(String[] args) {
+       public static void main(String[] args) throws AAIException {
 
                Arrays.asList(args).stream().forEach(System.out::println);
 
@@ -47,11 +50,23 @@ public class SendMigrationNotificationsMain {
                LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
                LoggingContext.responseCode(LoggingContext.SUCCESS);
 
-               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
-                               "org.onap.aai.config",
-                               "org.onap.aai.setup"
-               );
-
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
+               PropertyPasswordConfiguration initializer = new PropertyPasswordConfiguration();
+               initializer.initialize(ctx);
+               try {
+                       ctx.scan(
+                                       "org.onap.aai.config",
+                                       "org.onap.aai.setup"
+                       );
+                       ctx.refresh();
+               } catch (Exception e) {
+                       AAIException aai = ExceptionTranslator.schemaServiceExceptionTranslator(e);
+                       System.out.println("Problems running tool "+aai.getMessage());
+                       LoggingContext.statusCode(LoggingContext.StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       ErrorLogHelper.logError(aai.getCode(), e.getMessage() + ", resolve and retry");
+                       throw aai;
+               }
                LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
                SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
                String basePath = ctx.getEnvironment().getProperty("schema.uri.base.path");
index d636bb6..cfab945 100644 (file)
@@ -48,7 +48,7 @@ schema.ingest.file=${server.local.startpath}/application.properties
 
 schema.uri.base.path=/aai
 # Lists all of the versions in the schema
-schema.version.list=v8,v9,v10,v11,v12,v13,v14
+schema.version.list=v8,v9,v10,v11,v12,v13,v14,v15
 # Specifies from which version should the depth parameter to default to zero
 schema.version.depth.start=v9
 # Specifies from which version should the related link be displayed in response payload
@@ -62,3 +62,14 @@ schema.version.namespace.change.start=v12
 schema.version.edge.label.start=v12
 # Specifies the version that the application should default to
 schema.version.api.default=v14
+schema.translator.list=schema-service
+#schema.service.client=no-auth
+schema.service.base.url=https://localhost:8452/aai/schema-service/v1/
+schema.service.nodes.endpoint=nodes?version=
+schema.service.edges.endpoint=edgerules?version=
+schema.service.versions.endpoint=versions
+
+schema.service.ssl.key-store=${server.local.startpath}etc/auth/aai_keystore
+schema.service.ssl.key-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
+schema.service.ssl.trust-store=${server.local.startpath}etc/auth/aai_keystore
+schema.service.ssl.trust-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
index a8f5e95..4b9371f 100644 (file)
@@ -1,4 +1,4 @@
-topic=AAI-EVENT\r
-partition=AAI\r
-maxBatchSize=100\r
-maxAgeMs=250\r
+topic=AAI-EVENT
+partition=AAI
+maxBatchSize=100
+maxAgeMs=250
index d2cf54b..c15c6f5 100644 (file)
-#\r
-# ============LICENSE_START=======================================================\r
-# org.onap.aai\r
-# ================================================================================\r
-# Copyright Â© 2017-18 AT&T Intellectual Property. All rights reserved.\r
-# ================================================================================\r
-# Licensed under the Apache License, Version 2.0 (the "License");\r
-# you may not use this file except in compliance with the License.\r
-# You may obtain a copy of the License at\r
-#\r
-#    http://www.apache.org/licenses/LICENSE-2.0\r
-#\r
-# Unless required by applicable law or agreed to in writing, software\r
-# distributed under the License is distributed on an "AS IS" BASIS,\r
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
-# See the License for the specific language governing permissions and\r
-# limitations under the License.\r
-# ============LICENSE_END=========================================================\r
-\r
-####################################################################\r
-#  REMEMBER TO THINK ABOUT ENVIRONMENTAL DIFFERENCES AND CHANGE THE\r
-#  TEMPLATE AND *ALL* DATAFILES\r
-####################################################################\r
-\r
-aai.config.checktime=1000\r
-\r
-# this could come from siteconfig.pl?\r
-aai.config.nodename=AutomaticallyOverwritten\r
-\r
-aai.transaction.logging=true\r
-aai.transaction.logging.get=true\r
-aai.transaction.logging.post=true\r
-\r
-aai.server.url.base=https://localhost:8443/aai/\r
-aai.server.url=https://localhost:8443/aai/v14/\r
-aai.oldserver.url.base=https://localhost:8443/aai/servers/\r
-aai.oldserver.url=https://localhost:8443/aai/servers/v2/\r
-aai.global.callback.url=https://localhost:8443/aai/\r
-\r
-# Start of INTERNAL Specific Properties\r
-\r
-aai.truststore.filename=aai_keystore\r
-aai.truststore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0\r
-aai.keystore.filename=aai-client-cert.p12\r
-aai.keystore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0\r
-\r
-aai.realtime.clients=RO,SDNC,MSO,SO\r
-\r
-# End of INTERNAL Specific Properties\r
-\r
-aai.notification.current.version=v14\r
-aai.notificationEvent.default.status=UNPROCESSED\r
-aai.notificationEvent.default.eventType=AAI-EVENT\r
-aai.notificationEvent.default.domain=devINT1\r
-aai.notificationEvent.default.sourceName=aai\r
-aai.notificationEvent.default.sequenceNumber=0\r
-aai.notificationEvent.default.severity=NORMAL\r
-aai.notificationEvent.default.version=v14\r
-# This one lets us enable/disable resource-version checking on updates/deletes\r
-aai.resourceversion.enableflag=true\r
-aai.logging.maxStackTraceEntries=10\r
-aai.default.api.version=v14\r
-\r
-# Used by Model-processing code\r
-aai.model.delete.sleep.per.vtx.msec=500\r
-aai.model.query.resultset.maxcount=50\r
-aai.model.query.timeout.sec=90\r
\r
-# Used by Data Grooming\r
-aai.grooming.default.max.fix=150\r
-aai.grooming.default.sleep.minutes=7\r
-\r
-# Used by DupeTool\r
-aai.dupeTool.default.max.fix=25\r
-aai.dupeTool.default.sleep.minutes=7\r
-\r
-aai.model.proc.max.levels=50\r
-aai.edgeTag.proc.max.levels=50\r
-\r
-# Used by the ForceDelete tool\r
-aai.forceDel.protected.nt.list=cloud-region\r
-aai.forceDel.protected.edge.count=10\r
-aai.forceDel.protected.descendant.count=10\r
-\r
-# Used for CTAG-Pool generation\r
-aai.ctagPool.rangeString.vplsPe1=2001-2500\r
-aai.ctagPool.rangeString.vplsPe2=2501-3000\r
-\r
-aai.jms.enable=false\r
-\r
-#used by the dataGrooming and dataSnapshot cleanup tasks\r
-aai.cron.enable.datagroomingcleanup=true\r
-aai.cron.enable.datasnapshotcleanup=true\r
-aai.datagrooming.agezip=5\r
-aai.datagrooming.agedelete=30\r
-aai.datasnapshot.agezip=5\r
-aai.datasnapshot.agedelete=30\r
-\r
-#used by the dataSnapshot and dataGrooming tasks\r
-aai.cron.enable.dataSnapshot=true\r
-aai.cron.enable.dataGrooming=true\r
-\r
-#used by the dataGrooming tasks\r
-aai.datagrooming.enableautofix=true\r
-aai.datagrooming.enabledupefixon=true\r
-aai.datagrooming.enabledontfixorphans=true\r
-aai.datagrooming.enabletimewindowminutes=true\r
-aai.datagrooming.enableskiphostcheck=false\r
-aai.datagrooming.enablesleepminutes=false\r
-aai.datagrooming.enableedgesonly=false\r
-aai.datagrooming.enableskipedgechecks=false\r
-aai.datagrooming.enablemaxfix=false\r
-aai.datagrooming.enablesinglecommits=false\r
-aai.datagrooming.enabledupecheckoff=false\r
-aai.datagrooming.enableghost2checkoff=false\r
-aai.datagrooming.enableghost2fixon=false\r
-aai.datagrooming.enablef=false\r
-\r
-# used by the dataGrooming to set values\r
-aai.datagrooming.timewindowminutesvalue=10500\r
-aai.datagrooming.sleepminutesvalue=100\r
-aai.datagrooming.maxfixvalue=10\r
-aai.datagrooming.fvalue=10\r
-\r
-#timeout for traversal enabled flag\r
-aai.graphadmin.timeoutenabled=true\r
-\r
-#timeout app specific -1 to bypass for that app id, a whole number to override the timeout with that value (in ms)\r
-aai.graphadmin.timeout.appspecific=JUNITTESTAPP1,1|JUNITTESTAPP2,-1\r
-\r
-#default timeout limit added for graphadmin if not overridden (in ms)\r
-aai.graphadmin.timeoutlimit=180000\r
-\r
-# Disable the process check which are oriented towards linux OS\r
-# These props should only be true for local on windows\r
-aai.disable.check.snapshot.running=true\r
-aai.disable.check.grooming.running=true\r
-\r
-# Specify the params listed right here that you would have send to the dataSnapshot shell script\r
-# JUST_TAKE_SNAPSHOT\r
-# THREADED_SNAPSHOT 2 DEBUG\r
-# THREADED_SNAPSHOT 2\r
-aai.datasnapshot.params=JUST_TAKE_SNAPSHOT\r
-\r
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017-18 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+####################################################################
+#  REMEMBER TO THINK ABOUT ENVIRONMENTAL DIFFERENCES AND CHANGE THE
+#  TEMPLATE AND *ALL* DATAFILES
+####################################################################
+
+aai.config.checktime=1000
+
+# this could come from siteconfig.pl?
+aai.config.nodename=AutomaticallyOverwritten
+
+aai.transaction.logging=true
+aai.transaction.logging.get=true
+aai.transaction.logging.post=true
+
+aai.server.url.base=https://localhost:8443/aai/
+aai.server.url=https://localhost:8443/aai/v14/
+aai.oldserver.url.base=https://localhost:8443/aai/servers/
+aai.oldserver.url=https://localhost:8443/aai/servers/v2/
+aai.global.callback.url=https://localhost:8443/aai/
+
+# Start of INTERNAL Specific Properties
+
+aai.truststore.filename=aai_keystore
+aai.truststore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0
+aai.keystore.filename=aai-client-cert.p12
+aai.keystore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0
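The passwd.x values above appear to use the Jetty OBF: obfuscation format, which is a reversible encoding rather than encryption. As a minimal sketch, assuming jetty-util is on the classpath (an assumption, not something this commit adds), such values can be produced and read like this:

    import org.eclipse.jetty.util.security.Password;

    public class ObfPasswordExample {
        public static void main(String[] args) {
            // Obfuscate a clear-text password into the OBF: form used in the properties above
            String obf = Password.obfuscate("my-keystore-password");
            System.out.println(obf);                      // e.g. OBF:....

            // Reverse the obfuscation when the application needs the real value
            String clear = Password.deobfuscate(obf);
            System.out.println(clear);                    // my-keystore-password
        }
    }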
+
+aai.realtime.clients=RO,SDNC,MSO,SO
+
+# End of INTERNAL Specific Properties
+
+aai.notification.current.version=v14
+aai.notificationEvent.default.status=UNPROCESSED
+aai.notificationEvent.default.eventType=AAI-EVENT
+aai.notificationEvent.default.domain=devINT1
+aai.notificationEvent.default.sourceName=aai
+aai.notificationEvent.default.sequenceNumber=0
+aai.notificationEvent.default.severity=NORMAL
+aai.notificationEvent.default.version=v14
+# This one lets us enable/disable resource-version checking on updates/deletes
+aai.resourceversion.enableflag=true
+aai.logging.maxStackTraceEntries=10
+aai.default.api.version=v14
+
+# Used by Model-processing code
+aai.model.delete.sleep.per.vtx.msec=500
+aai.model.query.resultset.maxcount=50
+aai.model.query.timeout.sec=90
+# Used by Data Grooming
+aai.grooming.default.max.fix=150
+aai.grooming.default.sleep.minutes=7
+
+# Used by Data Snapshot
+aai.datasnapshot.default.threads.for.create=16
+
+# Used by DupeTool
+aai.dupeTool.default.max.fix=25
+aai.dupeTool.default.sleep.minutes=7
+
+aai.model.proc.max.levels=50
+aai.edgeTag.proc.max.levels=50
+
+# Used by the ForceDelete tool
+aai.forceDel.protected.nt.list=cloud-region
+aai.forceDel.protected.edge.count=10
+aai.forceDel.protected.descendant.count=10
+
+# Used for CTAG-Pool generation
+aai.ctagPool.rangeString.vplsPe1=2001-2500
+aai.ctagPool.rangeString.vplsPe2=2501-3000
+
+aai.jms.enable=false
+
+#used by the dataGrooming and dataSnapshot cleanup tasks
+aai.cron.enable.datagroomingcleanup=true
+aai.cron.enable.datasnapshotcleanup=true
+aai.datagrooming.agezip=5
+aai.datagrooming.agedelete=30
+aai.datasnapshot.agezip=5
+aai.datasnapshot.agedelete=30
+
+#used by the dataSnapshot and dataGrooming tasks
+aai.cron.enable.dataSnapshot=true
+aai.cron.enable.dataGrooming=true
+
+#used by the dataGrooming tasks
+aai.datagrooming.enableautofix=true
+aai.datagrooming.enabledupefixon=true
+aai.datagrooming.enabledontfixorphans=true
+aai.datagrooming.enabletimewindowminutes=true
+aai.datagrooming.enableskiphostcheck=false
+aai.datagrooming.enablesleepminutes=false
+aai.datagrooming.enableedgesonly=false
+aai.datagrooming.enableskipedgechecks=false
+aai.datagrooming.enablemaxfix=false
+aai.datagrooming.enablesinglecommits=false
+aai.datagrooming.enabledupecheckoff=false
+aai.datagrooming.enableghost2checkoff=false
+aai.datagrooming.enableghost2fixon=false
+aai.datagrooming.enablef=false
+
+# used by the dataGrooming to set values
+aai.datagrooming.timewindowminutesvalue=10500
+aai.datagrooming.sleepminutesvalue=100
+aai.datagrooming.maxfixvalue=10
+aai.datagrooming.fvalue=10
+
+#timeout for traversal enabled flag
+aai.graphadmin.timeoutenabled=true
+
+# app-specific timeout: -1 to bypass for that app id, or a whole number to override the timeout with that value (in ms)
+aai.graphadmin.timeout.appspecific=JUNITTESTAPP1,1|JUNITTESTAPP2,-1
+
+#default timeout limit added for graphadmin if not overridden (in ms)
+aai.graphadmin.timeoutlimit=180000
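The app-specific override string above is a simple list of APP,value pairs separated by |. A minimal hypothetical parser (not the actual graphadmin code; the helper and class names are illustrative) could read it like this:

    import java.util.HashMap;
    import java.util.Map;

    public class TimeoutOverrideExample {
        // Parses "APP1,1|APP2,-1" into a map of app-id -> timeout override (ms), -1 meaning "bypass"
        static Map<String, Long> parseOverrides(String prop) {
            Map<String, Long> overrides = new HashMap<>();
            for (String pair : prop.split("\\|")) {
                String[] parts = pair.split(",");
                if (parts.length == 2) {
                    overrides.put(parts[0].trim(), Long.parseLong(parts[1].trim()));
                }
            }
            return overrides;
        }

        public static void main(String[] args) {
            Map<String, Long> o = parseOverrides("JUNITTESTAPP1,1|JUNITTESTAPP2,-1");
            System.out.println(o); // {JUNITTESTAPP1=1, JUNITTESTAPP2=-1}
        }
    }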
+
+# Disable the process checks which are oriented towards the Linux OS
+# These props should only be true when running locally on Windows
+aai.disable.check.snapshot.running=true
+aai.disable.check.grooming.running=true
+
+# Specify the params listed here that you would have sent to the dataSnapshot shell script
+# JUST_TAKE_SNAPSHOT
+# THREADED_SNAPSHOT 2 DEBUG
+# THREADED_SNAPSHOT 2
+aai.datasnapshot.params=JUST_TAKE_SNAPSHOT
+
+# Threshold for margin of error (in ms) for resources_with_sot format to derive the most recent http method performed
+aai.resource.formatter.threshold=10
index 708fb1f..1550d6c 100644 (file)
@@ -32,6 +32,10 @@ AAI_3011=5:6:WARN:3011:400:3000:Unknown XML namespace used in payload
 AAI_3012=5:6:WARN:3012:400:3012:Unrecognized AAI function
 AAI_3013=5:6:WARN:3013:400:3013:Query payload missing required parameters %1
 AAI_3014=5:6:WARN:3014:400:3014:Query payload is invalid %1
+AAI_3025=5:4:FATAL:3025:500:3025:Error connecting to Schema Service - Investigate
+AAI_3026=5:4:FATAL:3026:500:3026:Error reading OXM from Schema Service - Investigate
+AAI_3027=5:4:FATAL:3027:500:3027:Error reading EdgeRules from Schema Service - Investigate
+
 # pol errors
 AAI_3100=5:1:WARN:3100:400:3100:Unsupported operation %1
 AAI_3101=5:1:WARN:3101:403:3101:Attempt by client %1 to execute API %2
diff --git a/src/main/resources/etc/appprops/janusgraph-migration.properties b/src/main/resources/etc/appprops/janusgraph-migration.properties
new file mode 100644 (file)
index 0000000..6090c84
--- /dev/null
@@ -0,0 +1,52 @@
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+query.fast-property=true
+# the following parameters are not reloaded automatically and require a manual bounce
+storage.backend=inmemory
+storage.hostname=localhost
+
+#schema.default=none
+storage.lock.wait-time=300
+storage.hbase.table=aaigraph-dev1.dev
+storage.hbase.ext.zookeeper.znode.parent=/hbase-unsecure
+# Setting db-cache to false ensures the fastest propagation of changes across servers
+cache.db-cache = false
+
+#load graphson file on startup
+load.snapshot.file=false
+query.smart-limit=false
+
+
+#storage.backend=cql
+#storage.hostname=host1,host2,host3
+#storage.cql.replication-strategy-class=NetworkTopologyStrategy
+#storage.cql.replication-strategy-options=options
+# for single datacenter cluster
+#storage.cql.replication-factor=3
+
+#storage.cql.keyspace=aaigraph_single_dc
+#storage.cql.only-use-local-consistency-for-system-operations=true
+#storage.cql.cluster-name=clusterName
+#storage.cql.local-datacenter=dataCenter
+#storage.cql.read-consistency-level=ONE
+#storage.cql.write-consistency-level=QUORUM
+#storage.connection-timeout=100000
+#cache.tx-cache-size = 1000000
+#metrics.enabled=true
\ No newline at end of file
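As a minimal sketch of how this new properties file can be consumed, assuming the JanusGraph libraries already used by this project are on the classpath (the path below mirrors where the commit adds the file; everything else is illustrative):

    import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
    import org.janusgraph.core.JanusGraph;
    import org.janusgraph.core.JanusGraphFactory;

    public class MigrationGraphExample {
        public static void main(String[] args) {
            // storage.backend=inmemory above means no external Cassandra/HBase is needed
            JanusGraph graph = JanusGraphFactory.open(
                    "src/main/resources/etc/appprops/janusgraph-migration.properties");
            GraphTraversalSource g = graph.traversal();
            System.out.println("vertex count: " + g.V().count().next());
            graph.close();
        }
    }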
index 405a667..20bd0a8 100644 (file)
@@ -21,10 +21,10 @@ if [ "$#" -lt 1 ]; then
 fi
 
 source_profile;
-export PRE_JAVA_OPTS=${PRE_JAVA_OPTS:--Xms6g -Xmx8g};
+export JAVA_PRE_OPTS=${JAVA_PRE_OPTS:--Xms6g -Xmx8g};
 
 #### Step 1) clear out the database
-execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "CLEAR_ENTIRE_DATABASE" "$1" "$2"
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "-c" "CLEAR_ENTIRE_DATABASE" "-f" "$1"
 if [ "$?" -ne "0" ]; then
     echo "Problem clearing out database."
     exit 1
@@ -39,7 +39,7 @@ fi
 
 #### Step 3) reload the data from a snapshot file
 
-execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "RELOAD_DATA" "$1"
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "-c" "RELOAD_DATA" "-f" "$1"
 if [ "$?" -ne "0" ]; then
     echo "Problem reloading data into the database."
     end_date;
diff --git a/src/main/scripts/dataRestoreFromSnapshotMulti.sh b/src/main/scripts/dataRestoreFromSnapshotMulti.sh
new file mode 100644 (file)
index 0000000..1e322dc
--- /dev/null
@@ -0,0 +1,52 @@
+#!/bin/ksh
+#
+# NOTE - this is the updated version of this script which uses multi-threaded reload code
+#
+# This script uses the dataSnapshot and SchemaGenerator (via GenTester) java classes to restore 
+# data to a database by doing three things: 
+#   1) clear out whatever data and schema are currently in the db 
+#   2) rebuild the schema (using the SchemaGenerator)
+#   3) reload data from the passed-in datafile (which must be found in the dataSnapShots directory and
+#      contain an xml view of the db data).
+#
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+start_date;
+check_user;
+
+if [ "$#" -lt 1 ]; then
+    echo "Illegal number of parameters"
+    echo "usage: $0 previous_snapshot_filename"
+    exit 1
+fi
+
+source_profile;
+export JAVA_PRE_OPTS=${JAVA_PRE_OPTS:--Xms6g -Xmx8g};
+
+#### Step 1) clear out the database
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "-c" "CLEAR_ENTIRE_DATABASE" "-f" "$1"
+if [ "$?" -ne "0" ]; then
+    echo "Problem clearing out database."
+    exit 1
+fi
+#### Step 2) rebuild the db-schema
+execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml "GEN_DB_WITH_NO_DEFAULT_CR"
+if [ "$?" -ne "0" ]; then
+    echo "Problem rebuilding the schema (SchemaGenerator)."
+    exit 1
+fi
+
+#### Step 3) reload the data from a snapshot file
+
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "-c" "MULTITHREAD_RELOAD" "-f" "$1"
+if [ "$?" -ne "0" ]; then
+    echo "Problem reloading data into the database."
+    end_date;
+    exit 1
+fi
+end_date;
+exit 0
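For reference, the MULTITHREAD_RELOAD step above can also be driven straight from Java, mirroring the argument style used by the DataSnapshotTest changes later in this commit; the class name and snapshot file name below are illustrative only:

    public class MultithreadReloadExample {
        public static void main(String[] args) throws Exception {
            // -c selects the command, -f the snapshot file under logs/data/dataSnapshots/
            String[] snapshotArgs = {"-c", "MULTITHREAD_RELOAD", "-f", "pserver2.graphson"};
            org.onap.aai.datasnapshot.DataSnapshot.main(snapshotArgs);
        }
    }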
index f380e85..ca3b033 100644 (file)
@@ -23,6 +23,15 @@ fi
 start_date;
 check_user;
 source_profile;
+
+# Only source the file aai-graphadmin-tools-vars for dataSnapshot.
+# Do not source this for dataRestore; otherwise taking a snapshot
+# and restoring from a snapshot would use the same memory settings,
+# even though restoring from a snapshot needs a lot more memory
+# than taking a snapshot
+if [ -f "$PROJECT_HOME/resources/aai-graphadmin-tools-vars.sh" ]; then
+    source $PROJECT_HOME/resources/aai-graphadmin-tools-vars.sh
+fi;
 execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot $PROJECT_HOME/resources/logback.xml "$@"
 end_date;
 exit 0
index 3d30790..2140354 100644 (file)
@@ -136,15 +136,15 @@ while getopts ":f:s:d:n:c:i:m:o:p:" opt; do
 
 echo 'Done'
 
-set -A nodes pserver cloud-region availability-zone tenant zone complex
-
+set -A nodes customer service-subscription service pserver cloud-region availability-zone tenant zone complex
 #Create empty partial file
  > $INPUT_DATASNAPSHOT_FILE".partial"
 
 for nodeType in ${nodes[@]}
-       do
-         grep "aai-node-type.*\"value\":\"$nodeType\"" $INPUT_DATASNAPSHOT_FILE >>$INPUT_DATASNAPSHOT_FILE'.partial'
-    done
+ do
+        grep "aai-node-type.*\"value\":\"$nodeType\"" $INPUT_DATASNAPSHOT_FILE'.P'* >>$INPUT_DATASNAPSHOT_FILE'.out'
+     cat $INPUT_DATASNAPSHOT_FILE'.out' | cut -d':' -f2- > $INPUT_DATASNAPSHOT_FILE'.partial'
+ done
 
 
 execute_spring_jar org.onap.aai.dbgen.DynamicPayloadGenerator ${PROJECT_HOME}/resources/dynamicPayloadGenerator-logback.xml -s ${VALIDATE_SCHEMA} \
index 8021aa6..77b2919 100644 (file)
@@ -3,7 +3,7 @@
 #Create empty partial snapshot file
 INPUT_DATASNAPSHOT_FILE=$1
 
-set -A nodes pserver cloud-region availability-zone tenant zone complex
+set -A nodes customer service-subscription service pserver cloud-region availability-zone tenant zone complex
  > $INPUT_DATASNAPSHOT_FILE".partial"
 
 for nodeType in ${nodes[@]}
diff --git a/src/main/scripts/preDataRestore.sh b/src/main/scripts/preDataRestore.sh
new file mode 100644 (file)
index 0000000..c176a1a
--- /dev/null
@@ -0,0 +1,44 @@
+#!/bin/ksh
+#
+# This script does just the first two steps of our normal dataRestoreFromSnapshot script.
+# This should only be needed if we are trouble-shooting and need to run step 3 (the 
+#    actual call to dataSnapshot) separately with different input params.
+#
+# This script does these two steps:
+#   1) clear out whatever data and schema are currently in the db 
+#   2) rebuild the schema (using the SchemaGenerator)
+# 
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+start_date;
+check_user;
+
+if [ "$#" -lt 1 ]; then
+    echo "Illegal number of parameters"
+    echo "usage: $0 previous_snapshot_filename"
+    exit 1
+fi
+
+source_profile;
+export JAVA_PRE_OPTS=${JAVA_PRE_OPTS:--Xms6g -Xmx8g};
+
+#### Step 1) clear out the database
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "CLEAR_ENTIRE_DATABASE" "$1" "$2"
+if [ "$?" -ne "0" ]; then
+    echo "Problem clearing out database."
+    exit 1
+fi
+#### Step 2) rebuild the db-schema
+execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml "GEN_DB_WITH_NO_DEFAULT_CR"
+if [ "$?" -ne "0" ]; then
+    echo "Problem rebuilding the schema (SchemaGenerator)."
+    exit 1
+fi
+
+
+end_date;
+exit 0
index 2b0f5c5..cbfe335 100644 (file)
@@ -30,7 +30,7 @@ start_date;
 check_user;
 source_profile;
 
-ARGS="-c ${PROJECT_HOME}/resources/etc/appprops/janusgraph-realtime.properties";
+ARGS="-c ${PROJECT_HOME}/resources/etc/appprops/janusgraph-migration.properties";
 
 if [ -f "$PROJECT_HOME/resources/application.properties" ]; then
     # Get the application properties file and look for all lines
index 6385fee..64bf5fa 100644 (file)
@@ -44,7 +44,11 @@ import org.springframework.web.client.RestTemplate;
 
 import javax.ws.rs.core.Response;
 import java.io.UnsupportedEncodingException;
-import java.util.*;
+import java.util.Base64;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
 
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.is;
@@ -66,6 +70,11 @@ import static org.junit.Assert.fail;
 @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = GraphAdminApp.class)
 @ContextConfiguration(initializers = PropertyPasswordConfiguration.class)
 @Import(GraphAdminTestConfiguration.class)
+@TestPropertySource(properties = {
+        "schema.uri.base.path = /aai",
+        "schema.ingest.file = src/main/resources/application.properties",
+        "schema.translator.list = config"
+})
 public class AAIGremlinQueryTest {
 
     @ClassRule
@@ -139,7 +148,6 @@ public class AAIGremlinQueryTest {
 
         String authorization = Base64.getEncoder().encodeToString("AAI:AAI".getBytes("UTF-8"));
         headers.add("Authorization", "Basic " + authorization);
-        httpEntity = new HttpEntity(headers);
         baseUrl = "https://localhost:" + randomPort;
     }
 
index 59afe60..5ceafa7 100644 (file)
-/**\r
- * ============LICENSE_START=======================================================\r
- * org.onap.aai\r
- * ================================================================================\r
- * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
- * ================================================================================\r
- * Licensed under the Apache License, Version 2.0 (the "License");\r
- * you may not use this file except in compliance with the License.\r
- * You may obtain a copy of the License at\r
- *\r
- *    http://www.apache.org/licenses/LICENSE-2.0\r
- *\r
- * Unless required by applicable law or agreed to in writing, software\r
- * distributed under the License is distributed on an "AS IS" BASIS,\r
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * See the License for the specific language governing permissions and\r
- * limitations under the License.\r
- * ============LICENSE_END=========================================================\r
- */\r
-package org.onap.aai;\r
-\r
-import static org.junit.Assert.assertNotNull;\r
-\r
-import java.io.IOException;\r
-import java.io.InputStream;\r
-import java.util.Map;\r
-\r
-import org.apache.commons.io.IOUtils;\r
-import org.janusgraph.core.JanusGraph;\r
-import org.janusgraph.core.JanusGraphFactory;\r
-import org.janusgraph.core.JanusGraphTransaction;\r
-import org.junit.*;\r
-import org.onap.aai.config.*;\r
-import org.onap.aai.db.schema.AuditorFactory;\r
-import org.onap.aai.edges.EdgeIngestor;\r
-import org.onap.aai.introspection.LoaderFactory;\r
-import org.onap.aai.introspection.MoxyLoader;\r
-import org.onap.aai.nodes.NodeIngestor;\r
-import org.onap.aai.rest.db.HttpEntry;\r
-import org.onap.aai.serialization.db.EdgeSerializer;\r
-import org.onap.aai.setup.AAIConfigTranslator;\r
-import org.onap.aai.setup.SchemaLocationsBean;\r
-import org.onap.aai.setup.SchemaVersions;\r
-import org.onap.aai.setup.SchemaVersion;\r
-import org.springframework.beans.factory.annotation.Autowired;\r
-import org.springframework.beans.factory.annotation.Value;\r
-import org.springframework.test.context.ContextConfiguration;\r
-import org.springframework.test.context.TestPropertySource;\r
-import org.springframework.test.context.junit4.rules.SpringClassRule;\r
-import org.springframework.test.context.junit4.rules.SpringMethodRule;\r
-\r
-@ContextConfiguration(classes = {\r
-        SchemaLocationsBean.class,\r
-        AAIConfigTranslator.class,\r
-        SchemaVersions.class,\r
-        NodeIngestor.class,\r
-        EdgeIngestor.class,\r
-        EdgeSerializer.class,\r
-        SpringContextAware.class,\r
-        AuditorConfiguration.class,\r
-        DslConfiguration.class,\r
-        IntrospectionConfig.class,\r
-        RestBeanConfig.class\r
-})\r
-@TestPropertySource(properties = {\r
-        "schema.uri.base.path = /aai",\r
-        "schema.ingest.file = src/main/resources/application.properties"\r
-})\r
-public abstract class AAISetup {\r
-\r
-    @Autowired\r
-    protected NodeIngestor nodeIngestor;\r
-\r
-    @Autowired\r
-    protected LoaderFactory loaderFactory;\r
-\r
-    @Autowired\r
-    protected Map<SchemaVersion, MoxyLoader> moxyLoaderInstance;\r
-\r
-    @Autowired\r
-    protected HttpEntry traversalHttpEntry;\r
-\r
-    @Autowired\r
-    protected HttpEntry traversalUriHttpEntry;\r
-\r
-    @Autowired\r
-    protected EdgeSerializer edgeSerializer;\r
-\r
-    @Autowired\r
-    protected SchemaVersions schemaVersions;\r
-\r
-    @Autowired\r
-    protected EdgeIngestor edgeIngestor;\r
-\r
-    @Autowired\r
-    protected AuditorFactory auditorFactory;\r
-\r
-    @Value("${schema.uri.base.path}")\r
-    protected String basePath;\r
-\r
-    @ClassRule\r
-    public static final SpringClassRule springClassRule = new SpringClassRule();\r
-\r
-    @Rule\r
-    public final SpringMethodRule springMethodRule = new SpringMethodRule();\r
-\r
-    protected static JanusGraph graph;\r
-    protected static JanusGraphTransaction tx;\r
-\r
-    @BeforeClass\r
-    public static void setupBundleconfig() throws Exception {\r
-        System.setProperty("AJSC_HOME", "./");\r
-        System.setProperty("BUNDLECONFIG_DIR", "src/main/resources/");\r
-        System.setProperty("org.onap.aai.graphadmin.started", "true");\r
-        graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();\r
-        tx = graph.newTransaction();\r
-    }\r
-\r
-    @AfterClass\r
-    public static void cleanUpGraph() {\r
-        tx.tx().rollback();\r
-        graph.close();\r
-    }\r
-\r
-    public String getPayload(String filename) throws IOException {\r
-\r
-        InputStream inputStream = getClass()\r
-                .getClassLoader()\r
-                .getResourceAsStream(filename);\r
-\r
-        String message = String.format("Unable to find the %s in src/test/resources", filename);\r
-        assertNotNull(message, inputStream);\r
-\r
-        String resource = IOUtils.toString(inputStream);\r
-        return resource;\r
-    }\r
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai;
+
+import static org.junit.Assert.assertNotNull;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Map;
+
+import org.apache.commons.io.IOUtils;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.*;
+import org.onap.aai.config.*;
+import org.onap.aai.db.schema.AuditorFactory;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.MoxyLoader;
+import org.onap.aai.nodes.NodeIngestor;
+import org.onap.aai.rest.db.HttpEntry;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.setup.AAIConfigTranslator;
+import org.onap.aai.setup.SchemaLocationsBean;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
+import org.springframework.test.context.junit4.rules.SpringClassRule;
+import org.springframework.test.context.junit4.rules.SpringMethodRule;
+
+@ContextConfiguration(classes = {
+        SchemaLocationsBean.class,
+        AAIConfigTranslator.class,
+        SchemaVersions.class,
+        NodeIngestor.class,
+        EdgeIngestor.class,
+        EdgeSerializer.class,
+        SpringContextAware.class,
+        AuditorConfiguration.class,
+        DslConfiguration.class,
+        IntrospectionConfig.class,
+        RestBeanConfig.class
+})
+@TestPropertySource(properties = {
+        "schema.uri.base.path = /aai",
+        "schema.ingest.file = src/main/resources/application.properties",
+        "schema.translator.list = config"
+})
+public abstract class AAISetup {
+
+    @Autowired
+    protected NodeIngestor nodeIngestor;
+
+    @Autowired
+    protected LoaderFactory loaderFactory;
+
+    @Autowired
+    protected Map<SchemaVersion, MoxyLoader> moxyLoaderInstance;
+
+    @Autowired
+    protected HttpEntry traversalHttpEntry;
+
+    @Autowired
+    protected HttpEntry traversalUriHttpEntry;
+
+    @Autowired
+    protected EdgeSerializer edgeSerializer;
+
+    @Autowired
+    protected SchemaVersions schemaVersions;
+
+    @Autowired
+    protected EdgeIngestor edgeIngestor;
+
+    @Autowired
+    protected AuditorFactory auditorFactory;
+
+    @Value("${schema.uri.base.path}")
+    protected String basePath;
+
+    @ClassRule
+    public static final SpringClassRule springClassRule = new SpringClassRule();
+
+    @Rule
+    public final SpringMethodRule springMethodRule = new SpringMethodRule();
+
+    protected static JanusGraph graph;
+    protected static JanusGraphTransaction tx;
+
+    @BeforeClass
+    public static void setupBundleconfig() throws Exception {
+        System.setProperty("AJSC_HOME", "./");
+        System.setProperty("BUNDLECONFIG_DIR", "src/main/resources/");
+        System.setProperty("org.onap.aai.graphadmin.started", "true");
+        graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+        tx = graph.newTransaction();
+    }
+
+    @AfterClass
+    public static void cleanUpGraph() {
+        tx.tx().rollback();
+        graph.close();
+    }
+
+    public String getPayload(String filename) throws IOException {
+
+        InputStream inputStream = getClass()
+                .getClassLoader()
+                .getResourceAsStream(filename);
+
+        String message = String.format("Unable to find the %s in src/test/resources", filename);
+        assertNotNull(message, inputStream);
+
+        String resource = IOUtils.toString(inputStream);
+        return resource;
+    }
 }
\ No newline at end of file
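The test classes touched later in this commit (DataGroomingTest, DataSnapshotTest, DupeToolTest) pick up the wired beans simply by extending AAISetup. A minimal hypothetical subclass showing the pattern (the class name and assertions are illustrative, not part of this commit):

    package org.onap.aai;

    import static org.junit.Assert.assertNotNull;

    import org.junit.Test;

    public class WiringSmokeTest extends AAISetup {

        @Test
        public void beansAreInjected() {
            // Provided by the @ContextConfiguration / @TestPropertySource setup above
            assertNotNull(loaderFactory);
            assertNotNull(schemaVersions);
            assertNotNull(edgeSerializer);
        }
    }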
index 161702d..63a7a24 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.onap.aai.datagrooming;
 
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
+
 import org.janusgraph.core.JanusGraphTransaction;
 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
 import org.apache.tinkerpop.gremlin.structure.Edge;
@@ -30,17 +30,15 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.FixMethodOrder;
 import org.junit.Test;
+import org.junit.Ignore;
 import org.junit.runners.MethodSorters;
 import org.onap.aai.AAISetup;
 import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
 
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.MatcherAssert.assertThat;
@@ -55,8 +53,6 @@ public class DataGroomingTest extends AAISetup {
 
        private Vertex cloudRegionVertex;
 
-       private boolean setUp = false;
-
        @Before
        public void setup() {
                dataGrooming = new DataGrooming(loaderFactory, schemaVersions);
@@ -66,78 +62,103 @@ public class DataGroomingTest extends AAISetup {
                try {
                        GraphTraversalSource g = transaction.traversal();
                        cloudRegionVertex = g.addV().property("aai-node-type", "cloud-region").property("cloud-owner", "test-owner")
-                                       .property("cloud-region-id", "test-region").property("source-of-truth", "JUNIT").next();
+                                       .property("cloud-region-id", "test-region").property("source-of-truth", "JUNIT")
+                                       .property("aai-uri", "aai-uriX01")
+                                       .property("aai-last-mod-ts","19191919").next();
 
                        Vertex cloudRegionVertexDupe = g.addV().property("aai-node-type", "cloud-region")
                                        .property("cloud-owner", "test-owner").property("cloud-region-id", "test-region")
-                                       .property("source-of-truth", "JUNIT").next();
+                                       .property("aai-uri", "aai-uriX02")
+                                       .property("aai-last-mod-ts","19191919").property("source-of-truth", "JUNIT").next();
 
                        Vertex cloudRegionDupe3 = g.addV().property("aai-node-type", "cloud-region")
                                        .property("cloud-owner", "test-owner").property("cloud-region-id", "test-region")
-                                       .property("source-of-truth", "JUNIT").next();
+                                       .property("aai-uri", "aai-uriX03")
+                                       .property("aai-last-mod-ts","19191919").property("source-of-truth", "JUNIT").next();
 
                        Vertex cloudRegionDupe4 = g.addV().property("aai-node-type", "cloud-region")
                                        .property("cloud-owner", "test-owner").property("cloud-region-id", "test-region")
-                                       .property("source-of-truth", "JUNIT").next();
+                                       .property("aai-uri", "aai-uriX04")
+                                       .property("aai-last-mod-ts","19191919").property("source-of-truth", "JUNIT").next();
 
                        Vertex cloudRegionDupe5 = g.addV().property("aai-node-type", "cloud-region")
                                        .property("cloud-owner", "test-owner").property("cloud-region-id", "test-region")
+                                       .property("aai-uri", "aai-uriX05")
                                        .property("source-of-truth", "JUNIT").next();
                        
                        Vertex cloudRegionVertexBadNode = g.addV().property("aai-node-type", "cloud-region")
+                                       .property("aai-uri", "aai-uriX06")
                                        .property("cloud-owner", "test-owner-noregionId").property("source-of-truth", "JUNIT").next();
 
                        
                        Vertex cloudRegionVertexBadNode2 = g.addV().property("aai-node-type", "cloud-region")
+                                       .property("aai-uri", "aai-uriX07")
                                        .property("cloud-region", "test-owner-noownerId").property("source-of-truth", "JUNIT").next();
 
                        Vertex cloudRegionVertexBadNode3 = g.addV().property("aai-node-type", "cloud-region")
+                                       .property("aai-uri", "aai-uriX08")
                                        .property("cloud-region", "test-owner-noownerId2").property("source-of-truth", "JUNIT").next();
 
                        Vertex tenantGhostNodeNoNT = g.addV().property("tenant-id", "test-owner-tenant-id-1")
+                                       .property("aai-uri", "aai-uriX09")
                                        .property("source-of-truth", "JUNIT").next();
 
                        Vertex cloudRegionNoNT = g.addV().property("cloud-region", "test-owner-noownerIdnont-1")
+                                       .property("aai-uri", "aai-uriX10")
                                        .property("cloud-owner", "test-owner-noregion-nont2").property("source-of-truth", "JUNIT").next();
 
                        Vertex tenantNoNT = g.addV().property("tenant-id", "test-owner-tenant-id-1")
+                                       .property("aai-uri", "aai-uriX11")
                                        .property("source-of-truth", "JUNIT").next();
 
                        Vertex tenantNoKey = g.addV().property("aai-node-type", "tenant").property("source-of-truth", "JUNIT")
+                                       .property("aai-uri", "aai-uriX12")
                                        .next();
 
                        Vertex cloudRegionNoKey = g.addV().property("aai-node-type", "cloud-region")
+                                       .property("aai-uri", "aai-uriX13")
                                        .property("source-of-truth", "JUNIT").next();
 
                        Vertex tenantNoParent = g.addV().property("aai-node-type", "tenant")
+                                       .property("aai-uri", "aai-uriX14")
                                        .property("tenant-id", "test-owner-tenant-id").property("source-of-truth", "JUNIT").next();
 
                        Vertex tenantNoParent1 = g.addV().property("aai-node-type", "tenant")
+                                       .property("aai-uri", "aai-uriX15")
                                        .property("tenant-id", "test-owner-tenant-id1").property("source-of-truth", "JUNIT").next();
 
                        Vertex tenantNoParentDupe1 = g.addV().property("aai-node-type", "tenant")
+                                       .property("aai-uri", "aai-uriX16")
                                        .property("tenant-id", "test-owner-tenant-id1").property("source-of-truth", "JUNIT").next();
 
                        Vertex tenantNoParentDupe2 = g.addV().property("aai-node-type", "tenant")
+                                       .property("aai-uri", "aai-uriX17")
                                        .property("tenant-id", "test-owner-tenant-id1").property("source-of-truth", "JUNIT").next();
 
                        Vertex tenantDupe3 = g.addV().property("aai-node-type", "tenant")
+                                       .property("aai-uri", "aai-uriX18")
                                        .property("tenant-id", "test-owner-tenant-id1").property("source-of-truth", "JUNIT").next();
+                       
                        Vertex tenantDupe4 = g.addV().property("aai-node-type", "tenant")
+                                       .property("aai-uri", "aai-uriX19")
                                        .property("tenant-id", "test-owner-tenant-id1").property("source-of-truth", "JUNIT").next();
 
                        Vertex tenantNoParent2 = g.addV().property("aai-node-type", "tenant")
+                                       .property("aai-uri", "aai-uriX20")
                                        .property("tenant-id", "test-owner-tenant-id2").property("source-of-truth", "JUNIT").next();
 
                        tenantNoParent2.property("aai-uuid", tenantNoParent2.id() + "dummy");
 
                        Vertex tenantVertex = g.addV().property("aai-node-type", "tenant").property("tenant-id", "test-tenant")
+                                       .property("aai-uri", "aai-uriX21")
                                        .property("source-of-truth", "JUNIT").next();
 
                        Vertex pserverVertex = g.addV().property("aai-node-type", "pserver").property("hostname", "test-pserver")
+                                       .property("aai-uri", "aai-uriX22")
                                        .property("in-maint", false).property("source-of-truth", "JUNIT").next();
 
                        Vertex azNokey = g.addV().property("aai-node-type", "availability-zone")
+                                       .property("aai-uri", "aai-uriX23")
                                        .property("source-of-truth", "JUNIT").next();
 
                        cloudRegionVertex.addEdge("BadEdge", tenantGhostNodeNoNT, null);
@@ -147,6 +168,10 @@ public class DataGroomingTest extends AAISetup {
                        edgeSerializer.addTreeEdge(g, cloudRegionNoKey, tenantNoKey);
                        edgeSerializer.addEdge(g, pserverVertex, azNokey);
 
+                       Edge e = g.addV().property("aai-node-type", "blah")
+                                       .property("aai-uri", "aai-uriX24")
+                                       .property("source-of-truth", "JUNIT").addE("blah").next();
+                       
                        cloudRegionNoNT.addEdge("Base Edge2", tenantNoNT, null);
 
                } catch (Exception ex) {
@@ -162,10 +187,13 @@ public class DataGroomingTest extends AAISetup {
                }
        }
 
+       
+       
+
        @Test
        public void testGroomingNonAutoFix() throws AAIException {
                String[] args = {
-                               "-edgesOnly", "false", "-autoFix ", "false", "-skipHostCheck ", "true", "-dontFixOrphans ", "true"
+                                "-skipHostCheck ",  "-dontFixOrphans "
                };
 
                dataGrooming.execute(args);
@@ -178,31 +206,22 @@ public class DataGroomingTest extends AAISetup {
                assertThat(dataGrooming.getOneArmedEdgeHashCount(), is(3));
        }
 
+
        @Test
        public void testGroomingWithAutoFix() throws AAIException {
                String[] args = {
-                               "-autoFix ", "true", "-edgesOnly", "false", "-skipHostCheck ", "false", "-dontFixOrphans ", "false",
-                               "-skipIndexUpdateFix", "true", "-sleepMinutes", "1", "-timeWindowMinutes", "100", "-dupeFixOn", "true"
+                               "-autoFix ",  "-maxFix", "0", 
+                               "-skipIndexUpdateFix",  "-sleepMinutes", "1", "-timeWindowMinutes", "100", "-dupeFixOn"
                };
 
                dataGrooming.execute(args);
-               assertThat(dataGrooming.getDeleteCandidateList().size(), is(19));
-               assertThat(dataGrooming.getDeleteCount(), is(18));
+               assertThat(dataGrooming.getDeleteCandidateList().size(), is(0));
+               assertThat(dataGrooming.getDeleteCount(), is(0));
        }
 
-       @Test
-       public void testGroomingUpdateIndexedProps() throws AAIException {
 
-               JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
-               GraphTraversalSource g = transaction.traversal();
-               Vertex cloudRegionVertex1 = g.addV().property("aai-node-type", "cloud-region")
-                               .property("cloud-owner", "test-owner-partial").property("cloud-region-id", "test-region")
-                               .property("source-of-truth", "JUNIT").next();
-               dataGrooming.updateIndexedProps(cloudRegionVertex1, "1", "cloud-region", new HashMap<>(), new ArrayList<>());
-               transaction.rollback();
-               // TODO asset something
-       }
 
+       
        @Test
        public void testGroomingGettersAndSetters() throws AAIException {
 
@@ -221,6 +240,7 @@ public class DataGroomingTest extends AAISetup {
                assertThat(dataGrooming.getDeleteCount(), is(0));
        }
 
+
        @Test
        public void testGroomingNoArgs() throws AAIException {
                String[] args = {
@@ -235,6 +255,7 @@ public class DataGroomingTest extends AAISetup {
                assertThat(dataGrooming.getDeleteCount(), is(0));
        }
 
+
        @Test
        public void testGroomingDupeCheck() throws AAIException {
                String[] args = {
@@ -244,16 +265,82 @@ public class DataGroomingTest extends AAISetup {
                assertThat(dataGrooming.getDupeGroups().size(), is(2));
        }
 
+
        @Test
        public void testGroomingAutoFixMaxRecords() throws AAIException {
 
-               String[] args = { "-autoFix ", "true", "-maxFix", "0",  "-edgesOnly",
-               "true" , "-sleepMinutes", "1"};
+               String[] args = { "-autoFix ",  "-sleepMinutes", "1"};
+               dataGrooming.execute(args);
+               assertThat(dataGrooming.getDeleteCandidateList().size(), is(14));
+
+       }
+       
+
+       @Test
+       public void testGroomingMain() throws AAIException {
+
+               String[] args = { "-autoFix ",  "-sleepMinutes", "1", "-f", "groomingInput", "-neverUseCache",  "-singleNodeType", "cloud-region"};
                dataGrooming.execute(args);
                assertThat(dataGrooming.getDeleteCandidateList().size(), is(0));
 
        }
 
+
+       
+       @Test
+       public void testGroomingSingleNT() throws AAIException {
+
+               String[] args = { "-autoFix ",  "-sleepMinutes", "1", "-neverUseCache",  "-singleNodeType", "cloud-region"};
+               dataGrooming.execute(args);
+               assertThat(dataGrooming.getDeleteCandidateList().size(), is(8));
+
+       }
+       
+
+       @Test
+       public void testGroomingUpdateIndexedPropsForMissingNT() throws AAIException {
+
+               JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+               GraphTraversalSource g = transaction.traversal();
+               Vertex cloudRegionVertex1 = g.addV().property("aai-node-type", "cloud-region")
+                               .property("cloud-owner", "test-owner-partial").property("cloud-region-id", "test-region")
+                               .property("aai-uri", "aai-uriX25")
+                               .property("source-of-truth", "JUNIT").next();
+               dataGrooming.updateIndexedPropsForMissingNT(cloudRegionVertex1, "1", "cloud-region", new HashMap<>(), new ArrayList<>());
+               transaction.rollback();
+               // TODO assert something
+       }
+       
+
+       @Test
+       public void testTryToReSetIndexedProps() throws AAIException {
+               JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+               GraphTraversalSource g = transaction.traversal();
+               Vertex cloudRegionVertex2 = g.addV().property("aai-node-type", "cloud-region")
+                               .property("aai-uri", "aai-uriX26")
+                               .property("cloud-owner", "test-owner-resetIndx").property("cloud-region-id", "test-region")
+                               .property("source-of-truth", "JUNIT").next();
+               dataGrooming.tryToReSetIndexedProps(cloudRegionVertex2, "1", new ArrayList<>());
+               transaction.rollback();
+               // TODO assert something
+       }
+       
+
+       @Test
+       public void testCheckAaiUriOk() throws AAIException {
+
+               JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+               GraphTraversalSource g = transaction.traversal();
+               Vertex cloudRegionVertex3 = g.addV().property("aai-node-type", "cloud-region")
+                               .property("cloud-owner", "test-owner-no-uri").property("cloud-region-id", "test-region")
+                               .property("source-of-truth", "JUNIT").next();
+               
+               assertThat(dataGrooming.checkAaiUriOk(g, cloudRegionVertex3), is(false));
+               
+               transaction.rollback();
+               
+       }
+       
        @After
        public void tearDown() {
 
index 63fd1fa..d04b6a2 100644 (file)
@@ -34,6 +34,9 @@ import org.onap.aai.dbmap.AAIGraph;
 import org.onap.aai.exceptions.AAIException;
 import org.springframework.boot.test.rule.OutputCapture;
 
+import com.beust.jcommander.ParameterException;
+
+import java.lang.NumberFormatException;
 import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
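The new com.beust.jcommander import reflects the move from positional arguments (e.g. "THREADED_SNAPSHOT 2") to named flags (-c, -f, -threadCount) in the hunks below. As an illustration only (the field and class names here are hypothetical, not the actual DataSnapshot code), JCommander binds such flags like this:

    import com.beust.jcommander.JCommander;
    import com.beust.jcommander.Parameter;

    public class SnapshotArgsExample {
        @Parameter(names = "-c", description = "snapshot command, e.g. THREADED_SNAPSHOT")
        private String command = "JUST_TAKE_SNAPSHOT";

        @Parameter(names = "-f", description = "snapshot file name")
        private String fileName;

        @Parameter(names = "-threadCount", description = "number of reader/writer threads")
        private int threadCount = 15;

        public static void main(String[] args) {
            SnapshotArgsExample parsed = new SnapshotArgsExample();
            // Throws com.beust.jcommander.ParameterException on bad input such as "-threadCount foo"
            JCommander.newBuilder().addObject(parsed).build().parse(args);
            System.out.println(parsed.command + " " + parsed.threadCount);
        }
    }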
@@ -46,7 +49,7 @@ import java.util.stream.Collectors;
 
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.Matchers.containsString;
-import static org.junit.Assert.assertThat;
+import static org.junit.Assert.*;
 
 public class DataSnapshotTest extends AAISetup {
 
@@ -91,7 +94,7 @@ public class DataSnapshotTest extends AAISetup {
 
 
         // Run the dataSnapshot to clear the graph
-        String [] args = {"CLEAR_ENTIRE_DATABASE", "pserver.graphson"};
+        String [] args = {"-c", "CLEAR_ENTIRE_DATABASE", "-f", "pserver.graphson"};
         DataSnapshot.main(args);
 
         // Since the code doesn't clear the graph using AAIGraph.getInstance().getGraph(), its creating a second inmemory graph
@@ -114,7 +117,7 @@ public class DataSnapshotTest extends AAISetup {
         copySnapshotFile(sourceFileName,destFileName);
 
         // Run the clear dataSnapshot and this time it should fail
-        String [] args = {"CLEAR_ENTIRE_DATABASE", "empty.graphson"};
+        String [] args = {"-c","CLEAR_ENTIRE_DATABASE", "-f","empty.graphson"};
         DataSnapshot.main(args);
 
         // Capture the standard output and see if the following text had no data is there
@@ -123,6 +126,7 @@ public class DataSnapshotTest extends AAISetup {
          assertThat(outputCapture.toString(), containsString("graphson had no data."));
     }
 
+    
     @Test
     public void testTakeSnapshotAndItShouldCreateASnapshotFileWithOneVertex() throws IOException, InterruptedException {
 
@@ -131,7 +135,9 @@ public class DataSnapshotTest extends AAISetup {
         Set<Path> preSnapshotFiles = Files.walk(Paths.get(logsFolder)).collect(Collectors.toSet());
 
         // Run the clear dataSnapshot and this time it should fail
-        String [] args = {"JUST_TAKE_SNAPSHOT"};
+        //String [] args = {"JUST_TAKE_SNAPSHOT"};  >> default behavior is now to use 15 threads
+        // To just get one file, you have to tell it to just use one.
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount" ,"1"};
 
         DataSnapshot.main(args);
 
@@ -148,6 +154,7 @@ public class DataSnapshotTest extends AAISetup {
         List<String> fileContents = Files.readAllLines(snapshotPathList.get(0));
         assertThat(fileContents.get(0), containsString("id"));
     }
+    
 
     @Test
     public void testTakeSnapshotMultiAndItShouldCreateMultipleSnapshotFiles() throws IOException {
@@ -155,7 +162,7 @@ public class DataSnapshotTest extends AAISetup {
         String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
 
         // Run the clear dataSnapshot and this time it should fail
-        String [] args = {"THREADED_SNAPSHOT", "2"};
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount","2"};
 
         DataSnapshot.main(args);
 
@@ -169,7 +176,7 @@ public class DataSnapshotTest extends AAISetup {
         String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
 
         // Run the clear dataSnapshot and this time it should fail
-        String [] args = {"THREADED_SNAPSHOT", "2", "DEBUG"};
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount","2", "-debugFlag","DEBUG"};
 
         DataSnapshot.main(args);
 
@@ -181,11 +188,12 @@ public class DataSnapshotTest extends AAISetup {
     @Test
     public void testTakeSnapshotMultiWithDebugAndInvalidNumberAndItShouldFail() throws IOException {
 
-        String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
+        boolean thrown = false;
+       String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
 
         // Run the clear dataSnapshot and this time it should fail
-        String [] args = {"THREADED_SNAPSHOT", "foo", "DEBUG"};
-
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount","foo","-debugFlag", "DEBUG"};
+        
         DataSnapshot.main(args);
 
         // For this test if there is only one vertex in the graph, not sure if it will create multiple files
@@ -198,9 +206,9 @@ public class DataSnapshotTest extends AAISetup {
         String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
 
         // Run the clear dataSnapshot and this time it should fail
-        String [] args = {"THREADED_SNAPSHOT", "foo", "DEBUG", "100"};
+        String [] args = {"-c","THREADED_SNAPSHOT","-threadCount", "foo", "-debugFlag","DEBUG","-debugAddDelayTime", "100"};
 
-        DataSnapshot.main(args);
+               DataSnapshot.main(args);
 
         // For this test if there is only one vertex in the graph, not sure if it will create multiple files
         // would need to add more data to the janusgraph
@@ -212,7 +220,7 @@ public class DataSnapshotTest extends AAISetup {
         String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
 
         // Run the clear dataSnapshot and this time it should fail
-        String [] args = {"THREADED_SNAPSHOT", "0", "DEBUG", "100"};
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount","0", "-debugFlag","DEBUG", "-debugAddDelayTime","100"};
 
         DataSnapshot.main(args);
 
@@ -226,7 +234,7 @@ public class DataSnapshotTest extends AAISetup {
         String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
 
         // Run the clear dataSnapshot and this time it should fail
-        String [] args = {"THREADED_SNAPSHOT", "0", "DEBUG", "foo"};
+        String [] args = {"-c","THREADED_SNAPSHOT","-threadCount", "0","-debugFlag","DEBUG", "-debugAddDelayTime","foo"};
 
         DataSnapshot.main(args);
 
@@ -234,13 +242,13 @@ public class DataSnapshotTest extends AAISetup {
         // would need to add more data to the janusgraph
     }
 
-    @Test
+//    @Test
     public void testTakeSnapshotMultiWithMoreParametersThanAllowedAndItShouldFail() throws IOException {
 
         String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
 
         // Run the clear dataSnapshot and this time it should fail
-        String [] args = {"THREADED_SNAPSHOT", "0", "DEBUG", "foo", "bar"};
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount", "0", "-debugFlag","DEBUG",  "-debugAddDelayTime","foo", "bar"};
 
         DataSnapshot.main(args);
 
@@ -256,7 +264,7 @@ public class DataSnapshotTest extends AAISetup {
         String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
 
         // Run the clear dataSnapshot and this time it should fail
-        String [] args = {"THREADED_SNAPSHOT", "0"};
+        String [] args = {"-c","THREADED_SNAPSHOT", "-threadCount","0"};
 
         DataSnapshot.main(args);
     }
@@ -269,7 +277,7 @@ public class DataSnapshotTest extends AAISetup {
         String logsFolder     = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/";
 
         // Run the clear dataSnapshot and this time it should fail
-        String [] args = {"THREADED_SNAPSHOT", "foo"};
+        String [] args = {"-c","THREADED_SNAPSHOT","-threadCount", "foo"};
 
         DataSnapshot.main(args);
     }
@@ -288,7 +296,7 @@ public class DataSnapshotTest extends AAISetup {
         String destFileName   = System.getProperty("AJSC_HOME") + "/logs/data/dataSnapshots/pserver.graphson";
         copySnapshotFile(sourceFileName,destFileName);
 
-        String [] args = {"RELOAD_DATA", "pserver.graphson"};
+        String [] args = {"-c","RELOAD_DATA", "-f","pserver.graphson"};
 
         DataSnapshot.main(args);
     }
@@ -310,7 +318,7 @@ public class DataSnapshotTest extends AAISetup {
         // After reload remove the added vertexes in the graph
         // The reason for this so each test is independent
         // as there shouldn't be dependencies and cause weird issues
-        String [] args = {"MULTITHREAD_RELOAD", "pserver2.graphson"};
+        String [] args = {"-c","MULTITHREAD_RELOAD","-f", "pserver2.graphson"};
 
         DataSnapshot.main(args);
     }
@@ -321,7 +329,7 @@ public class DataSnapshotTest extends AAISetup {
         // After reload remove the added vertexes in the graph
         // The reason for this so each test is independent
         // as there shouldn't be dependencies and cause weird issues
-        String [] args = {"MULTITHREAD_RELOAD", "emptyfoo2.graphson"};
+        String [] args = {"-c","MULTITHREAD_RELOAD", "-f","emptyfoo2.graphson"};
 
         DataSnapshot.main(args);
     }
@@ -343,7 +351,7 @@ public class DataSnapshotTest extends AAISetup {
         // After reload remove the added vertexes in the graph
         // The reason for this so each test is independent
         // as there shouldn't be dependencies and cause weird issues
-        String [] args = {"RELOAD_DATA_MULTI", "pserver2.graphson"};
+        String [] args = {"-c","RELOAD_DATA_MULTI","-f", "pserver2.graphson"};
 
         DataSnapshot.main(args);
     }
index 1d3228e..5e7a9a1 100644 (file)
@@ -21,6 +21,7 @@ package org.onap.aai.dbgen;
 
 import com.att.eelf.configuration.EELFLogger;
 import com.att.eelf.configuration.EELFManager;
+
 import org.janusgraph.core.JanusGraphTransaction;
 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
@@ -30,6 +31,8 @@ import org.junit.Test;
 import org.onap.aai.AAISetup;
 import org.onap.aai.dbmap.AAIGraph;
 
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.*;
 
 public class DupeToolTest extends AAISetup {
@@ -53,38 +56,65 @@ public class DupeToolTest extends AAISetup {
         try {
 
             GraphTraversalSource g = transaction.traversal();
-
-            Vertex cloudRegionVertex = g.addV()
-                    .property("aai-node-type", "cloud-region")
-                    .property("cloud-owner", "test-owner")
-                    .property("cloud-region-id", "test-region")
+            
+            Vertex pserverVertex = g.addV()
+                    .property("aai-node-type", "pserver")
+                    .property("hostname", "test-pserver")
+                    .property("in-maint", false)
                     .property("source-of-truth", "JUNIT")
                     .next();
 
-            Vertex tenantVertex = g.addV()
-                    .property("aai-node-type", "tenant")
-                    .property("tenant-id", "test-tenant")
+            // Dupe set #1
+            Vertex pInterfaceVertex1 = g.addV()
+                    .property("aai-node-type", "p-interface")
+                    .property("interface-name", "p-interface-name1")
+                    .property("in-maint", false)
                     .property("source-of-truth", "JUNIT")
                     .next();
-
-            Vertex pserverVertex = g.addV()
-                    .property("aai-node-type", "pserver")
-                    .property("hostname", "test-pserver")
+            edgeSerializer.addTreeEdge(g, pserverVertex, pInterfaceVertex1);
+                
+            Vertex pInterfaceVertex2 = g.addV()
+                    .property("aai-node-type", "p-interface")
+                    .property("interface-name", "p-interface-name1")
                     .property("in-maint", false)
                     .property("source-of-truth", "JUNIT")
                     .next();
-
-            for(int i = 0; i < 100; ++i){
-                g.addV()
-                        .property("aai-node-type", "p-interface")
-                        .property("interface-name", "p-interface-name")
-                        .property("in-maint", false)
-                        .property("source-of-truth", "JUNIT")
-                        .next();
-            }
-
-            edgeSerializer.addTreeEdge(g, cloudRegionVertex, tenantVertex);
-            edgeSerializer.addEdge(g, cloudRegionVertex, pserverVertex);
+            edgeSerializer.addTreeEdge(g, pserverVertex, pInterfaceVertex2);
+            
+            // Dupe Set #2
+            Vertex pInterfaceVertex3 = g.addV()
+                    .property("aai-node-type", "p-interface")
+                    .property("interface-name", "p-interface-name2")
+                    .property("in-maint", false)
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            edgeSerializer.addTreeEdge(g, pserverVertex, pInterfaceVertex3);
+                
+            Vertex pInterfaceVertex4 = g.addV()
+                    .property("aai-node-type", "p-interface")
+                    .property("interface-name", "p-interface-name2")
+                    .property("in-maint", false)
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            edgeSerializer.addTreeEdge(g, pserverVertex, pInterfaceVertex4);
+            
+            // Dupe Set #3
+            Vertex pInterfaceVertex5 = g.addV()
+                    .property("aai-node-type", "p-interface")
+                    .property("interface-name", "p-interface-name3")
+                    .property("in-maint", false)
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            edgeSerializer.addTreeEdge(g, pserverVertex, pInterfaceVertex5);
+                
+            Vertex pInterfaceVertex6 = g.addV()
+                    .property("aai-node-type", "p-interface")
+                    .property("interface-name", "p-interface-name3")
+                    .property("in-maint", false)
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            edgeSerializer.addTreeEdge(g, pserverVertex, pInterfaceVertex6);
+      
 
         } catch(Exception ex){
             success = false;
@@ -99,19 +129,21 @@ public class DupeToolTest extends AAISetup {
         }
     }
 
-    @Test
+
+    @Test
     public void testDupeToolForPInterface(){
-        //TODO: test does not find duplicates
+        
         String[] args = {
                 "-userId", "testuser",
                 "-nodeType", "p-interface",
                 "-timeWindowMinutes", "30",
-                "-autoFix",
                 "-maxFix", "30",
                 "-sleepMinutes", "0"
         };
 
         dupeTool.execute(args);
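+        // setup creates three pairs of duplicate p-interfaces under one pserver, so three dupe groups are expected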
+        assertThat(dupeTool.getDupeGroupCount(), is(3));
+        
     }
 
     @After
diff --git a/src/test/java/org/onap/aai/dbgen/schemamod/SchemaModTest.java b/src/test/java/org/onap/aai/dbgen/schemamod/SchemaModTest.java
new file mode 100644 (file)
index 0000000..06a511d
--- /dev/null
@@ -0,0 +1,138 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dbgen.schemamod;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.FixMethodOrder;
+import org.junit.Test;
+import org.junit.runners.MethodSorters;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.setup.SchemaVersions;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+import static org.mockito.Mockito.when;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.Assert.*;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class SchemaModTest extends AAISetup {
+
+       private static final EELFLogger logger = EELFManager.getInstance().getLogger(SchemaModTest.class);
+
+       private SchemaMod schemaMod;
+
+       private Vertex cloudRegionVertex;
+
+       private boolean setUp = false;
+
+       @Before
+       public void setup() {
+               schemaMod = new SchemaMod(loaderFactory, schemaVersions);
+               // deleteTool.SHOULD_EXIT_VM = false;
+               JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+               boolean success = true;
+               try {
+                       GraphTraversalSource g = transaction.traversal();
+                       cloudRegionVertex = g.addV().property("aai-node-type", "cloud-region").property("cloud-owner", "test-owner")
+                                       .property("cloud-region-id", "test-region").property("source-of-truth", "JUNIT")
+                                       .property("aai-last-mod-ts","19191919").next();
+
+               
+
+                       Vertex pserverVertex = g.addV().property("aai-node-type", "pserver").property("hostname", "test-pserver")
+                                       .property("in-maint", false).property("source-of-truth", "JUNIT").next();
+
+               
+                       edgeSerializer.addEdge(g, cloudRegionVertex, pserverVertex);
+               
+
+               } catch (Exception ex) {
+                       success = false;
+                       logger.error("Unable to create the vertexes", ex);
+               } finally {
+                       if (success) {
+                               transaction.commit();
+                       } else {
+                               transaction.rollback();
+                               fail("Unable to setup the graph");
+                       }
+               }
+       }
+
+       
+       
+       @Test
+       public void testSchemaModDataType() throws AAIException {
+               String usageString = "Usage: SchemaMod propertyName targetDataType targetIndexInfo preserveDataFlag \n";
+               String[] args = {
+                               "sriov-automation", "String", "noIndex", "false"
+               };
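+               // args follow the usage string above: propertyName, targetDataType, targetIndexInfo, preserveDataFlag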
+
+               schemaMod.execute(args);
+               /*
+                * 2 Ghost Nodes - CloudRegions, 1 Orphan Node - tenant
+                */
+               
+       }
+
+
+       
+       @After
+       public void tearDown() {
+
+               JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+               boolean success = true;
+               try {
+                       GraphTraversalSource g = transaction.traversal();
+                       g.V().has("source-of-truth", "JUNIT").toList().forEach(v -> v.remove());
+
+               } catch (Exception ex) {
+                       success = false;
+                       logger.error("Unable to remove the vertexes", ex);
+               } finally {
+                       if (success) {
+                               transaction.commit();
+                       } else {
+                               transaction.rollback();
+                               fail("Unable to teardown the graph");
+                       }
+               }
+       }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v12/ALTSLicenseEntitlementMigrationTest.java b/src/test/java/org/onap/aai/migration/v12/ALTSLicenseEntitlementMigrationTest.java
new file mode 100644 (file)
index 0000000..ad4ae1b
--- /dev/null
@@ -0,0 +1,161 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.io.UnsupportedEncodingException;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+public class ALTSLicenseEntitlementMigrationTest extends AAISetup {
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+    private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+    private Loader loader;
+    private TransactionalGraphEngine dbEngine;
+    private JanusGraph graph;
+    private ALTSLicenseEntitlementMigration migration;
+    private GraphTraversalSource g;
+    private JanusGraphTransaction tx;
+
+    @Before
+    public void setUp() throws Exception {
+        graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+        tx = graph.newTransaction();
+        g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+        System.setProperty("AJSC_HOME", ".");
+        System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                type,
+                loader);
+
+        Vertex vnf = g.addV().property("aai-node-type", "generic-vnf")
+                .property("vnf-id", "123456789")
+                .property("vnf-name", "test-vnf-name")
+                .property("equipment-role", "UCPE")
+                .next();
+
+        Vertex entitlement = g.addV().property("aai-node-type", "entitlement")
+                .property("group-uuid", "guuid-entitlement")
+                .property("resource-uuid", "ruuid-entitlement")
+                .property("aai-uri", "/network/generic-vnfs/generic-vnf/123456789/entitlements/entitlement/ruuideuuid/ruuid-entitlement")
+                .next();
+
+        Vertex license = g.addV().property("aai-node-type", "license")
+                .property("group-uuid", "guuid-license")
+                .property("resource-uuid", "ruuid-license")
+                .property("aai-uri", "/network/generic-vnfs/generic-vnf/123456789/licenses/license/ruuideuuid/ruuid-license")
+                .next();
+
+        Vertex vnf2 = g.addV().property("aai-node-type", "generic-vnf")
+                .property("vnf-id", "23456789")
+                .property("vnf-name", "test-vnf-name")
+                .property("equipment-role", "UCPE")
+                .next();
+        Vertex duplicateEntitlement = g.addV().property("aai-node-type", "entitlement")
+                .property("group-uuid", "guuid")
+                .property("resource-uuid", "ruuid-entitlement")
+                .property("aai-uri", "/network/generic-vnfs/generic-vnf/123456789/entitlements/entitlement/ruuideuuid/ruuid-entitlement")
+                .next();
+
+        Vertex duplicateLicense = g.addV().property("aai-node-type", "license")
+                .property("group-uuid", "guuid")
+                .property("resource-uuid", "ruuid-license")
+                .property("aai-uri", "/network/generic-vnfs/generic-vnf/123456789/licenses/license/ruuideuuid/ruuid-license")
+                .next();
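+        // vnf2's entitlement and license share the same group-uuid ("guuid") to exercise the duplicate-group handling checked in duplicateGroupUuid()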
+
+
+
+        edgeSerializer.addTreeEdge(g, vnf, license);
+        edgeSerializer.addTreeEdge(g, vnf, entitlement);
+        edgeSerializer.addTreeEdge(g, vnf2, duplicateEntitlement);
+        edgeSerializer.addTreeEdge(g, vnf2, duplicateLicense);
+
+        TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+        GraphTraversalSource traversal = g;
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
+        migration = new ALTSLicenseEntitlementMigration(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        migration.run();
+    }
+
+    @After
+    public void cleanUp() {
+        tx.rollback();
+        graph.close();
+    }
+
+    @Test
+    public void testEntitlementsUpdated() throws UnsupportedEncodingException {
+        assertEquals("Found 1 entitlement", (Long)1L,
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "123456789").in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "entitlement").count().next());
+        assertEquals("Entitlement's resource-uuid is updated ", true,
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "123456789").in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "entitlement").has("resource-uuid", "new-ruuid-entitlement").hasNext());
+        assertEquals("Entitlement's resource-uuid is updated by migration ", true,
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "123456789").in("org.onap.relationships.inventory.BelongsTo")
+                .has("aai-node-type", "entitlement").has("resource-uuid", "new-ruuid-entitlement").has("last-mod-source-of-truth", "ALTSLicenseEntitlementMigration").hasNext());
+    }
+    @Test
+    public void testLicensesUpdated() throws UnsupportedEncodingException {
+        assertEquals("Found 1 License", (Long)1L,
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "123456789").in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "license").count().next());
+        assertEquals("License's resource-uuid is updated ", true,
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "123456789").in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "license").has("resource-uuid", "new-ruuid-license").hasNext());
+    }
+
+    @Test
+    public void verifyUri() {
+        assertEquals("Uri should be updated", "/network/generic-vnfs/generic-vnf/123456789/entitlements/entitlement/ruuideuuid/new-ruuid-entitlement",
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "123456789").in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "entitlement").has("resource-uuid", "new-ruuid-entitlement").next().property(AAIProperties.AAI_URI).value());
+        assertEquals("Uri should be updated", "/network/generic-vnfs/generic-vnf/123456789/licenses/license/ruuideuuid/new-ruuid-license",
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "123456789").in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "license").has("resource-uuid", "new-ruuid-license").next().property(AAIProperties.AAI_URI).value());
+    }
+
+    @Test
+    public void duplicateGroupUuid() {
+        Long count = g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "23456789").in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "license").has("resource-uuid", "new-ruuid-license2").count().next() +
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "23456789").in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "entitlement").has("resource-uuid", "new-ruuid-entitlement2").count().next();
+        assertEquals("Duplicate Entitlement or License Group Uuid should be skipped", (Long)1L, count);
+
+
+    }
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/MigrateDataFromASDCToConfigurationTest.java b/src/test/java/org/onap/aai/migration/v12/MigrateDataFromASDCToConfigurationTest.java
new file mode 100644 (file)
index 0000000..7acb40d
--- /dev/null
@@ -0,0 +1,199 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class MigrateDataFromASDCToConfigurationTest extends AAISetup {
+
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+    private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+    private Loader loader;
+    private TransactionalGraphEngine dbEngine;
+    private JanusGraph graph;
+    private MigrateDataFromASDCToConfiguration migration;
+    private GraphTraversalSource g;
+    private JanusGraphTransaction tx;
+
+    Vertex configuration;
+    Vertex configuration2;
+    Vertex configuration3;
+    Vertex configuration4;
+    Vertex configuration5;
+
+    private boolean success = true;
+    private String entitlementPoolUuid = "";
+    private final String PARENT_NODE_TYPE = "generic-vnf";
+    private String VNT = "";
+
+    @Before
+    public void setUp() throws Exception {
+        graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+        tx = graph.newTransaction();
+        g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                type,
+                loader);
+        
+        System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+        Vertex genericvnf1 = g.addV().property("aai-node-type", "generic-vnf")
+                .property("vnf-id", "vnfId1")
+                .property("vnf-type","HN")
+                .next();
+
+        Vertex genericvnf2 = g.addV().property("aai-node-type", "generic-vnf")
+                .property("vnf-id", "vnfId2")
+                .property("vnf-type","HN")
+                .next();
+
+        Vertex genericvnf_wrongtype = g.addV().property("aai-node-type", "generic-vnf")
+                .property("vnf-id", "vnfIdWrong")
+                .property("vnf-type","vHNF")
+                .next();
+
+        Vertex entitlement1 = g.addV().property("aai-node-type", "entitlement")
+                .property("group-uuid", "599a2d74-cfbd-413d-aedb-ec4875817313")
+                .next();
+
+        Vertex entitlement2 = g.addV().property("aai-node-type", "entitlement")
+                .property("group-uuid", "ea9a547e-137b-48e9-a788-c3fb4e631a2a")
+                .next();
+
+        Vertex serviceInstance1 = g.addV().property("aai-node-type", "service-instance")
+                .property("service-instance-id", "servinstanceTestId1")
+                .next();
+
+        Vertex serviceInstance2 = g.addV().property("aai-node-type", "service-instance")
+                .property("service-instance-id", "servinstanceTestId2")
+                .next();
+
+        configuration =  g.addV().property("aai-node-type", "configuration")
+                .property("configuration-id", "configurationIdGraph")
+                .property("vendor-allowed-max-bandwidth", "20")
+                .next();
+        configuration3 =  g.addV().property("aai-node-type", "configuration")
+                .property("configuration-id", "configurationIdGraph3")
+                .property("vendor-allowed-max-bandwidth", "15")
+                .next();
+        configuration2 =  g.addV().property("aai-node-type", "configuration")
+                .property("configuration-id", "configurationIdGraph2")
+                .property("vendor-allowed-max-bandwidth", "25")
+                .next();
+        configuration4 =  g.addV().property("aai-node-type", "configuration")
+                .property("configuration-id", "configurationIdGraph4")
+                .property("vendor-allowed-max-bandwidth", "50")
+                .next();
+        configuration5 =  g.addV().property("aai-node-type", "configuration")
+                .property("configuration-id", "configurationIdGraph4")
+                .property("vendor-allowed-max-bandwidth", "75")
+                .next();
+
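+        // wire the graph: vnf1 (HN, has entitlement1) -> serviceInstance1 -> configuration/configuration3;
+        // vnf2 (no entitlement) -> configuration2; the vHNF vnf -> serviceInstance2 -> configuration5; configuration4 stays unattached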
+        edgeSerializer.addTreeEdge(g, genericvnf1, entitlement1);
+        edgeSerializer.addEdge(g, genericvnf1, serviceInstance1);
+        edgeSerializer.addEdge(g, serviceInstance1, configuration);
+        edgeSerializer.addEdge(g, serviceInstance1, configuration3);
+
+
+        edgeSerializer.addEdge(g, genericvnf2, configuration2, "org.onap.relationships.inventory.Uses");
+
+        edgeSerializer.addTreeEdge(g, genericvnf_wrongtype, entitlement2);
+        edgeSerializer.addEdge(g, genericvnf_wrongtype, serviceInstance2);
+        edgeSerializer.addEdge(g, serviceInstance2, configuration5);
+
+        TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+        GraphTraversalSource traversal = g;
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
+        migration = new MigrateDataFromASDCToConfiguration(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        migration.run();
+    }
+
+
+    @After
+    public void cleanUp() {
+        tx.rollback();
+        graph.close();
+    }
+
+
+    /***
+     * checks that the VNT value was updated and, if there is a second configuration object, that it is also modified
+     */
+
+    @Test
+    public void confirmVNtValueChanged() {
+
+        assertEquals("1000",configuration.property("vendor-allowed-max-bandwidth").value());
+        assertEquals("1000",configuration3.property("vendor-allowed-max-bandwidth").value());
+
+    }
+
+    /***
+     * checks that when the entitlement object is missing, the configuration objects are not modified at all
+     */
+    @Test
+    public void missingEntitlementObject() {
+        assertEquals("25",configuration2.property("vendor-allowed-max-bandwidth").value());
+    }
+    /***
+     * checks that a configuration object not connected to anything is not modified at all
+     */
+
+    @Test
+    public void confirmConfiguration4notchanged() {
+        assertEquals("50",configuration4.property("vendor-allowed-max-bandwidth").value());
+    }
+    /***
+     * checks that a configuration object not linked to an "HN" vnf-type is not changed
+     */
+    @Test
+    public void differentVNFType() {
+        assertEquals("75",configuration5.property("vendor-allowed-max-bandwidth").value());
+    }
+
+
+
+
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/MigrateHUBEvcInventoryTest.java b/src/test/java/org/onap/aai/migration/v12/MigrateHUBEvcInventoryTest.java
new file mode 100644 (file)
index 0000000..72daf2e
--- /dev/null
@@ -0,0 +1,377 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+public class MigrateHUBEvcInventoryTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private MigrateHUBEvcInventory migration;
+       private JanusGraphTransaction tx;
+       private GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+               
+               Vertex customer1 = g.addV()
+                               .property("aai-node-type", "customer")
+                               .property("global-customer-id", "customer-id-1")
+                               .property("subscriber-type", "CUST")
+                               .next();
+               
+               Vertex servSub1 = g.addV()
+                               .property("aai-node-type", "service-subscription")
+                               .property("service-type", "SAREA")
+                               .next();
+               
+               Vertex servInstance1 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-type", "SAREA")
+                               .property("service-instance-id", "evc-name-1")
+                               .next();
+               Vertex servInstance3 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-type", "SAREA")
+                               .property("service-instance-id", "evc-name-3")
+                               .next();
+               Vertex servInstance2 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-type", "SAREA")
+                               .property("service-instance-id", "evc-name-2")
+                               .next();
+               
+               Vertex evc1 = g.addV().property("aai-node-type", "evc")
+                               .property("evc-id", "evc-name-1")
+                               .next();
+               Vertex config1 = g.addV().property("aai-node-type", "configuration")
+                               .property("configuration-id", "evc-name-1")
+                               .next();
+               Vertex fp1 =  g.addV()
+                               .property("aai-node-type", "forwarding-path")
+                               .property("forwarding-path-id", "evc-name-1")
+                               .next();
+               Vertex for11 = g.addV()
+                               .property("aai-node-type", "forwarder")
+                               .property("sequence", "1")
+                               .property("forwarder-role","ingress")
+                               .next();
+               Vertex for12 = g.addV()
+                               .property("aai-node-type", "forwarder")
+                               .property("sequence", "2")
+                               .property("forwarder-role","egress")
+                               .next();
+               Vertex config11 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "evc-name-1-1")
+                               .next();
+               Vertex config12 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "evc-name-1-2")
+                               .next();
+               Vertex fevc11 = g.addV()
+                               .property("aai-node-type", "forwarder-evc")
+                               .property("forwarder-evc-id", "evc-name-1-1")
+                               .property("svlan",  "6")
+                               .next();
+               Vertex fevc12 = g.addV()
+                               .property("aai-node-type", "forwarder-evc")
+                               .property("forwarder-evc-id", "evc-name-1-2")
+                               .property("svlan",  "16")
+                               .next();
+               
+               
+               
+               Vertex evc2 = g.addV().property("aai-node-type", "evc")
+                               .property("evc-id", "evc-name-2")
+                               .next();
+               Vertex config2 = g.addV().property("aai-node-type", "configuration")
+                               .property("configuration-id", "evc-name-2")
+                               .next();
+               Vertex fp2 =  g.addV()
+                               .property("aai-node-type", "forwarding-path")
+                               .property("forwarding-path-id", "evc-name-2")
+                               .next();
+               Vertex for21 = g.addV()
+                               .property("aai-node-type", "forwarder")
+                               .property("sequence", "1")
+                               .property("forwarder-role","ingress")
+                               .next();
+               Vertex for22 = g.addV()
+                               .property("aai-node-type", "forwarder")
+                               .property("sequence", "2")
+                               .property("forwarder-role","ingress")
+                               .next();
+               Vertex for23 = g.addV()
+                               .property("aai-node-type", "forwarder")
+                               .property("sequence", "3")
+                               .property("forwarder-role","egress")
+                               .next();
+               Vertex for24 = g.addV()
+                               .property("aai-node-type", "forwarder")
+                               .property("sequence", "4")
+                               .property("forwarder-role","egress")
+                               .next();
+               Vertex config21 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "evc-name-2-1")
+                               .next();
+               Vertex config22 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "evc-name-2-2")
+                               .next();
+               Vertex config23 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "evc-name-2-3")
+                               .next();
+               Vertex config24 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "evc-name-2-4")
+                               .next();
+               Vertex fevc21 = g.addV()
+                               .property("aai-node-type", "forwarder-evc")
+                               .property("forwarder-evc-id", "evc-name-2-1")
+                               .property("svlan",  "6")
+                               .next();
+               Vertex fevc22 = g.addV()
+                               .property("aai-node-type", "forwarder-evc")
+                               .property("forwarder-evc-id", "evc-name-2-2")
+                               .property("svlan",  "16")
+                               .next();
+               Vertex fevc23 = g.addV()
+                               .property("aai-node-type", "forwarder-evc")
+                               .property("forwarder-evc-id", "evc-name-2-3")
+                               .property("svlan",  "12")
+                               .property("ivlan", "600")
+                               .next();
+               Vertex fevc24 = g.addV()
+                               .property("aai-node-type", "forwarder-evc")
+                               .property("forwarder-evc-id", "evc-name-2-4")
+                               .property("svlan",  "16")
+                               .property("ivlan", "600")
+                               .next();
+
+               Vertex evc3 = g.addV().property("aai-node-type", "evc")
+                               .property("evc-id", "evc-name-3")
+                               .next();
+               Vertex config3 = g.addV().property("aai-node-type", "configuration")
+                               .property("configuration-id", "evc-name-3")
+                               .next();
+               Vertex fp3 =  g.addV()
+                               .property("aai-node-type", "forwarding-path")
+                               .property("forwarding-path-id", "evc-name-3")
+                               .next();
+               Vertex for31 = g.addV()
+                               .property("aai-node-type", "forwarder")
+                               .property("sequence", "1")
+                               .property("forwarder-role","ingress")
+                               .next();
+               Vertex for32 = g.addV()
+                               .property("aai-node-type", "forwarder")
+                               .property("sequence", "2")
+                               .property("forwarder-role","egress")
+                               .next();
+               Vertex config31 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "evc-name-3-1")
+                               .next();
+               Vertex config32 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "evc-name-3-2")
+                               .next();
+               Vertex fevc31 = g.addV()
+                               .property("aai-node-type", "forwarder-evc")
+                               .property("forwarder-evc-id", "evc-name-3-1")
+                               .property("svlan",  "6")
+                               .next();
+               Vertex fevc32 = g.addV()
+                               .property("aai-node-type", "forwarder-evc")
+                               .property("forwarder-evc-id", "evc-name-3-2")
+//                             .property("svlan",  "16")
+                               .next();
+               
+               // graph 1
+               edgeSerializer.addTreeEdge(g, customer1, servSub1);
+               edgeSerializer.addTreeEdge(g, servSub1, servInstance1);
+               edgeSerializer.addTreeEdge(g, servSub1, servInstance2);
+               edgeSerializer.addTreeEdge(g, servSub1, servInstance3);
+               
+               edgeSerializer.addEdge(g, servInstance1, fp1);
+               edgeSerializer.addEdge(g, servInstance2, fp2);
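+               // note: fp3 is not linked to any service-instance, so its forwarder-evcs (evc-name-3-*) are expected to stay unmodified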
+               
+               edgeSerializer.addEdge(g, fp1, config1);
+               edgeSerializer.addEdge(g, fp2, config2);
+               edgeSerializer.addEdge(g, fp3, config3);
+               
+               edgeSerializer.addTreeEdge(g, evc1,  config1);
+               edgeSerializer.addTreeEdge(g, evc2, config2);
+               edgeSerializer.addTreeEdge(g, evc3, config3);
+               
+               edgeSerializer.addTreeEdge(g, fp1, for11);
+               edgeSerializer.addTreeEdge(g, fp1, for12);
+               edgeSerializer.addTreeEdge(g, fp2, for21);
+               edgeSerializer.addTreeEdge(g, fp2, for22);
+               edgeSerializer.addTreeEdge(g, fp2, for23);
+               edgeSerializer.addTreeEdge(g, fp2, for24);
+               edgeSerializer.addTreeEdge(g, fp3, for31);
+               edgeSerializer.addTreeEdge(g, fp3, for32);
+               
+               edgeSerializer.addEdge(g,  for11, config11);
+               edgeSerializer.addEdge(g,  for12, config12);
+               edgeSerializer.addEdge(g,  for21, config21);
+               edgeSerializer.addEdge(g,  for22, config22);
+               edgeSerializer.addEdge(g,  for23, config23);
+               edgeSerializer.addEdge(g,  for24, config24);
+               edgeSerializer.addEdge(g,  for31, config31);
+               edgeSerializer.addEdge(g,  for32, config32);
+               
+               edgeSerializer.addTreeEdge(g, config11, fevc11);
+               edgeSerializer.addTreeEdge(g, config12, fevc12);
+               edgeSerializer.addTreeEdge(g, config21, fevc21);
+               edgeSerializer.addTreeEdge(g, config22, fevc22);
+               edgeSerializer.addTreeEdge(g, config23, fevc23);
+               edgeSerializer.addTreeEdge(g, config24, fevc24);
+               edgeSerializer.addTreeEdge(g, config31, fevc31);
+               edgeSerializer.addTreeEdge(g, config32, fevc32);
+               
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when (spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+               
+               migration = new MigrateHUBEvcInventory(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+       
+       @After
+       public void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+       
+       @Test
+       public void testRun_checkFevc1AndFevc2AreUpdated() throws Exception {
+               
+               // check if forwarder-evc nodes get updated
+               assertEquals("forwarder-evc evc-name-1-1 updated with ivlan", true, 
+                               g.V().has("aai-node-type", "forwarder-evc")
+                               .has("forwarder-evc-id", "evc-name-1-1")
+                               .has("ivlan","4054")
+                               .hasNext());
+               
+               assertEquals("forwarder-evc evc-name-2-2 updated with ivlan", true, 
+                               g.V().has("aai-node-type", "forwarder-evc")
+                               .has("forwarder-evc-id", "evc-name-2-2")
+                               .has("ivlan","4084")
+                               .hasNext());
+               assertEquals("forwarder-evc evc-name-2-3 updated with ivlan", true, 
+                               g.V().has("aai-node-type", "forwarder-evc")
+                               .has("forwarder-evc-id", "evc-name-2-3")
+                               .has("ivlan","4054")
+                               .hasNext());
+               
+               assertEquals("4 forwarder-evcs exist for evc evc-name-2", new Long(4L), 
+                               g.V().has("forwarding-path-id", "evc-name-2")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder-evc")
+                               .count().next());
+               
+               assertEquals("3 forwarder-evcs updated for evc evc-name-2", new Long(3L), 
+                               g.V().has("forwarding-path-id", "evc-name-2")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder-evc")
+                               .has("forwarder-evc-id").has("ivlan")
+                               .count().next());
+               
+               assertEquals("forwarder-evc evc-name-3-1 updated with ivlan", false, 
+                               g.V().has("aai-node-type", "forwarder-evc")
+                               .has("forwarder-evc-id", "evc-name-3-1")
+                               .has("ivlan")
+                               .hasNext());
+       }
+
+       
+       @Test
+       public void testGetAffectedNodeTypes() {
+               Optional<String[]> types = migration.getAffectedNodeTypes();
+               Optional<String[]> expected = Optional.of(new String[]{"forwarder-evc"});
+               
+               assertNotNull(types);
+               assertArrayEquals(expected.get(), types.get());
+       }
+
+       @Test
+       public void testGetMigrationName() {
+               String migrationName = migration.getMigrationName();
+
+               assertNotNull(migrationName);
+               assertEquals("MigrateHUBEvcInventory", migrationName);
+       }
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventoryMethodTest.java b/src/test/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventoryMethodTest.java
new file mode 100644 (file)
index 0000000..59ae5e7
--- /dev/null
@@ -0,0 +1,149 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.javatuples.Pair;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import java.util.*;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class MigrateINVPhysicalInventoryMethodTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private JanusGraphTransaction tx;
+       private GraphTraversalSource g;
+       private TransactionalGraphEngine spy;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+
+               spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when (spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+       }
+       
+       @After
+       public void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+
+
+       @Test
+       public void headerTest() throws Exception {
+               MigrateINVPhysicalInventory m = new MigrateINVPhysicalInventory(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               String header = "ptnii-name,fic,equipment-model,equipment-role,equipment-role-additional,ip-addr,subnet-mask,slot-name,card-type,card-port-lock,card-vlan-lock,port-aid,port-type,port-role,port-lock,vlan-lock,reservation-name,collector-interconnect-type,tag-mode,media-type,media-speed-value+media-speed-units,uni-cir-value+uni-cir-units,evc-name";
+               List<String> lines = new ArrayList<>();
+               lines.add(header);
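+               // processAndRemoveHeader should return the header, remove it from the list, and record its 23 columns as headerLength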
+               assertEquals(header, m.processAndRemoveHeader(lines));
+               assertEquals(0, lines.size());
+               assertEquals(23, m.headerLength);
+
+       }
+
+       @Test
+       public void verifyLineTest() throws Exception {
+               MigrateINVPhysicalInventory m = new MigrateINVPhysicalInventory(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               m.headerLength = 23;
+               assertFalse(m.verifyLine(Collections.nCopies(5, "foo")));
+               assertTrue(m.verifyLine(Collections.nCopies(23, "foo")));
+               assertEquals(1, m.skippedRowsCount.intValue());
+
+       }
+
+       @Test
+       public void readLineTest() throws Exception {
+               MigrateINVPhysicalInventory m = new MigrateINVPhysicalInventory(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               String line = "pnf-name-collector-1,06000D.121,5150,AED,,2001:1890:fcfe:7000:7021:0:1:2,64,,,,,\"1.7    \",SFP_1GE/Ethernet_10/100/1000M,ACCESS,N,N,M0651881_ST,SHARED,DOUBLE,SFP-1GE-LX,1000Mbps,,evc-name-1\n";
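+               // processLine is expected to strip the surrounding quotes and whitespace from the port-aid field ("1.7    " -> 1.7)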
+               Pair<String, String> pair = m.processLine(Arrays.asList(line.split("\\s*,\\s*", -1))).get();
+               assertEquals("Test 1","pnf-name-collector-1", pair.getValue0());
+               assertEquals("Test 1","1.7", pair.getValue1());
+
+               line = "pnf-name-1,06000D.121,5150,AED,,2001:1890:fcfe:7000:7021:0:1:2,64,,,,,1.2,SFP_1GE/Ethernet_10/100/1000M,ACCESS,N,N,M0651882_ST,SHARED,DOUBLE,SFP-1GE-LX,1000Mbps,,evc-name-3";
+               pair = m.processLine(Arrays.asList(line.split("\\s*,\\s*", -1))).get();
+               assertEquals("Test 1","pnf-name-1", pair.getValue0());
+               assertEquals("Test 1","1.2", pair.getValue1());
+
+       }
+
+       @Test
+       public void getFileContentsTest() throws Exception {
+               MigrateINVPhysicalInventory m = new MigrateINVPhysicalInventory(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+
+               Map<String,Set<String>> expected = new HashMap<>();
+               List<String> lines = new ArrayList<>();
+
+               String header = "ptnii-name,fic,equipment-model,equipment-role,equipment-role-additional,ip-addr,subnet-mask,slot-name,card-type,card-port-lock,card-vlan-lock,port-aid,port-type,port-role,port-lock,vlan-lock,reservation-name,collector-interconnect-type,tag-mode,media-type,media-speed-value+media-speed-units,uni-cir-value+uni-cir-units,evc-name";
+               lines.add(header);
+
+               lines.add("pnf-name-collector-1,06000D.121,5150,AED,,2001:1890:fcfe:7000:7021:0:1:2,64,,,,,\"1.7        \",SFP_1GE/Ethernet_10/100/1000M,ACCESS,N,N,M0651881_ST,SHARED,DOUBLE,SFP-1GE-LX,1000Mbps,,evc-name-1");
+               expected.put("pnf-name-collector-1", new HashSet<>(Arrays.asList("1.7")));
+
+               lines.add("pnf-name-1,06000D.121,5150,AED,,2001:1890:fcfe:7000:7021:0:1:2,64,,,,,1.2,SFP_1GE/Ethernet_10/100/1000M,ACCESS,N,N,M0651882_ST,SHARED,DOUBLE,SFP-1GE-LX,1000Mbps,,evc-name-3");
+               lines.add("pnf-name-1,06000D.121,5150,AED,,2001:1890:fcfe:7000:7021:0:1:2,64,,,,,1.2,SFP_1GE/Ethernet_10/100/1000M,ACCESS,N,N,M0651882_ST,SHARED,DOUBLE,SFP-1GE-LX,1000Mbps,,evc-name-3");
+               lines.add("pnf-name-1,06000D.121,5150,AED,,2001:1890:fcfe:7000:7021:0:1:2,64,,,,,1.3,SFP_1GE/Ethernet_10/100/1000M,ACCESS,N,N,M0651882_ST,SHARED,DOUBLE,SFP-1GE-LX,1000Mbps,,evc-name-3");
+               expected.put("pnf-name-1", new HashSet<>(Arrays.asList("1.2", "1.3")));
+
+               lines.add("foo");
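+               // the malformed "foo" row has fewer than headerLength columns and should be skipped (see verifyLineTest)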
+
+               assertEquals(expected, m.getFileContents(lines));
+
+       }
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventoryTest.java b/src/test/java/org/onap/aai/migration/v12/MigrateINVPhysicalInventoryTest.java
new file mode 100644 (file)
index 0000000..82ea770
--- /dev/null
@@ -0,0 +1,168 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import java.util.Optional;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class MigrateINVPhysicalInventoryTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private MigrateINVPhysicalInventory migration;
+       private JanusGraphTransaction tx;
+       private GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+               
+               
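+               // Seed graph: one existing pnf (pnf-name-1) with a single p-interface (1.1).
+               // The migration reads the INV file under src/test/resources and, per the
+               // assertions below, should add pnf-name-collector-1 and the missing p-interfaces.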
+               Vertex pnf1 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-1")
+                               .property("aai-uri", "/network/pnfs/pnf/pnf-name-1")
+                               .next();
+               Vertex  port11 = g.addV()
+                               .property("aai-node-type", "p-interface")
+                               .property("interface-name", "1.1")
+                               .property("aai-uri", "/network/pnfs/pnf/pnf-name-1/p-interfaces/pinterface/1.1")
+                               .next();
+               // graph 1: attach the existing p-interface 1.1 to pnf-name-1
+                               
+               edgeSerializer.addTreeEdge(g, pnf1, port11);
+
+
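+               // Spy the engine so the migration runs against this in-memory JanusGraph transaction.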
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when(spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+               
+               migration = new MigrateINVPhysicalInventory(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+       
+       @After
+       public void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+
+
+       @Test
+       public void pnfsExistTest() throws Exception {
+               // check if pnf node gets created
+               assertEquals("2 PNFs exist", new Long(2L),
+                               g.V().has("aai-node-type", "pnf")
+                                               .count().next());
+       }
+
+       @Test
+       public void pInterfacesExistTest() throws Exception {
+
+               assertEquals("4 p-interfaces exist", new Long(4L),
+                               g.V().has("aai-node-type", "p-interface")
+                                               .count().next());
+       }
+
+       @Test
+       public void testRun_checkPnfsAndPInterfacesExist() throws Exception {
+               // check if graph nodes exist
+               
+               // check if pnf node gets created
+               assertEquals("2 PNFs exist", new Long(2L), 
+                               g.V().has("aai-node-type", "pnf")
+                               .count().next());
+               
+               System.out.println("Count: " + g.V().has("aai-node-type", "pnf")
+                               .has("pnf-name", "pnf-name-collector-1").in("tosca.relationships.network.BindsTo").count().next());
+                               
+               assertEquals("pnf-name-collector-1 was created", new Long(1L),
+                               g.V().has("aai-node-type", "pnf")
+                               .has("pnf-name", "pnf-name-collector-1").count().next());
+               
+               assertEquals("p-interface 1.7 created for pnf-name-collector-1", true,
+                               g.V().has("aai-node-type", "pnf")
+                               .has("pnf-name", "pnf-name-collector-1")
+                               .in("tosca.relationships.network.BindsTo")
+                               .has("interface-name","1.7")
+                               .hasNext());
+               assertEquals("p-interfaces created for pnfs", new Long(2L),
+                               g.V().has("aai-node-type", "pnf")
+                               .has("pnf-name", "pnf-name-1")
+                               .in("tosca.relationships.network.BindsTo").count().next());
+       }
+       
+       @Test
+       public void testGetAffectedNodeTypes() {
+               Optional<String[]> types = migration.getAffectedNodeTypes();
+               Optional<String[]> expected = Optional.of(new String[]{"pnf"});
+               
+               assertNotNull(types);
+               assertArrayEquals(expected.get(), types.get());
+       }
+
+       @Test
+       public void testGetMigrationName() {
+               String migrationName = migration.getMigrationName();
+
+               assertNotNull(migrationName);
+               assertEquals("MigrateINVPhysicalInventory", migrationName);
+       }
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/MigrateInvEvcInventoryTest.java b/src/test/java/org/onap/aai/migration/v12/MigrateInvEvcInventoryTest.java
new file mode 100644 (file)
index 0000000..ebe5136
--- /dev/null
@@ -0,0 +1,152 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+public class MigrateInvEvcInventoryTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private static Loader loader;
+       private static TransactionalGraphEngine dbEngine;
+       private static JanusGraph graph;
+       private static MigrateINVEvcInventory migration;
+       private static JanusGraphTransaction tx;
+       private static GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+               
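+               // Seed graph: two evc nodes. Per the assertions below, the migration should set
+               // inter-connect-type-ingress only on evc-name-1; evc-name-2 stays untouched and
+               // no evc-name-3 node should exist afterwards.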
+               Vertex evc = g.addV()
+                               .property("aai-node-type", "evc")
+                               .property("evc-id", "evc-name-1")
+                               .next();
+               
+               Vertex evc2 = g.addV()
+                               .property("aai-node-type", "evc")
+                               .property("evc-id", "evc-name-2")
+                               .next();
+               
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when(spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+               
+               migration = new MigrateINVEvcInventory(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+       
+       @After
+       public void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+       
+       @Test
+       public void testRun_updateEvcNode() throws Exception {
+               // check if graph nodes exist
+               assertEquals("evc node exists", true, 
+                               g.V().has("aai-node-type", "evc")
+                                        .has("evc-id", "evc-name-1")
+                               .hasNext());
+               
+               // check if evc object is updated to set the value for inter-connect-type-ingress
+               assertEquals("evc is updated", true, 
+                               g.V().has("aai-node-type", "evc").has("evc-id", "evc-name-1")
+                               .has("inter-connect-type-ingress", "SHARED")
+                               .hasNext());
+       }
+       
+       @Test
+       public void testRun_evcNotCreated() throws Exception {
+               
+               assertEquals("evc node does not exist", false, 
+                               g.V().has("aai-node-type", "evc").has("evc-id", "evc-name-3")
+                               .hasNext());
+               
+               //inter-connect-type-ingress is not present on the evc
+               assertEquals("evc node exists", true, 
+                               g.V().has("aai-node-type", "evc").has("evc-id", "evc-name-2")
+                               .hasNext());
+               assertEquals("evc node not updated with inter-connect-type-ingress", false, 
+                               g.V().has("aai-node-type", "evc").has("evc-id", "evc-name-2").has("inter-connect-type-ingress")
+                               .hasNext());
+               
+       }
+
+       @Test
+       public void testGetAffectedNodeTypes() {
+               Optional<String[]> types = migration.getAffectedNodeTypes();
+               Optional<String[]> expected = Optional.of(new String[]{"evc"});
+               
+               assertNotNull(types);
+               assertArrayEquals(expected.get(), types.get());
+       }
+
+       @Test
+       public void testGetMigrationName() {
+               String migrationName = migration.getMigrationName();
+
+               assertNotNull(migrationName);
+               assertEquals("MigrateINVEvcInventory", migrationName);
+       }
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/MigratePATHEvcInventoryTest.java b/src/test/java/org/onap/aai/migration/v12/MigratePATHEvcInventoryTest.java
new file mode 100644 (file)
index 0000000..334f32b
--- /dev/null
@@ -0,0 +1,658 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+public class MigratePATHEvcInventoryTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private MigratePATHEvcInventory migration;
+       private GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+               
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+
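+               // Seed graph: customer-id-1 -> SAREA service-subscription -> three service-instances
+               // (evc-name-1/2/3), plus six pnfs whose p-/lag-interfaces match the PATH file rows.
+               // The migration should attach forwarders, configurations and forwarder-evcs to the
+               // matching forwarding-paths.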
+               Vertex customer1 = g.addV()
+                               .property("aai-node-type", "customer")
+                               .property("global-customer-id", "customer-id-1")
+                               .property("subscriber-type", "CUST")
+                               .next();
+               
+               Vertex servSub1 = g.addV()
+                               .property("aai-node-type", "service-subscription")
+                               .property("service-type", "SAREA")
+                               .next();
+               
+               Vertex servInstance1 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-type", "SAREA")
+                               .property("service-instance-id", "evc-name-1")
+                               .next();
+               Vertex servInstance3 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-type", "SAREA")
+                               .property("service-instance-id", "evc-name-3")
+                               .next();
+               Vertex servInstance2 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-type", "SAREA")
+                               .property("service-instance-id", "evc-name-2")
+                               .next();
+               
+               Vertex pnf1 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-1")
+                               .next();
+               Vertex  port11 = g.addV()
+                               .property("aai-node-type", "p-interface")
+                               .property("interface-name", "1.1")
+                               .next();
+               Vertex  port12 = g.addV()
+                               .property("aai-node-type", "p-interface")
+                               .property("interface-name", "1.41")
+                               .next();
+               
+               Vertex pnf2 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-2")
+                               .next();
+               Vertex  port21 = g.addV()
+                               .property("aai-node-type", "p-interface")
+                               .property("interface-name", "1.25")
+                               .next();
+               Vertex  port22 = g.addV()
+                               .property("aai-node-type", "lag-interface")
+                               .property("interface-name", "ae1")
+                               .next();
+               
+               Vertex pnf3 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-3")
+                               .next();
+               Vertex  port31 = g.addV()
+                               .property("aai-node-type", "p-interface")
+                               .property("interface-name", "1.32")
+                               .next();
+               Vertex  port32 = g.addV()
+                               .property("aai-node-type", "lag-interface")
+                               .property("interface-name", "ae1")
+                               .next();
+               
+               Vertex pnf4 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-4")
+                               .next();
+               Vertex  port41 = g.addV()
+                               .property("aai-node-type", "p-interface")
+                               .property("interface-name", "1.7")
+                               .next();
+               Vertex  port42 = g.addV()
+                               .property("aai-node-type", "lag-interface")
+                               .property("interface-name", "ae101")
+                               .next();
+               
+               Vertex pnf5 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-5")
+                               .next();
+               Vertex  port51 = g.addV()
+                               .property("aai-node-type", "lag-interface")
+                               .property("interface-name", "104")
+                               .next();
+               Vertex  port52 = g.addV()
+                               .property("aai-node-type", "lag-interface")
+                               .property("interface-name", "101")
+                               .next();
+               
+               Vertex pnf6 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-6")
+                               .next();
+               Vertex  port61 = g.addV()
+                               .property("aai-node-type", "lag-interface")
+                               .property("interface-name", "ae104")
+                               .next();
+               Vertex  port62 = g.addV()
+                               .property("aai-node-type", "p-interface")
+                               .property("interface-name", "1.39")
+                               .next();
+               
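+               // Each evc below is paired with a forwarding-path keyed by the same evc name.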
+               Vertex evc1 = g.addV().property("aai-node-type", "evc")
+                               .property("evc-id", "evc-name-1")
+                               .next();
+               Vertex fp1 =  g.addV()
+                               .property("aai-node-type", "forwarding-path")
+                               .property("forwarding-path-id", "evc-name-1")
+                               .next();
+               
+               Vertex evc2 = g.addV().property("aai-node-type", "evc")
+                               .property("evc-id", "evc-name-2")
+                               .next();
+               Vertex fp2 =  g.addV()
+                               .property("aai-node-type", "forwarding-path")
+                               .property("forwarding-path-id", "evc-name-2")
+                               .next();
+               
+               Vertex evc3 = g.addV().property("aai-node-type", "evc")
+                               .property("evc-id", "evc-name-3")
+                               .next();
+               Vertex fp3 =  g.addV()
+                               .property("aai-node-type", "forwarding-path")
+                               .property("forwarding-path-id", "evc-name-3")
+                               .next();
+               
+               // graph 1: wire the customer/service tree, forwarding-path edges, and pnf interfaces
+               edgeSerializer.addTreeEdge(g, customer1, servSub1);
+               edgeSerializer.addTreeEdge(g, servSub1, servInstance1);
+               edgeSerializer.addTreeEdge(g, servSub1, servInstance2);
+               edgeSerializer.addTreeEdge(g, servSub1, servInstance3);
+               
+               edgeSerializer.addEdge(g, servInstance1, fp1);
+               edgeSerializer.addEdge(g, servInstance2, fp2);
+               edgeSerializer.addEdge(g, servInstance3, fp3);
+               
+               edgeSerializer.addTreeEdge(g, pnf1, port11);
+               edgeSerializer.addTreeEdge(g, pnf1, port12);
+               
+               edgeSerializer.addTreeEdge(g, pnf2, port21);
+               edgeSerializer.addTreeEdge(g, pnf2, port22);
+               
+               edgeSerializer.addTreeEdge(g, pnf3, port31);
+               edgeSerializer.addTreeEdge(g, pnf3, port32);
+               
+               edgeSerializer.addTreeEdge(g, pnf4, port41);
+               edgeSerializer.addTreeEdge(g, pnf4, port42);
+               
+               edgeSerializer.addTreeEdge(g, pnf5, port51);
+               edgeSerializer.addTreeEdge(g, pnf5, port52);
+               
+               edgeSerializer.addTreeEdge(g, pnf6, port61);
+               edgeSerializer.addTreeEdge(g, pnf6, port62);
+               
+
+
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when(spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+               
+               migration = new MigratePATHEvcInventory(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+
+       @Test
+       public void testRun_checkServInstanceAndForwardingPathsExist() throws Exception {
+               // check if graph nodes exist
+               
+               // check if service-instance node gets created
+               assertEquals("service subscription node, service-type=SAREA", true, 
+                               g.V().has("service-instance-id", "evc-name-1")
+                               .out("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .hasNext());
+               
+               assertEquals("forwarding-path node exists", true, 
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-1")
+                               .in("org.onap.relationships.inventory.AppliesTo")
+                               .has("aai-node-type", "forwarding-path")
+                               .has("forwarding-path-id", "evc-name-1")
+                               .hasNext());
+               assertEquals("forwarding-path node exists", true, 
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-2")
+                               .in("org.onap.relationships.inventory.AppliesTo")
+                               .has("aai-node-type", "forwarding-path")
+                               .has("forwarding-path-id", "evc-name-2")
+                               .hasNext());
+               assertEquals("forwarding-path node exists", true, 
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo")
+                               .has("aai-node-type", "forwarding-path")
+                               .has("forwarding-path-id", "evc-name-3")
+                               .hasNext());
+               
+       }
+
+       @Test
+       public void testRun_checkForwardersForEvc1AreCreated() throws Exception {
+               // check if graph nodes exist
+               // check if forwarder node gets created
+               
+               assertEquals("forwarder node is created for evc-name-1 ", true,
+               g.V().has("global-customer-id", "customer-id-1")
+               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-1")
+               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+               .has("sequence", 1)
+               .has("forwarder-role", "ingress")
+               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-1 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-1")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 2)
+                               .has("forwarder-role", "egress")
+                               .hasNext());
+       }
+       
+       @Test
+       public void testRun_checkForwardersForEvc2AreCreated() throws Exception {
+               
+               // check if forwarder node gets created
+               
+               assertEquals("forwarder node is created for evc-name-2 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-2")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-2 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-2")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .hasNext());
+               
+               assertEquals("4 forwarders are created for evc-name-2 ", (Long) 4L,
+                               g.V().
+                                       has("aai-node-type", "forwarding-path").has("forwarding-path-id","evc-name-2")
+                                       .in("org.onap.relationships.inventory.BelongsTo")
+                                       .has("aai-node-type", "forwarder").count().next());
+               
+               assertEquals("forwarder node is created for evc-name-2 ", true,
+               g.V().has("aai-node-type", "forwarding-path").has("forwarding-path-id","evc-name-2")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 1)
+                               .has("forwarder-role", "ingress")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-2 ", true,
+               g.V().has("global-customer-id", "customer-id-1")
+               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-2")
+               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+               .has("sequence", 1)
+               .has("forwarder-role", "ingress")
+               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-2 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-2")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 2)
+                               .has("forwarder-role", "intermediate")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-2 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-2")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 3)
+                               .has("forwarder-role", "intermediate")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-2 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-2")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 4)
+                               .has("forwarder-role", "egress")
+                               .hasNext());
+       }
+       
+       @Test
+       public void testRun_checkForwardersForEvc3AreCreated() throws Exception {
+               
+               // check if forwarder node gets created
+               
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+               g.V().has("global-customer-id", "customer-id-1")
+               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+               .has("sequence", 1)
+               .has("forwarder-role", "ingress")
+               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 1)
+                               .has("forwarder-role", "ingress")
+                               .out("org.onap.relationships.inventory.ForwardsTo")
+                               .has("aai-node-type", "p-interface")
+                               .has("interface-name","1.7")
+                               .hasNext());
+               
+               assertEquals("forwarder-evc node is created for forwarder with sequence 1 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 1)
+                               .has("forwarder-role", "ingress")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-id","evc-name-3-1").has("configuration-type","forwarder").has("configuration-sub-type", "forwarder")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder-evc")
+                               .has("forwarder-evc-id","evc-name-3-1")
+                               .has("circuit-id","M0651881")
+                               .has("cvlan","34")
+                               .has("svlan","8")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 2)
+                               .has("forwarder-role", "intermediate")
+                               .hasNext());
+               
+               //forwarder to interface check
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 2)
+                               .has("forwarder-role", "intermediate")
+                               .out("org.onap.relationships.inventory.ForwardsTo")
+                               .has("aai-node-type", "lag-interface")
+                               .has("interface-name","ae101")
+                               .hasNext());
+               
+               assertEquals("forwarder-evc node is created for forwarder with sequence 2 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 2)
+                               .has("forwarder-role", "intermediate")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-id","evc-name-3-2").has("configuration-type","forwarder").has("configuration-sub-type", "forwarder")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder-evc")
+                               .has("forwarder-evc-id","evc-name-3-2")
+                               .has("cvlan","34")
+                               .has("svlan","740")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 3)
+                               .has("forwarder-role", "intermediate")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 3)
+                               .has("forwarder-role", "intermediate")
+                               .out("org.onap.relationships.inventory.ForwardsTo")
+                               .has("aai-node-type", "lag-interface")
+                               .has("interface-name","101")
+                               .hasNext());
+               
+               assertEquals("forwarder-evc node is created for forwarder with sequence 3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 3)
+                               .has("forwarder-role", "intermediate")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-id","evc-name-3-3").has("configuration-type","forwarder").has("configuration-sub-type", "forwarder")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder-evc")
+                               .has("forwarder-evc-id","evc-name-3-3")
+                               .has("cvlan","35")
+                               .has("svlan","740")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 4)
+                               .has("forwarder-role", "intermediate")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 4)
+                               .has("forwarder-role", "intermediate")
+                               .out("org.onap.relationships.inventory.ForwardsTo")
+                               .has("aai-node-type", "lag-interface")
+                               .has("interface-name","104")
+                               .hasNext());
+               
+               assertEquals("forwarder-evc node is created for forwarder with sequence 4 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 4)
+                               .has("forwarder-role", "intermediate")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-id","evc-name-3-4").has("configuration-type","forwarder").has("configuration-sub-type", "forwarder")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder-evc")
+                               .has("forwarder-evc-id","evc-name-3-4")
+                               .has("cvlan","37")
+                               .has("svlan","740")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 5)
+                               .has("forwarder-role", "intermediate")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 5)
+                               .has("forwarder-role", "intermediate")
+                               .out("org.onap.relationships.inventory.ForwardsTo")
+                               .has("aai-node-type", "lag-interface")
+                               .has("interface-name","ae104")
+                               .hasNext());
+               
+               
+               
+               assertEquals("configuration node is created for forwarder with sequence 5 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 5)
+                               .has("forwarder-role", "intermediate")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-id","evc-name-3-5").has("configuration-type","forwarder").has("configuration-sub-type", "forwarder")
+                               .hasNext());
+               
+               assertEquals("forwarder-evc node is created for forwarder with sequence 5 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 5)
+                               .has("forwarder-role", "intermediate")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-id","evc-name-3-5").has("configuration-type","forwarder").has("configuration-sub-type", "forwarder")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder-evc")
+                               .has("forwarder-evc-id","evc-name-3-5")
+                               .has("cvlan","36")
+                               .has("svlan","740")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 6)
+                               .has("forwarder-role", "egress")
+                               .hasNext());
+               
+               assertEquals("forwarder node is created for evc-name-3 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 6)
+                               .has("forwarder-role", "egress")
+                               .out("org.onap.relationships.inventory.ForwardsTo")
+                               .has("aai-node-type", "p-interface")
+                               .has("interface-name","1.39")
+                               .hasNext());
+               
+               assertEquals("configuration node is created for forwarder with sequence 6 ", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 6)
+                               .has("forwarder-role", "egress")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-id","evc-name-3-6").has("configuration-type","forwarder").has("configuration-sub-type", "forwarder")
+                               .hasNext());
+               
+               assertEquals("forwarder-evc node is created for configuration evc-name-3-6", true,
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder")
+                               .has("sequence", 6)
+                               .has("forwarder-role", "egress")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-id","evc-name-3-6").has("configuration-type","forwarder").has("configuration-sub-type", "forwarder")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "forwarder-evc")
+                               .has("forwarder-evc-id","evc-name-3-6").has("circuit-id","IZEZ.597112..ATI").has("cvlan","36").has("svlan","3")
+                               .hasNext());
+               
+       }
+
+       
+       @Test
+       public void testGetAffectedNodeTypes() {
+               Optional<String[]> types = migration.getAffectedNodeTypes();
+               Optional<String[]> expected = Optional.of(new String[]{"forwarding-path"});
+               
+               assertNotNull(types);
+               assertArrayEquals(expected.get(), types.get());
+       }
+
+       @Test
+       public void testGetMigrationName() {
+               String migrationName = migration.getMigrationName();
+
+               assertNotNull(migrationName);
+               assertEquals("MigratePATHEvcInventory", migrationName);
+       }
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/MigratePATHPhysicalInventoryTest.java b/src/test/java/org/onap/aai/migration/v12/MigratePATHPhysicalInventoryTest.java
new file mode 100644 (file)
index 0000000..11aa0a6
--- /dev/null
@@ -0,0 +1,159 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Matchers.shortThat;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+public class MigratePATHPhysicalInventoryTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private MigratePATHPhysicalInventory migration;
+       private GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+               
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+
+               Vertex pnf2 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-2")
+                               .next();
+               Vertex  port21 = g.addV()
+                               .property("aai-node-type", "lag-interface")
+                               .property("interface-name", "ae1")
+                               .next();
+               
+               Vertex pnf3 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-3")
+                               .next();
+               Vertex pnf4 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-4")
+                               .next();
+               Vertex pnf5 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-5")
+                               .next();
+               // graph 1
+                               
+               edgeSerializer.addTreeEdge(g, pnf2, port21);
+
+
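+               // spy the engine and its admin so the migration's traversals run against this test graph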
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when (spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+               
+               migration = new MigratePATHPhysicalInventory(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+       
+       @Test
+       public void testRun_checkPnfsAndPInterfacesExist() throws Exception {
+               // verify migration metadata before checking the graph nodes
+
+               testGetMigrationName();
+               testGetAffectedNodeTypes();
+               
+               // check if pnf node gets created
+               assertEquals("4 PNFs exist", new Long(4L), 
+                               g.V().has("aai-node-type", "pnf")
+                               .count().next());
+               
+               assertEquals("5 lag-interfaces were created", new Long (5L), g.V().has("aai-node-type", "lag-interface")
+                               .out("tosca.relationships.network.BindsTo").count().next());
+                               
+               assertEquals("pnf-name-3 node exists", new Long(1L),
+                               g.V().has("aai-node-type", "pnf")
+                               .has("pnf-name", "pnf-name-3").count().next());
+               
+               assertEquals("lag-interface ae1 created for pnf-name-3", true,
+                               g.V().has("aai-node-type", "pnf")
+                               .has("pnf-name", "pnf-name-3")
+                               .in("tosca.relationships.network.BindsTo")
+                               .has("aai-node-type", "lag-interface")
+                               .has("interface-name","ae1")
+                               .hasNext());
+               
+               assertEquals("2 lag-interfaces created for pnf-name-5", new Long(2L),
+                               g.V().has("aai-node-type", "pnf")
+                               .has("pnf-name", "pnf-name-5")
+                               .in("tosca.relationships.network.BindsTo")
+                               .has("aai-node-type", "lag-interface").count().next());
+       }
+       
+       public void testGetAffectedNodeTypes() {
+               Optional<String[]> types = migration.getAffectedNodeTypes();
+               Optional<String[]> expected = Optional.of(new String[]{"lag-interface"});
+               
+               assertNotNull(types);
+               assertArrayEquals(expected.get(), types.get());
+       }
+
+       public void testGetMigrationName() {
+               String migrationName = migration.getMigrationName();
+
+               assertNotNull(migrationName);
+               assertEquals("MigratePATHPhysicalInventory", migrationName);
+       }
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/MigrateSAREvcInventoryTest.java b/src/test/java/org/onap/aai/migration/v12/MigrateSAREvcInventoryTest.java
new file mode 100644 (file)
index 0000000..79a5877
--- /dev/null
@@ -0,0 +1,357 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+public class MigrateSAREvcInventoryTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private MigrateSAREvcInventory migration;
+       private JanusGraphTransaction tx;
+       private GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+               
+               Vertex customer1 = g.addV()
+                               .property("aai-node-type", "customer")
+                               .property("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .property("subscriber-type", "CUST")
+                               .next();
+               
+               Vertex servSub1 = g.addV()
+                               .property("aai-node-type", "service-subscription")
+                               .property("service-type", "SAREA")
+                               .next();
+               
+               Vertex servInst1 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "evc-name-1")
+                               .next();
+               
+               Vertex customer2 = g.addV()
+                               .property("aai-node-type", "customer")
+                               .property("global-customer-id", "cust-1")
+                               .property("subscriber-type", "CUST")
+                               .next();
+               
+               Vertex servSub2 = g.addV()
+                               .property("aai-node-type", "service-subscription")
+                               .property("service-type", "SAREA")
+                               .next();
+               
+               Vertex servInst2 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "evc-name-1")
+                               .next();
+               
+               Vertex collectorPnf = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-collector-1")
+                               .next();
+               
+               Vertex bearerPnf = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name-bearer-1")
+                               .next();
+               
+               Vertex collectorPort = g.addV()
+                               .property("aai-node-type", "p-interface")
+                               .property("interface-name", "p-int-collector-1")
+                               .next();
+               
+               Vertex bearerPort = g.addV()
+                               .property("aai-node-type", "p-interface")
+                               .property("interface-name", "p-int-bearer-1")
+                               .next();
+               
+               Vertex servInst4 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "evc-name-4")
+                               .next();
+
+               // graph 1
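+               // customer -> service-subscription -> service-instance tree edges; servInst1 is left unattached (its edge below is commented out)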
+               edgeSerializer.addTreeEdge(g, customer1, servSub1);
+//             edgeSerializer.addTreeEdge(g, servSub1, servInst1);
+               edgeSerializer.addTreeEdge(g, customer2, servSub2);
+               edgeSerializer.addTreeEdge(g, servSub2, servInst2);
+               edgeSerializer.addTreeEdge(g, servSub1, servInst4); //evc-name-4 exists in graph as a child of SAREA serv-sub
+               edgeSerializer.addTreeEdge(g, collectorPnf, collectorPort);
+               edgeSerializer.addTreeEdge(g, bearerPnf, bearerPort);
+
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when (spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+               
+               migration = new MigrateSAREvcInventory(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+       
+       @After
+       public void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+       
+       @Test
+       public void testRun_createServiceInstanceNode() throws Exception {
+               // check if graph nodes exist
+               assertEquals("service instance node exists", true, 
+                               g.V().has("service-instance-id", "evc-name-1")
+                               .hasNext());
+               
+               // check if service-instance node gets created
+               assertEquals("service subscription node, service-type=SAREA", true, 
+                               g.V().has("service-instance-id", "evc-name-1")
+                               .out("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .hasNext());
+               
+       
+               
+               // check if forwarding-path node gets created
+               assertEquals("forwarding-path is created", true, g.V().has("forwarding-path-id", "evc-name-1")
+                               .has("forwarding-path-name", "evc-name-1").hasNext());
+               
+               assertEquals("forwarding-path node exists", true,
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-1")
+                               .in("org.onap.relationships.inventory.AppliesTo")
+                               .has("aai-node-type", "forwarding-path")
+                               .has("forwarding-path-id", "evc-name-1")
+                               .has("forwarding-path-name", "evc-name-1")
+                               .hasNext());
+               
+               // check if configuration node gets created
+               assertEquals("configuration node, configuration-type= forwarding-path", true,
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-1")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-type", "forwarding-path")
+                               .has("configuration-sub-type", "evc")
+                               .hasNext());
+               
+               //check if evc node gets created
+               assertEquals("evc is created", true, 
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-1")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "evc")
+                               .hasNext());
+               
+               // check the properties on the evc node
+               assertEquals("evc node has expected properties", true,
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-1")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "evc")
+                               .has("evc-id", "evc-name-1")
+                               .has("forwarding-path-topology", "PointToPoint")
+                               .has("cir-value", "40")
+                               .has("cir-units", "Mbps")
+                               .has("tagmode-access-ingress", "DOUBLE")
+                               .has("tagmode-access-egress", "DOUBLE")
+                               .hasNext());
+       }
+
+       @Test
+       public void testRun_evcNotCreated() throws Exception {
+               // check if graph nodes exist
+               assertEquals("customer node exists", true, 
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .hasNext());
+               
+               assertEquals("service subscription node, service-type=SAREA", true, 
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .hasNext());
+               
+               //service-instance should not be created
+               assertEquals("service instance node created", false, 
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-2")
+                               .hasNext());
+               
+               assertEquals("service instance node already exists", true, 
+                               g.V().has("global-customer-id", "cust-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-1")
+                               .hasNext());
+               
+               // forwarding-path node should not be created
+               assertEquals("forwarding-path created", false, g.V().has("aai-node-type", "forwarding-path")
+                               .has("forwarding-path-name", "evc-name-2").hasNext());
+               
+               // configuration node should not be created
+               assertEquals("configuration node created", false, g.V().has("aai-node-type", "configuration")
+                               .has("configuration-id", "evc-name-2").hasNext());
+               
+               // evc node should not be created
+               assertEquals("evc node created", false, g.V().has("aai-node-type", "evc")
+                               .has("evc-id", "evc-name-2").hasNext());
+               
+               // service-instance is not created because pnf exists, but p-interface does not
+               assertEquals("service instance node created", false, 
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-3")
+                               .hasNext());
+       }
+       
+       @Test
+       public void testRun_createFPConfigurationEvcNode4() throws Exception {
+               // check if graph nodes exist
+               assertEquals("service instance node exists", true, 
+                               g.V().has("service-instance-id", "evc-name-4")
+                               .hasNext());
+               
+               // check if service-instance node gets created
+               assertEquals("service subscription node, service-type=SAREA", true, 
+                               g.V().has("service-instance-id", "evc-name-4")
+                               .out("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .hasNext());
+               
+       
+               
+               // check if forwarding-path node gets created
+               assertEquals("forwarding-path is created", true, g.V().has("forwarding-path-id", "evc-name-4")
+                               .has("forwarding-path-name", "evc-name-4").hasNext());
+               
+               assertEquals("forwarding-path node exists", true,
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-4")
+                               .in("org.onap.relationships.inventory.AppliesTo")
+                               .has("aai-node-type", "forwarding-path")
+                               .has("forwarding-path-id", "evc-name-4")
+                               .has("forwarding-path-name", "evc-name-4")
+                               .hasNext());
+               
+               // check if configuration node gets created
+               assertEquals("configuration node, configuration-type= forwarding-path", true,
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-4")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-type", "forwarding-path")
+                               .has("configuration-sub-type", "evc")
+                               .hasNext());
+               
+               //check if evc node gets created
+               assertEquals("evc is created", true, 
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-4")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "evc")
+                               .hasNext());
+               
+               // check the properties on the evc node
+               assertEquals("evc node has expected properties", true,
+                               g.V().has("global-customer-id", "8a00890a-e6ae-446b-9dbe-b828dbeb38bd")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SAREA")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "evc-name-1")
+                               .in("org.onap.relationships.inventory.AppliesTo").has("aai-node-type", "forwarding-path")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "evc")
+                               .has("evc-id", "evc-name-1")
+                               .has("forwarding-path-topology", "PointToPoint")
+                               .has("cir-value", "40")
+                               .has("cir-units", "Mbps")
+                               .has("tagmode-access-ingress", "DOUBLE")
+                               .has("tagmode-access-egress", "DOUBLE")
+                               .hasNext());
+       }
+       
+       @Test
+       public void testGetAffectedNodeTypes() {
+               Optional<String[]> types = migration.getAffectedNodeTypes();
+               Optional<String[]> expected = Optional.of(new String[]{"service-instance"});
+               
+               assertNotNull(types);
+               assertArrayEquals(expected.get(), types.get());
+       }
+
+       @Test
+       public void testGetMigrationName() {
+               String migrationName = migration.getMigrationName();
+
+               assertNotNull(migrationName);
+               assertEquals("MigrateSAREvcInventory", migrationName);
+       }
+}
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartOneTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartOneTest.java
new file mode 100644 (file)
index 0000000..d3607d3
--- /dev/null
@@ -0,0 +1,173 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.janusgraph.core.schema.JanusGraphManagement;
+
+public class MigrateBadWidgetModelsPartOneTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private GraphTraversalSource g;
+       private MockMigrateBadWidgetModelsPartOne migration;
+       private Vertex modelVer1 = null;
+       private Vertex modelVer3 = null;
+
+       @Before
+       public void setUp() throws Exception {
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+               JanusGraphManagement janusgraphManagement = graph.openManagement();
+               g = graph.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(queryStyle, type, loader);
+               createFirstVertexAndRelatedVertexes();
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+               GraphTraversalSource traversal = g;
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
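+               // return the real JanusGraph management handle when the spied admin is asked for it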
+               Mockito.doReturn(janusgraphManagement).when(adminSpy).getManagementSystem();
+
+               migration = new MockMigrateBadWidgetModelsPartOne(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+
+       private void createFirstVertexAndRelatedVertexes() throws AAIException {
+               
+               // Add model1/model-ver1 -- invalid model/model-ver
+               Vertex model1 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-1").property("model-type", "widget").next();
+               modelVer1 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-1")
+                               .property("model-name", "connector").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model1, modelVer1);
+               
+               // Add named-query and named-query-element nodes.  Point the named-query-element at model1
+               Vertex namedQ1 = g.addV().property("aai-node-type", "named-query")
+               .property("named-query-uuid", "named-query-uuid-1").property("named-query-name", "test-NQ-1").next();
+               Vertex namedQElement1 = g.addV().property("aai-node-type", "named-query-element")
+               .property("named-query-element-uuid", "named-query-element-uuid-1").next();
+               edgeSerializer.addTreeEdge(g, namedQElement1, namedQ1);
+               edgeSerializer.addEdge(g, model1, namedQElement1);
+
+
+               // For model3/model-ver3 - we use valid invId/versionIds
+               Vertex model3 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "4c01c948-7607-4d66-8a6c-99c2c2717936").property("model-type", "widget")
+                               .next();
+               modelVer3 = g.addV().property("aai-node-type", "model-ver")
+                               .property("model-version-id", "22104c9f-29fd-462f-be07-96cd6b46dd33")
+                               .property("model-name", "connector").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model3, modelVer3);
+       }
+
+       class MockMigrateBadWidgetModelsPartOne extends MigrateBadWidgetModelsPartOne {
+
+               public MockMigrateBadWidgetModelsPartOne(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+                       super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               }
+
+               @Override
+               public Optional<String[]> getAffectedNodeTypes() {
+                       return Optional.of(new String[] { "model", "model-element", "model-ver" });
+               }
+
+               @Override
+               public String getMigrationName() {
+                       return "MockMigrateBadWidgetModelsPartOne";
+               }
+       }
+
+       @Test
+       public void testBelongsToEdgeStillThereForNqElement() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "named-query-element").has("named-query-element-uuid", "named-query-element-uuid-1")
+                                               .out("org.onap.relationships.inventory.BelongsTo")
+                                               .has("named-query-uuid", "named-query-uuid-1").hasNext());      
+       }
+       
+       @Test
+       public void testBadNodesAreNotGone() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-1").hasNext());
+               assertEquals(true,
+                               g.V().has("aai-node-type", "model").has("model-invariant-id", "model-invariant-id-1").hasNext());
+       }
+       
+       @Test
+       public void testNQNodesAreStillThere() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "named-query").has("named-query-uuid", "named-query-uuid-1").hasNext());
+               assertEquals(true,
+                               g.V().has("aai-node-type", "named-query-element").has("named-query-element-uuid", "named-query-element-uuid-1").hasNext());
+               
+       }
+       
+
+       @Test
+       public void testThatNewEdgeAdded() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "model").has("model-invariant-id", "4c01c948-7607-4d66-8a6c-99c2c2717936")
+                                               .in("org.onap.relationships.inventory.IsA").has("named-query-element-uuid", "named-query-element-uuid-1")
+                                               .hasNext());
+       }
+
+       @Test
+       public void testThatOldEdgeDeleted() {
+               assertEquals(false,
+                               g.V().has("aai-node-type", "model").has("model-invariant-id", "model-invariant-id-1")
+                                               .in("org.onap.relationships.inventory.IsA").has("named-query-element-uuid", "named-query-element-uuid-1")
+                                               .hasNext());
+       }
+       
+       
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartTwoTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateBadWidgetModelsPartTwoTest.java
new file mode 100644 (file)
index 0000000..f3c4bb1
--- /dev/null
@@ -0,0 +1,172 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.janusgraph.core.schema.JanusGraphManagement;
+
+public class MigrateBadWidgetModelsPartTwoTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private GraphTraversalSource g;
+       private MockMigrateBadWidgetModelsPartTwo migration;
+       private Vertex modelVer1 = null;
+       private Vertex modelVer3 = null;
+
+       @Before
+       public void setUp() throws Exception {
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+               JanusGraphManagement janusgraphManagement = graph.openManagement();
+               g = graph.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(queryStyle, type, loader);
+               createFirstVertexAndRelatedVertexes();
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+               GraphTraversalSource traversal = g;
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               Mockito.doReturn(janusgraphManagement).when(adminSpy).getManagementSystem();
+
+               migration = new MockMigrateBadWidgetModelsPartTwo(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+
+       private void createFirstVertexAndRelatedVertexes() throws AAIException {
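+               // model1/model-ver1 use invalid ids and are expected to be removed by the migration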
+               Vertex model1 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-1").property("model-type", "widget").next();
+               modelVer1 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-1")
+                               .property("model-name", "connector").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model1, modelVer1);
+
+               // Create the cousin vertex - modelElement2 for modelVer1
+               Vertex model2 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-2").property("model-type", "resource").next();
+               Vertex modelVer2 = g.addV().property("aai-node-type", "model-ver")
+                               .property("model-version-id", "model-version-id-2").property("model-name", "resourceMod4Test")
+                               .property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model2, modelVer2);
+               Vertex modelElement2 = g.addV().property("aai-node-type", "model-element")
+                               .property("model-element-uuid", "model-element-uuid-2").property("new-data-del-flag", "T")
+                               .property("cardinality", "unbounded").next();
+               edgeSerializer.addTreeEdge(g, modelVer2, modelElement2);
+               edgeSerializer.addEdge(g, modelVer1, modelElement2);
+
+               // For model3/model-ver3 - we use valid invid/versionIds
+               Vertex model3 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "4c01c948-7607-4d66-8a6c-99c2c2717936").property("model-type", "widget")
+                               .next();
+               modelVer3 = g.addV().property("aai-node-type", "model-ver")
+                               .property("model-version-id", "22104c9f-29fd-462f-be07-96cd6b46dd33")
+                               .property("model-name", "connector").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model3, modelVer3);
+       }
+
+       class MockMigrateBadWidgetModelsPartTwo extends MigrateBadWidgetModelsPartTwo {
+
+               public MockMigrateBadWidgetModelsPartTwo(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+                       super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               }
+
+               @Override
+               public Optional<String[]> getAffectedNodeTypes() {
+                       return Optional.of(new String[] { "model", "model-element", "model-ver" });
+               }
+
+               @Override
+               public String getMigrationName() {
+                       return "MockMigrateNodesThatUseBadWidgetModelsPartTwo";
+               }
+       }
+
+
+       @Test
+       public void testBelongsToEdgesStillThereX() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "22104c9f-29fd-462f-be07-96cd6b46dd33")
+                                               .out("org.onap.relationships.inventory.BelongsTo")
+                                               .has("model-invariant-id", "4c01c948-7607-4d66-8a6c-99c2c2717936").hasNext());
+               assertEquals(true,
+                               g.V().has("aai-node-type", "model-element").has("model-element-uuid", "model-element-uuid-2")
+                                               .out("org.onap.relationships.inventory.BelongsTo").has("model-version-id", "model-version-id-2")
+                                               .hasNext());
+       }
+       
+       @Test
+       public void testBadVerNodeIsGoneX() {
+               assertEquals(false,
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-1").hasNext());
+       }
+       
+       @Test
+       public void testBadModelNodeIsGoneX() {
+               assertEquals(false,
+                               g.V().has("aai-node-type", "model").has("model-invariant-id", "model-invariant-id-1").hasNext());
+       }
+
+       @Test
+       public void testThatNewEdgeAddedX() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "22104c9f-29fd-462f-be07-96cd6b46dd33")
+                                               .in("org.onap.relationships.inventory.IsA").has("model-element-uuid", "model-element-uuid-2")
+                                               .hasNext());
+       }
+
+       @Test
+       public void testThatOldEdgeGone() {
+               assertEquals(false,
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-1")
+                                               .in("org.onap.relationships.inventory.IsA").has("model-element-uuid", "model-element-uuid-2")
+                                               .hasNext());
+       }
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateEdgesBetweenVnfcAndVfModuleTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateEdgesBetweenVnfcAndVfModuleTest.java
new file mode 100644 (file)
index 0000000..90d30fc
--- /dev/null
@@ -0,0 +1,119 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.edges.enums.EdgeProperty;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+public class MigrateEdgesBetweenVnfcAndVfModuleTest extends AAISetup{
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private static Loader loader;
+       private static TransactionalGraphEngine dbEngine;
+       private static JanusGraph graph;
+       private static MigrateEdgesBetweenVnfcAndVfModule migration;
+       private static JanusGraphTransaction tx;
+       private static GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+               
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+
+               Vertex vnfc = g.addV().property("aai-node-type", "vnfc")
+                .property("vnfc-name", "vnfc-name-1").next();
+
+        Vertex vfmodule = g.addV().property("aai-node-type", "vf-module")
+                .property("vf-module-id", "vf-module-id-1").next();
+        
+        
+        //edgeSerializer.addEdge(g, vfmodule, vnfc,"org.onap.relationships.inventory.Uses");
+        
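+        // add the vf-module -> vnfc edge by hand with legacy edge properties so the migration has an edge to update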
+        vfmodule.addEdge("org.onap.relationships.inventory.Uses", vnfc, EdgeProperty.CONTAINS.toString(), "NONE",
+                       EdgeProperty.DELETE_OTHER_V.toString(), "NONE", EdgeProperty.PREVENT_DELETE.toString(), "OUT");
+        
+        
+        TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when (spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+
+               migration = new MigrateEdgesBetweenVnfcAndVfModule(spy,loaderFactory,edgeIngestor,edgeSerializer,schemaVersions);
+               migration.run();
+       }
+
+       @After
+       public void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+
+       @Ignore
+       @Test
+    public void testIdsUpdated() throws Exception {
+               
+               //System.out.println("After Migration: " +migration.asString(g.V().has("aai-node-type","vnfc").inE().next()));
+               
+               assertEquals("vf-module to vnfc migration done", true,
+                               g.V().has("aai-node-type", "vf-module").outE().hasLabel("org.onap.relationships.inventory.Uses") 
+                                               .has(EdgeProperty.DELETE_OTHER_V.toString(), "OUT").hasNext());
+               
+               assertEquals("vf-module to vnfc migration done", true,
+                               g.V().has("aai-node-type", "vnfc").inE().hasLabel("org.onap.relationships.inventory.Uses") 
+                                               .has(EdgeProperty.PREVENT_DELETE.toString(), "NONE").hasNext());  
+    }   
+}
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateForwarderEvcCircuitIdTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateForwarderEvcCircuitIdTest.java
new file mode 100644 (file)
index 0000000..1006a29
--- /dev/null
@@ -0,0 +1,195 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+public class MigrateForwarderEvcCircuitIdTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private MigrateForwarderEvcCircuitId migration;
+       private GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+               
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+
+               Vertex pnf1 = g.addV().property("aai-node-type", "pnf").property("pnf-name", "pnf-1").next();
+               Vertex pnf2 = g.addV().property("aai-node-type", "pnf").property("pnf-name", "pnf-2").next();
+               Vertex pnf3 = g.addV().property("aai-node-type", "pnf").property("pnf-name", "pnf-3").next();
+               Vertex pnf4 = g.addV().property("aai-node-type", "pnf").property("pnf-name", "pnf-4").next();
+               Vertex pnf5 = g.addV().property("aai-node-type", "pnf").property("pnf-name", "pnf-5").next();
+
+               Vertex pInterface1 = g.addV().property("aai-node-type", "p-interface").property("interface-name", "p-interface-1").next();
+               Vertex pInterface2 = g.addV().property("aai-node-type", "p-interface").property("interface-name", "p-interface-2").next();
+               Vertex pInterface3 = g.addV().property("aai-node-type", "p-interface").property("interface-name", "p-interface-3").next();
+               Vertex pInterface4 = g.addV().property("aai-node-type", "p-interface").property("interface-name", "p-interface-4").next();
+               Vertex pInterface5 = g.addV().property("aai-node-type", "p-interface").property("interface-name", "p-interface-5").next();
+
+               Vertex forwarder1 = g.addV().property("aai-node-type", "forwarder").property("sequence", 1)
+                                       .property("forwarder-role",  "ingress").next();
+               Vertex forwarder2 = g.addV().property("aai-node-type", "forwarder").property("sequence", 1)
+                               .property("forwarder-role",  "ingress").next();
+               Vertex forwarder3 = g.addV().property("aai-node-type", "forwarder").property("sequence", 1)
+                               .property("forwarder-role",  "ingress").next();
+               Vertex forwarder4 = g.addV().property("aai-node-type", "forwarder").property("sequence", 1)
+                               .property("forwarder-role",  "ingress").next();
+               Vertex forwarder5 = g.addV().property("aai-node-type", "forwarder").property("sequence", 1)
+                               .property("forwarder-role",  "ingress").next();
+               
+               
+               Vertex configuration1 = g.addV().property("aai-node-type", "configuration").property("configuration-id", "config-1")
+                               .property("configuration-type", "test").property("configuration-sub-type", "test").next();
+               Vertex configuration2 = g.addV().property("aai-node-type", "configuration").property("configuration-id", "config-2")
+                               .property("configuration-type", "test").property("configuration-sub-type", "test").next();
+               Vertex configuration3 = g.addV().property("aai-node-type", "configuration").property("configuration-id", "config-3")
+                               .property("configuration-type", "test").property("configuration-sub-type", "test").next();
+               Vertex configuration4 = g.addV().property("aai-node-type", "configuration").property("configuration-id", "config-4")
+                               .property("configuration-type", "test").property("configuration-sub-type", "test").next();
+               Vertex configuration5 = g.addV().property("aai-node-type", "configuration").property("configuration-id", "config-5")
+                               .property("configuration-type", "test").property("configuration-sub-type", "test").next();
+
+
+               Vertex forwarderEvc1 = g.addV().property("aai-node-type", "forwarder-evc").property("forwarder-evc-id", "evc-1")
+                               .property("circuit-id", "1").property("resource-version", "v13").next();
+               Vertex forwarderEvc2 = g.addV().property("aai-node-type", "forwarder-evc").property("forwarder-evc-id", "evc-2")
+                               .property("circuit-id", "2").property("resource-version", "v13").next();
+               Vertex forwarderEvc3 = g.addV().property("aai-node-type", "forwarder-evc").property("forwarder-evc-id", "evc-3")
+                               .property("resource-version", "v13").next();
+               Vertex forwarderEvc4 = g.addV().property("aai-node-type", "forwarder-evc").property("forwarder-evc-id", "evc-4")
+                               .property("circuit-id", "3").property("resource-version", "v13").next();
+               Vertex forwarderEvc5 = g.addV().property("aai-node-type", "forwarder-evc").property("forwarder-evc-id", "evc-5")
+                               .property("resource-version", "v13").next();            
+               
+
+               
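+               // Wire five pnf -> p-interface -> forwarder -> configuration -> forwarder-evc chains
+               // (forwarderEvc3 and forwarderEvc5 are created without a circuit-id).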
+               edgeSerializer.addTreeEdge(g, pnf1, pInterface1);
+               edgeSerializer.addEdge(g, pInterface1, forwarder1);
+               edgeSerializer.addEdge(g, forwarder1, configuration1);
+               edgeSerializer.addTreeEdge(g, configuration1, forwarderEvc1);
+               
+               edgeSerializer.addTreeEdge(g, pnf2, pInterface2);
+               edgeSerializer.addEdge(g, pInterface2, forwarder2);
+               edgeSerializer.addEdge(g, forwarder2, configuration2);
+               edgeSerializer.addTreeEdge(g, configuration2, forwarderEvc2);
+               
+               edgeSerializer.addTreeEdge(g, pnf3, pInterface3);
+               edgeSerializer.addEdge(g, pInterface3, forwarder3);
+               edgeSerializer.addEdge(g, forwarder3, configuration3);
+               edgeSerializer.addTreeEdge(g, configuration3, forwarderEvc3);
+               
+               edgeSerializer.addTreeEdge(g, pnf4, pInterface4);
+               edgeSerializer.addEdge(g, pInterface4, forwarder4);
+               edgeSerializer.addEdge(g, forwarder4, configuration4);
+               edgeSerializer.addTreeEdge(g, configuration4, forwarderEvc4);
+               
+               edgeSerializer.addTreeEdge(g, pnf5, pInterface5);
+               edgeSerializer.addEdge(g, pInterface5, forwarder5);
+               edgeSerializer.addEdge(g, forwarder5, configuration5);
+               edgeSerializer.addTreeEdge(g, configuration5, forwarderEvc5);
+
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when (spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+               
+               migration = new MigrateForwarderEvcCircuitId(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+       
+       @Test
+       public void testCircuitIdsUpdated() throws Exception {
+               // check if graph nodes are updated
+               
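+               // Note: the "updated" checks below look vertices up by the expected circuit-id value, so they
+               // effectively assert that a forwarder-evc carrying that circuit-id exists after the migration.
+               // A sketch of a more direct check, keyed on forwarder-evc-id (assuming evc-1 is the vertex
+               // expected to carry circuit-id "10"):
+               //
+               //     assertEquals("First circuit-id updated", "10",
+               //             g.V().has("aai-node-type", "forwarder-evc").has("forwarder-evc-id", "evc-1")
+               //                     .next().value("circuit-id").toString());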
+               assertEquals("First circuit-id updated", "10", 
+                               g.V().has("aai-node-type", "forwarder-evc").has("circuit-id", "10").next().value("circuit-id").toString());
+
+               assertEquals("Second circuit-id updated", "20", 
+                               g.V().has("aai-node-type", "forwarder-evc").has("circuit-id", "20").next().value("circuit-id").toString());
+
+               assertFalse("Third circuit-id remains empty", g.V().has("aai-node-type", "forwarder-evc").has("forwarder-evc-id", "evc-3")
+                               .next().property("circuit-id").isPresent());
+
+               assertEquals("Fourth circuit-id not updated", "3", 
+                               g.V().has("aai-node-type", "forwarder-evc").has("circuit-id", "3").next().value("circuit-id").toString());
+
+               assertFalse("Fifth circuit-id remains empty", g.V().has("aai-node-type", "forwarder-evc").has("forwarder-evc-id", "evc-5")
+                               .next().property("circuit-id").isPresent());
+       }
+       
+       @Test
+       public void testGetAffectedNodeTypes() {
+               Optional<String[]> types = migration.getAffectedNodeTypes();
+               Optional<String[]> expected = Optional.of(new String[]{"forwarder-evc"});
+               
+               assertNotNull(types);
+               assertArrayEquals(expected.get(), types.get());
+       }
+
+       @Test
+       public void testGetMigrationName() {
+               String migrationName = migration.getMigrationName();
+
+               assertNotNull(migrationName);
+               assertEquals("MigrateForwarderEvcCircuitId", migrationName);
+       }
+}
diff --git a/src/test/java/org/onap/aai/migration/v14/MigrateGenericVnfMgmtOptionsTest.java b/src/test/java/org/onap/aai/migration/v14/MigrateGenericVnfMgmtOptionsTest.java
new file mode 100644 (file)
index 0000000..a35c300
--- /dev/null
@@ -0,0 +1,230 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v14;\r
+\r
+import org.onap.aai.AAISetup;\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+\r
+\r
+import org.janusgraph.core.JanusGraph;\r
+import org.janusgraph.core.JanusGraphFactory;\r
+import org.janusgraph.core.JanusGraphTransaction;\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;\r
+import org.junit.Before;\r
+import org.junit.Test;\r
+import org.onap.aai.dbmap.DBConnectionType;\r
+import org.onap.aai.introspection.Loader;\r
+import org.onap.aai.introspection.ModelType;\r
+import org.onap.aai.setup.SchemaVersions;\r
+import org.onap.aai.setup.SchemaVersion;\r
+import org.onap.aai.migration.Status;\r
+import org.onap.aai.serialization.engines.QueryStyle;\r
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;\r
+\r
+import static org.mockito.Mockito.spy;\r
+import static org.mockito.Mockito.when;\r
+\r
+import java.util.Optional;\r
+\r
+import javax.validation.constraints.AssertFalse;\r
+\r
+import static org.junit.Assert.assertTrue;\r
+\r
+public class MigrateGenericVnfMgmtOptionsTest extends AAISetup {\r
+       \r
+       protected static final String VNF_NODE_TYPE = "generic-vnf";\r
+\r
+       public static class MigrateVnfType extends MigrateGenericVnfMgmtOptions {\r
+        public MigrateVnfType(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){\r
+            super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+        }\r
+        @Override\r
+        public Status getStatus() {\r
+            return Status.SUCCESS;\r
+        }\r
+        @Override\r
+        public Optional<String[]> getAffectedNodeTypes() {\r
+               return Optional.of(new String[]{VNF_NODE_TYPE});\r
+        }\r
+        @Override\r
+        public String getMigrationName() {\r
+            return "MockMigrateVnfType";\r
+        }\r
+    }\r
+\r
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;\r
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;\r
+    private final static DBConnectionType type = DBConnectionType.REALTIME;\r
+    private Loader loader;\r
+    private TransactionalGraphEngine dbEngine;\r
+    private MigrateVnfType migration;\r
+    private GraphTraversalSource g;\r
+\r
+    @Before\r
+    public void setup() throws Exception{\r
+        g = tx.traversal();\r
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());\r
+        dbEngine = new JanusGraphDBEngine(\r
+                queryStyle,\r
+                type,\r
+                loader);\r
+\r
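+        // Eligible vnf-types (HN, HP, HG): for each, one vnf with management-option missing, one with an\r
+        // empty value, and one with an existing value.\r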
+        //generic-vnf\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf0")\r
+                .property("vnf-type", "HN")\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf1")\r
+                .property("vnf-type", "HN")\r
+                .property("management-option", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf2")\r
+                .property("vnf-type", "HN")\r
+                .property("management-option", "existingOption")\r
+                .next();\r
+        \r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                       .property("vnf-id", "generic-vnf10")\r
+                       .property("vnf-type", "HP")\r
+                       .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                       .property("vnf-id", "generic-vnf11")\r
+                       .property("vnf-type", "HP")\r
+                       .property("management-option", "")\r
+                       .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                       .property("vnf-id", "generic-vnf12")\r
+                       .property("vnf-type", "HP")\r
+                       .property("management-option", "existingOption")\r
+                       .next();\r
+        \r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                               .property("vnf-id", "generic-vnf20")\r
+                               .property("vnf-type", "HG")\r
+                               .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                               .property("vnf-id", "generic-vnf21")\r
+                               .property("vnf-type", "HG")\r
+                               .property("management-option", "")\r
+                               .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                               .property("vnf-id", "generic-vnf22")\r
+                               .property("vnf-type", "HG")\r
+                               .property("management-option", "existingOption")\r
+                               .next();        \r
+        \r
+        // Non-eligible migration conditions - vnf-type = XX\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                       .property("vnf-id", "generic-vnf30")\r
+                       .property("vnf-type", "XX")\r
+                       .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                       .property("vnf-id", "generic-vnf31")\r
+                       .property("vnf-type", "XX")\r
+                       .property("management-option", "")\r
+                       .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                               .property("vnf-id", "generic-vnf32")\r
+                               .property("vnf-type", "XX")\r
+                               .property("management-option", "existingOption")\r
+                               .next(); \r
+        // Non-eligible migration conditions - vnf-type = missing\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                       .property("vnf-id", "generic-vnf40")\r
+                       .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                       .property("vnf-id", "generic-vnf41")\r
+                        .property("management-option", "")\r
+                       .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                               .property("vnf-id", "generic-vnf42")\r
+                               .property("management-option", "existingOption")\r
+                               .next(); \r
+        \r
+        TransactionalGraphEngine spy = spy(dbEngine);\r
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());\r
+        GraphTraversalSource traversal = g;\r
+        when(spy.asAdmin()).thenReturn(adminSpy);\r
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);\r
+        migration = new MigrateVnfType(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+        migration.run();\r
+        \r
+    }\r
+\r
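+    // The assertions below expect eligible vnfs with a missing or empty management-option to be set to\r
+    // "AT&T Managed-Basic", and all other vnfs to be left unchanged.\r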
+    @Test\r
+    public void testMissingProperty(){\r
+       //management-option\r
+        assertTrue("Value of generic-vnf should be updated since the property management-option doesn't exist",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf0").has("management-option", "AT&T Managed-Basic").hasNext());      \r
+        assertTrue("Value of generic-vnf  should be updated since the property management-option doesn't exist",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf10").has("management-option", "AT&T Managed-Basic").hasNext());\r
+        assertTrue("Value of generic-vnf  should be updated since the property management-option doesn't exist",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf20").has("management-option", "AT&T Managed-Basic").hasNext());              \r
+    }\r
+\r
+    @Test\r
+    public void testEmptyValue() {                         \r
+      //management-option\r
+        assertTrue("Value of generic-vnf should be updated since the value for management-option is an empty string",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf1").has("management-option", "AT&T Managed-Basic").hasNext());\r
+        assertTrue("Value of generic-vnf should be updated since the value for management-option is an empty string",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf11").has("management-option", "AT&T Managed-Basic").hasNext());\r
+        assertTrue("Value of generic-vnf should be updated since the value for management-option is an empty string",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf21").has("management-option", "AT&T Managed-Basic").hasNext());\r
+    \r
+    }\r
+    \r
+    @Test\r
+    public void testExistingValues() {\r
+      //management-option\r
+        assertTrue("Value of generic-vnf shouldn't be updated since management-option already exists",\r
+                !g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf2").has("management-option", "AT&T Managed-Basic").hasNext());\r
+        assertTrue("Value of generic-vnf shouldn't be updated since management-option already exists",\r
+                !g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf12").has("management-option", "AT&T Managed-Basic").hasNext());\r
+        assertTrue("Value of generic-vnf shouldn't be updated since management-option already exists",\r
+                !g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf22").has("management-option", "AT&T Managed-Basic").hasNext());\r
+       \r
+        \r
+    }\r
+    \r
+   @Test\r
+    public void testExistingVnfsNotMigrated() {\r
+       //management-option\r
+        assertTrue("Value of generic-vnf shouldn't be updated since vnf-type is not affected",\r
+                !g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf30").has("management-option", "AT&T Managed-Basic").hasNext());\r
+        assertTrue("Value of generic-vnf  shouldn't be updated since vnf-type is not affected",\r
+                !g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf31").has("management-option", "AT&T Managed-Basic").hasNext());\r
+        assertTrue("Value of generic-vnf  shouldn't be updated since vnf-type is not affected and management-option already exists",\r
+                !g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf32").has("management-option", "AT&T Managed-Basic").hasNext());\r
+        \r
+        assertTrue("Value of generic-vnf shouldn't be updated since vnf-type is not present",\r
+                !g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf40").has("management-option", "AT&T Managed-Basic").hasNext());\r
+        assertTrue("Value of generic-vnf  shouldn't be updated since vnf-type is not present",\r
+                !g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf41").has("management-option", "AT&T Managed-Basic").hasNext());\r
+        assertTrue("Value of generic-vnf  shouldn't be updated since vnf-type is not present and management-option already exists",\r
+                !g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf42").has("management-option", "AT&T Managed-Basic").hasNext());\r
+      \r
+    } \r
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v14/MigrateMissingFqdnOnPserversTest.java b/src/test/java/org/onap/aai/migration/v14/MigrateMissingFqdnOnPserversTest.java
new file mode 100644 (file)
index 0000000..f4ec345
--- /dev/null
@@ -0,0 +1,129 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v14;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+
+public class MigrateMissingFqdnOnPserversTest extends AAISetup{
+
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+    private final static DBConnectionType type = DBConnectionType.REALTIME;
+    private Loader loader;
+    private TransactionalGraphEngine dbEngine;
+    private JanusGraph graph;
+    private MigrateMissingFqdnOnPservers migration;
+    private GraphTraversalSource g;
+    private JanusGraphTransaction tx;
+    Vertex pserver1;
+    Vertex pserver2;
+    Vertex pserver3;
+    Vertex pserver4;
+    Vertex pserver5;
+    
+
+    @Before
+    public void setUp() throws Exception {
+       
+       graph = JanusGraphFactory.build().set("storage.backend", "inmemory").open();
+        tx = graph.newTransaction();
+        g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                type,
+                loader);
+        
+        
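+        // Five pservers: (1) RO-sourced, dotted hostname, no fqdn; (2) dotted hostname, empty fqdn;
+        // (3) fqdn already populated; (4) non-dotted hostname, empty fqdn; (5) non-dotted hostname, no fqdn.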
+        pserver1 = g.addV().property("aai-node-type", MigrateMissingFqdnOnPservers.PSERVER_NODE_TYPE)
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_HOSTNAME, "hostname1.com")
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_SOURCEOFTRUTH, "RO")
+                .next();
+         
+        pserver2 = g.addV().property("aai-node-type", MigrateMissingFqdnOnPservers.PSERVER_NODE_TYPE)
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_HOSTNAME, "hostname2.com")
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_FQDN, "")
+                .next();
+         
+        pserver3 = g.addV().property("aai-node-type", MigrateMissingFqdnOnPservers.PSERVER_NODE_TYPE)
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_HOSTNAME, "akr1")
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_SOURCEOFTRUTH, "RO")
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_FQDN, "akr1")
+                .next();
+        
+        pserver4 = g.addV().property("aai-node-type", MigrateMissingFqdnOnPservers.PSERVER_NODE_TYPE)
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_HOSTNAME, "hostname1")
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_SOURCEOFTRUTH, "RO")
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_FQDN, "")
+                .next();
+        
+        pserver5 = g.addV().property("aai-node-type", MigrateMissingFqdnOnPservers.PSERVER_NODE_TYPE)
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_HOSTNAME, "hostname2")
+                       .property(MigrateMissingFqdnOnPservers.PSERVER_SOURCEOFTRUTH, "RO")
+                       .next();
+        
+        TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+        GraphTraversalSource traversal = g;
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
+        migration = new MigrateMissingFqdnOnPservers(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        migration.run();
+    }
+
+    @After
+    public void cleanUp() {
+        tx.rollback();
+        graph.close();
+    }
+
+
+    /**
+     * Checks whether the fqdn value was changed by the migration.
+     */
+
+    @Test
+    public void confirmFQDNValueChanged() {
+
+        assertEquals("hostname1.com",pserver1.property(MigrateMissingFqdnOnPservers.PSERVER_FQDN).value());//created fqdn property
+        assertEquals("hostname2.com",pserver2.property(MigrateMissingFqdnOnPservers.PSERVER_FQDN).value());//updated empty fqdn
+        assertEquals("akr1",pserver3.property(MigrateMissingFqdnOnPservers.PSERVER_FQDN).value());//Not changed
+        assertEquals("",pserver4.property(MigrateMissingFqdnOnPservers.PSERVER_FQDN).value());//Not changed
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v14/MigrateNetworkTechToCloudRegionTest.java b/src/test/java/org/onap/aai/migration/v14/MigrateNetworkTechToCloudRegionTest.java
new file mode 100644 (file)
index 0000000..6438034
--- /dev/null
@@ -0,0 +1,147 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v14;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+public class MigrateNetworkTechToCloudRegionTest extends AAISetup{
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private GraphTraversalSource g;
+       private MigrateNetworkTechToCloudRegion migration;
+
+       @Before
+       public void setUp() throws Exception {
+               g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                type,
+                loader);
+        Vertex cloudRegion1 = g.addV().property("aai-node-type", "cloud-region").property("cloud-region-id", "cloud-region-id-1").property("cloud-owner", "att-aic").property("resource-version", "1").next();
+        Vertex cloudRegion2 = g.addV().property("aai-node-type", "cloud-region").property("cloud-region-id", "cloud-region-id-2").property("cloud-owner", "att-nc").property("resource-version", "2").next();
+               Vertex cloudRegion3 = g.addV().property("aai-node-type", "cloud-region").property("cloud-region-id", "cloud-region-id-3").property("cloud-owner", "att-aic").property("resource-version", "7").next();
+               
+               Vertex networkTech1 = g.addV().property("aai-node-type","network-technology").property("network-technology-id", "network-technology-1").property("network-technology-name", "CONTRAIL").property("resource-version", "3").next();
+               Vertex networkTech2 = g.addV().property("aai-node-type", "network-technology").property("network-technology-id", "network-technology-2").property("network-technology-name", "AIC_SR_IOV").property("resource-version", "4").next();
+               Vertex networkTech3 = g.addV().property("aai-node-type", "network-technology").property("network-technology-id", "network-technology-3").property("network-technology-name", "TEST").property("resource-version", "5").next();
+               Vertex networkTech4 = g.addV().property("aai-node-type", "network-technology").property("network-technology-id", "network-technology-4").property("network-technology-name", "OVS").property("resource-version", "8").next();
+               
+               
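+               // Only cloudRegion1 -> networkTech1 is linked up front; the test then asserts which
+               // cloud-region/network-technology edges the migration did and did not create.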
+               edgeSerializer.addEdge(g, cloudRegion1, networkTech1);
+               
+               
+               TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+        GraphTraversalSource traversal = g;
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               
+        
+               migration = new MigrateNetworkTechToCloudRegion(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+               
+       }
+
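+       // Note: unlike the sibling tests added in this change (e.g. MigrateMissingFqdnOnPserversTest),
+       // cleanUp() is not annotated with @After, so JUnit will not invoke it automatically. A minimal
+       // sketch of the annotated form used elsewhere in this change (assuming org.junit.After is imported
+       // and that rolling back the shared transaction is the desired behaviour here):
+       //
+       //     @After
+       //     public void cleanUp() {
+       //         tx.rollback();
+       //     }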
+       public void cleanUp() {
+               tx.rollback();
+               graph.close();
+       }
+
+       @Test
+       public void checkEdgeCreatedForNetworkTechnology() {
+               
+               assertEquals(true,
+                               g.V().has("aai-node-type", "cloud-region").has("cloud-region-id", "cloud-region-id-1")
+                                               .out()
+                                               .has("aai-node-type", "network-technology").has("network-technology-id","network-technology-2").hasNext());
+               
+               assertEquals(true,
+                               g.V().has("aai-node-type", "cloud-region").has("cloud-region-id", "cloud-region-id-3")
+                                               .out()
+                                               .has("aai-node-type", "network-technology").has("network-technology-id","network-technology-1").hasNext());
+               
+               
+               assertEquals(true,
+                               g.V().has("aai-node-type", "cloud-region").has("cloud-region-id", "cloud-region-id-2")
+                                               .out()
+                                               .has("aai-node-type", "network-technology").has("network-technology-id","network-technology-4").hasNext());
+       
+               
+               assertEquals("No edge should be created from cloud-region-id-1 to network-technology-3", false,
+                               g.V().has("aai-node-type", "cloud-region").has("cloud-region-id", "cloud-region-id-1")
+                                               .out()
+                                               .has("aai-node-type", "network-technology").has("network-technology-id","network-technology-3").hasNext());
+               
+               assertEquals("No edge should be created from cloud-region-id-2 to network-technology-1", false,
+                               g.V().has("aai-node-type", "cloud-region").has("cloud-region-id", "cloud-region-id-2")
+                                               .out()
+                                               .has("aai-node-type", "network-technology").has("network-technology-id","network-technology-1").hasNext());
+               
+               
+               assertEquals("Edge exists to 2 cloud regions", Long.valueOf(2L),
+                               g.V().has("aai-node-type", "network-technology").has("network-technology-id", "network-technology-1")
+                                               .in().count().next());
+               
+       }
+
+}
diff --git a/src/test/java/org/onap/aai/migration/v14/MigrateSameSourcedRCTROPServerDataTest.java b/src/test/java/org/onap/aai/migration/v14/MigrateSameSourcedRCTROPServerDataTest.java
new file mode 100644 (file)
index 0000000..3795dac
--- /dev/null
@@ -0,0 +1,1056 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v14;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.List;
+import java.util.UUID;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.migration.v14.MigrateSameSourcedRCTROPserverData;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+public class MigrateSameSourcedRCTROPServerDataTest extends AAISetup{
+
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+    private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+     Loader loader;
+     TransactionalGraphEngine dbEngine;
+     JanusGraph graph;
+     MigrateSameSourcedRCTROPserverData migration;
+     JanusGraphTransaction tx;
+     GraphTraversalSource g;
+     Vertex pintOld;
+     Vertex lInterfaceold;
+     Vertex pintOldRo;
+     Vertex lInterfaceoldRo;
+     Vertex pintOldRo1;
+     Vertex pintNewRo1;
+     Vertex plinkROonOldRo1;
+
+     Vertex pintOldScn3;
+     Vertex pintNewScn3;
+     Vertex pLinkOldScn3;
+     Vertex  pLinkNewScn3;
+
+     Vertex  pintOldScn2;
+     Vertex  pintNewScn2;
+     Vertex  pLinkOldScn2;
+     Vertex  pintOld2Scn2;
+     Vertex sriovPfOld;
+      Vertex sriovVfOld;
+
+     Vertex lInterfaceold2;
+     Vertex pintOld2;
+
+     Vertex pLinkMoveScn2;
+     Vertex pLinkMoveScn1;
+     
+     Vertex pint1ROOld;
+     Vertex pint2ROOld;
+     Vertex pint2RONew;
+     Vertex pint3ROOld;
+     Vertex pint3RONew;
+     Vertex pint1ROOldPlink;
+     
+   //ManyToOne edge scenario
+     Vertex pserverRCTScn6;
+     Vertex pserverRCT1Scn6;
+     Vertex zoneScn61;
+     Vertex zoneScn62;
+
+    @Before
+    public void setUp() throws Exception {
+        graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+        tx = graph.newTransaction();
+        g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType,schemaVersions.getDefaultVersion());
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                type,
+                loader);
+// RCT-sourced pservers: an 'old' and a 'new' pserver with child p-interfaces, l-interfaces, an sriov-pf and an sriov-vf
+        Vertex pserverOld = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverOld")
+                .property("source-of-truth","RCT")
+                .property("fqdn","tttt.bbbb.cccc.dddd")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOld")
+                .property("resource-version","1")
+                .next();
+        Vertex pserverNew = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverNew")
+                .property("source-of-truth","RCT")
+                .property("fqdn","tttt.cccc.cccc.dddd")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverNew")
+                .property("resource-version","2")
+                .next();
+        pintOld = g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintOld")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOld/p-interfaces/p-interface/pintOld")
+                .next();
+
+        lInterfaceold = g.addV().property("aai-node-type", "l-interface")
+                .property("interface-name", "linterfaceold")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOld/p-interfaces/p-interface/pintOld/l-interfaces/l-interface/linterfaceold")
+                .next();
+
+        pintOld2 = g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintOld2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOld/p-interfaces/p-interface/pintOld2")
+                .next();
+        lInterfaceold2 = g.addV().property("aai-node-type", "l-interface")
+                .property("interface-name", "linterfaceold2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOld/p-interfaces/p-interface/pintOld2/l-interfaces/l-interface/linterfaceold2")
+                .next();
+
+        sriovPfOld = g.addV().property("aai-node-type", "sriov-pf")
+                .property("pf-pci-id","sriovPfOld")
+                .property("source-of-truth","RCT")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOld/p-interfaces/p-interface/pintOld2/sriov-pfs/sriov-pf/sriovPfOld")
+                .next();
+
+        sriovVfOld = g.addV().property("aai-node-type", "sriov-vf")
+                .property("pci-id","sriovVfOld")
+                .property("source-of-truth","RCT")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOld/p-interfaces/p-interface/pintOld2/l-interfaces/l-interface/linterfaceold2/sriov-vfs/sriov-vf/sriovVfOld")
+                .next();
+        
+        
+
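+        // Related objects for the RCT scenarios: a vserver with an l-interface and sriov-vf, plus a complex.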
+        Vertex vserver3 = g.addV().property("aai-node-type", "vserver")
+                .property("vserver-id", "vserver1")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/vserver3")
+                .next();
+        Vertex lInterface3 = g.addV().property("aai-node-type", "l-interface")
+                .property("interface-name", "linterfaceold3")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/vserver3")
+                .next();
+        Vertex sriovVfOld3 = g.addV().property("aai-node-type", "sriov-vf")
+                .property("pci-id","sriovVfOld3")
+                .property("source-of-truth","RCT")
+//                .property("aai-uri","/cloud-infrastructure/pservers/pserver/vserver3")
+                .next();
+        Vertex complexOld = g.addV().property("aai-node-type", "complex")
+                       .property("physical-location-id", "complexOld")
+                       .property("aai-uri","/cloud-infrastructure/complex/complexOld")
+                .next();
+
+
+// RO-sourced pservers: an 'old'/'new' pair sharing the leading hostname token "pserverRo", plus additional pservers and a complex for the related scenarios
+        Vertex pserverRoOld = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverRo.OldOne.aaaa.bbbbb")
+                .property("source-of-truth","RO")
+                .property("fqdn","aaaa.bbbb.cccc.dddd")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.OldOne.aaaa.bbbbb")
+                .property("resource-version","1")
+                .next();
+        Vertex pserverRoNew = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverRo.NewOne.aaaa.ccccccccccc")
+                .property("source-of-truth","RO")
+                .property("fqdn","aaaa.cccc.cccc.dddd")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.NewOne.aaaa.ccccccccccc")
+                .property("resource-version","2")
+                .next();
+        
+        Vertex pserverRo3 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverRo3")
+                .property("source-of-truth","RO")
+                .property("fqdn","aaaa.cccc.cccc.dddd")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo3")
+                .property("resource-version","2")
+                .next();
+        
+        Vertex pserverRo4 =  g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverRoComplexTest.aaa")
+                .property("source-of-truth","RO")
+                .property("fqdn","aaaa.cccc.cccc.dddd")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRoComplexTest.aaa")
+                .property("resource-version","2")
+                .next();
+        
+        Vertex pserverRo5 =  g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverRoComplexTest.aaaaa")
+                .property("source-of-truth","RO")
+                .property("fqdn","aaaa.cccc.cccc.eeee")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRoComplexTest.aaaaa")
+                .property("resource-version","2")
+                .next();
+        
+        pintOldRo = g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintOldRo")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.OldOne.aaaa.bbbbb/p-interfaces/p-interface/pintOldRo")
+                .next();
+        lInterfaceoldRo = g.addV().property("aai-node-type", "l-interface")
+                .property("interface-name", "linterfaceoldRo")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.OldOne.aaaa.bbbbb/p-interfaces/p-interface/pintOld/l-interfaces/l-interface/linterfaceold")
+                .next();
+        Vertex complexOldRO = g.addV().property("aai-node-type", "complex")
+                       .property("physical-location-id", "complexOldRO")
+                       .property("aai-uri","/cloud-infrastructure/complexes/complex/vserver3")
+                .next();
+
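+        // The old and new RO pservers each get a p-interface named "pintRo1"; an sriov-pf is created under
+        // the old one, along with a physical-link (plinkROonOldRo1).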
+        pintOldRo1 = g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintRo1")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.OldOne.aaaa.bbbbb/p-interfaces/p-interface/pintRo1")
+                .next();
+        
+        pintNewRo1 = g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintRo1")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.NewOne.aaaa.ccccccccccc/p-interfaces/p-interface/pintRo1")
+                .next();
+        
+        plinkROonOldRo1 = g.addV()
+                       .property("aai-node-type", "physical-link")
+                       .property("link-name", "plinkROonOldRo1")
+                       .next();
+        
+        Vertex pintNew31 = g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintRo1")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo3/p-interfaces/p-interface/pintRo1")
+                .next();
+
+        Vertex pintOld41 = g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintOld41")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRoComplexTest.aaa/p-interfaces/p-interface/pintOld41")
+                .next();
+        
+        Vertex sriovpfOldRo1= g.addV()
+                 .property("aai-node-type", "sriov-pf")
+                 .property("pf-pci-id", "sriovpfOldRo1")
+                 .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.OldOne.aaaa.bbbbb/p-interfaces/p-interface/pintRo1/sriov-pfs/sriov-pf/sriovpfOldRo1")
+                 .next();
+
+        //Scenario 3: same p-interface name; the new p-interface has a separate physical-link
+
+        Vertex pserverOldScn3 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverOldScn3")
+                .property("source-of-truth","RCT")
+                .property("fqdn","eeee.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOldScn3")
+                .property("resource-version","1")
+                .next();
+        Vertex pserverNewScn3= g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverNewScn3")
+                .property("source-of-truth","RCT")
+                .property("fqdn","eeee.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverNewScn3")
+                .property("resource-version","2")
+                .next();
+
+        pintOldScn3= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintOldScn3")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOldScn3/p-interfaces/p-interface/pintOldScn3")
+                .next();
+        pintNewScn3= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintNewScn3")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverNewScn3/p-interfaces/p-interface/pintNewScn3")
+                .next();
+
+        pLinkOldScn3 = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLinkOldScn3")
+                .property("service-provider-bandwidth-up-value", 0)
+                .property("service-provider-bandwidth-up-units", "empty")
+                .property("service-provider-bandwidth-down-value", 0)
+                .property("service-provider-bandwidth-down-units", "empty")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/vserver3")
+                .next();
+        pLinkNewScn3 = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLinkNewScn3")
+                .property("service-provider-bandwidth-up-value", 0)
+                .property("service-provider-bandwidth-up-units", "empty")
+                .property("service-provider-bandwidth-down-value", 0)
+                .property("service-provider-bandwidth-down-units", "empty")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/vserver3")
+                .next();
+
+
+
+
+
+//        Scenario 2: plink missing on the new pserver; same p-interface name
+
+        Vertex pserverOldScn2 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverOldScn2")
+                .property("source-of-truth","RCT")
+                .property("fqdn","vvvv.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOldScn2")
+                .property("resource-version","1")
+                .next();
+        Vertex pserverNewScn2= g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverNewScn2")
+                .property("source-of-truth","RCT")
+                .property("fqdn","vvvv.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverNewScn2")
+                .property("resource-version","2")
+                .next();
+
+        pintOldScn2= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintScn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOldScn2/p-interfaces/p-interface/pintScn2")
+                .next();
+        pintOld2Scn2= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintOld2Scn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverOldScn2/p-interfaces/p-interface/pintOld2Scn2")
+                .next();
+
+        pintNewScn2= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintScn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverNewScn2/p-interfaces/p-interface/pintScn2")
+                .next();
+
+        pLinkOldScn2 = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLinkOldScn2")
+                .property("service-provider-bandwidth-up-value", 0)
+                .property("service-provider-bandwidth-up-units", "empty")
+                .property("service-provider-bandwidth-down-value", 0)
+                .property("service-provider-bandwidth-down-units", "empty")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/vserver3")
+                .next();
+
+//   Scenario 2: RCT p-interface match, moving the plink and updating its name
+        Vertex pserver1Scn2 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserver1Scn2")
+                .property("source-of-truth","RCT")
+                .property("fqdn","same.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver1Scn2")
+                .property("resource-version","1")
+                .next();
+        Vertex pserver2Scn2= g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserver2Scn2")
+                .property("source-of-truth","RCT")
+                .property("fqdn","same.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver2Scn2")
+                .property("resource-version","2")
+                .next();
+        Vertex pserver3Scn2= g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserver3Scn2")
+                .property("source-of-truth","RCT")
+                .property("fqdn","jkkdahfkjashf.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver3Scn2")
+                .property("resource-version","2")
+                .next();
+
+
+        Vertex pint1Scn2= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint1Scn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver1Scn2/p-interfaces/p-interface/pint1Scn2")
+                .next();
+        Vertex pint2NewScn2= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint1Scn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver2Scn2/p-interfaces/p-interface/pint1Scn2")
+                .next();
+        Vertex pint3Scn2= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint3Scn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver3Scn2/p-interfaces/p-interface/pint3Scn2")
+                .next();
+
+         pLinkMoveScn2= g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pserver1Scn2:pint1Scn2|pserver3Scn2:pint3Scn2")
+                .property("aai-uri","/cloud-infrastructure/plink/pLinkMoveScn2")
+                .next();
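+        // Expected outcome for this scenario (see RCTpLinkMoveScn2NameChange below): the migration should
+        // rename pLinkMoveScn2 to "pserver2Scn2:pint1Scn2|pserver3Scn2:pint3Scn2", replacing the old
+        // pserver1Scn2 token with the surviving pserver2Scn2.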
+
+
+// Scenario 1: RCT plink name change; move everything from the old pserver to the new pserver, which has no plink
+        Vertex pserver1Scn1 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserver1Scn1")
+                .property("source-of-truth","RCT")
+                .property("fqdn","sameScn1.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver1Scn1")
+                .property("resource-version","1")
+                .next();
+        Vertex pserver2Scn1= g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserver2Scn1")
+                .property("source-of-truth","RCT")
+                .property("fqdn","sameScn1.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver2Scn1")
+                .property("resource-version","2")
+                .next();
+        Vertex pserver3Scn1= g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserver3Scn1")
+                .property("source-of-truth","RCT")
+                .property("fqdn","jkkdahfkjashf.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver3Scn1")
+                .property("resource-version","2")
+                .next();
+
+
+        Vertex pint1Scn1= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint1Scn1")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver1Scn1/p-interfaces/p-interface/pint1Scn1")
+                .next();
+
+        Vertex pint3Scn1= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint3Scn1")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver3Scn1/p-interfaces/p-interface/pint3Scn1")
+                .next();
+
+
+        pLinkMoveScn1= g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pserver1Scn1:pint1Scn1|pserver3Scn1:pint3Scn1")
+                .property("aai-uri","/cloud-infrastructure/plink/pLinkMoveScn1")
+                .next();
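+        // Expected outcome (see RCTpLinkMoveScn1NameChange below): the link-name should be rewritten to
+        // "pserver2Scn1:pint1Scn1|pserver3Scn1:pint3Scn1" once the migration has run.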
+
+
+        // Scenario 2: RCT p-interface match, moving the plink and updating its name
+        edgeSerializer.addTreeEdge(g,pserver1Scn2,pint1Scn2);
+
+        edgeSerializer.addTreeEdge(g,pserver2Scn2,pint2NewScn2);
+
+        edgeSerializer.addTreeEdge(g,pserver3Scn2,pint3Scn2);
+
+        edgeSerializer.addEdge(g,pint1Scn2,pLinkMoveScn2);
+        edgeSerializer.addEdge(g,pint3Scn2,pLinkMoveScn2);
+
+
+        // Scenario 1: RCT plink name change; move everything from the old pserver to the new pserver, which has no plink
+        edgeSerializer.addTreeEdge(g, pserver1Scn1,pint1Scn1);
+        edgeSerializer.addTreeEdge(g, pserver3Scn1,pint3Scn1);
+
+        edgeSerializer.addEdge(g,pint1Scn1,pLinkMoveScn1);
+        edgeSerializer.addEdge(g,pint3Scn1,pLinkMoveScn1);
+
+
+//RCT
+        edgeSerializer.addTreeEdge(g, pserverOld,pintOld);
+        edgeSerializer.addTreeEdge(g, pintOld,lInterfaceold);
+//        rules.addTreeEdge(g, pintOld,sriovPfOld);
+//        rules.addTreeEdge(g, lInterfaceold,sriovVfOld);
+
+        edgeSerializer.addTreeEdge(g, pserverOld,pintOld2);
+        edgeSerializer.addTreeEdge(g, pintOld2,lInterfaceold2);
+        edgeSerializer.addTreeEdge(g, pintOld2,sriovPfOld);
+        edgeSerializer.addTreeEdge(g, sriovVfOld,lInterfaceold2);
+
+        edgeSerializer.addTreeEdge(g,vserver3,lInterface3);
+        edgeSerializer.addTreeEdge(g,lInterface3,sriovVfOld3);
+
+        edgeSerializer.addEdge(g,sriovPfOld,sriovVfOld3);
+        edgeSerializer.addEdge(g,pserverOld,complexOld);
+
+
+
+//ro
+        edgeSerializer.addTreeEdge(g,pserverRoOld,pintOldRo);
+        edgeSerializer.addTreeEdge(g,pintOldRo,lInterfaceoldRo);
+        edgeSerializer.addEdge(g,pserverRoOld,complexOldRO);
+        edgeSerializer.addTreeEdge(g,pserverRoOld,  pintOldRo1);
+        edgeSerializer.addTreeEdge(g,pserverRoNew,  pintNewRo1);
+        edgeSerializer.addEdge(g, pintOldRo1, plinkROonOldRo1);
+        
+        edgeSerializer.addTreeEdge(g, pserverRo3, pintNew31);
+        edgeSerializer.addEdge(g, pintNew31, plinkROonOldRo1);
+        
+        edgeSerializer.addTreeEdge(g, pserverRo4, pintOld41);
+        
+        edgeSerializer.addTreeEdge(g, pintOldRo1, sriovpfOldRo1);
+        
+        
+        // physical-link tests
+        // 1. p-int does not exist on the longer-hostname RO pserver; the p-int and p-link move from the shorter to the longer one
+        pint1ROOld= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint1ROOld")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.OldOne.aaaa.bbbbb/p-interfaces/p-interface/pint1ROOld")
+                .next();
+
+        Vertex pLink1ROOld = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLink1ROOld")
+                .property("service-provider-bandwidth-up-value", 0)
+                .property("service-provider-bandwidth-up-units", "empty")
+                .property("service-provider-bandwidth-down-value", 0)
+                .property("service-provider-bandwidth-down-units", "empty")
+                .next();
+        edgeSerializer.addTreeEdge(g,pserverRoOld,pint1ROOld);
+        edgeSerializer.addEdge(g,pint1ROOld,pLink1ROOld);
+        
+        // 2. p-int matches on the shorter- and longer-hostname RO pservers, p-link does not exist on the longer one; the p-link moves from the shorter to the longer hostname
+        pint2ROOld= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint2RO")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.OldOne.aaaa.bbbbb/p-interfaces/p-interface/pint2RO")
+                .next();
+
+        Vertex pLink2ROOld = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLink2ROOld")
+                .property("service-provider-bandwidth-up-value", 0)
+                .property("service-provider-bandwidth-up-units", "empty")
+                .property("service-provider-bandwidth-down-value", 0)
+                .property("service-provider-bandwidth-down-units", "empty")
+                .next();
+        
+        pint2RONew= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint2RO")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.NewOne.aaaa.ccccccccccc/p-interfaces/p-interface/pint2RO")
+                .next();
+
+        edgeSerializer.addTreeEdge(g,pserverRoOld,pint2ROOld);
+        edgeSerializer.addEdge(g,pint2ROOld,pLink2ROOld);
+        edgeSerializer.addTreeEdge(g,pserverRoNew,pint2RONew);
+        
+        // 3. p-int matches on the shorter- and longer-hostname RO pservers, p-link exists on both; no change to the plink or p-int on the longer one
+        pint3ROOld= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint3RO")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.OldOne.aaaa.bbbbb/p-interfaces/p-interface/pint3RO")
+                .next();
+
+        Vertex pLink3ROOld = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLink3ROOld")
+                .property("service-provider-bandwidth-up-value", 0)
+                .property("service-provider-bandwidth-up-units", "empty")
+                .property("service-provider-bandwidth-down-value", 0)
+                .property("service-provider-bandwidth-down-units", "empty")
+                .next();
+        
+        pint3RONew= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint3RO")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRo.NewOne.aaaa.ccccccccccc/p-interfaces/p-interface/pint3RO")
+                .next();
+        
+        Vertex pLink3RONew = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLink3RONew")
+                .property("service-provider-bandwidth-up-value", 0)
+                .property("service-provider-bandwidth-up-units", "empty")
+                .property("service-provider-bandwidth-down-value", 0)
+                .property("service-provider-bandwidth-down-units", "empty")
+                .next();
+
+        edgeSerializer.addTreeEdge(g,pserverRoOld,pint3ROOld);
+        edgeSerializer.addEdge(g,pint3ROOld,pLink3ROOld);
+        edgeSerializer.addTreeEdge(g,pserverRoNew,pint3RONew);
+        edgeSerializer.addEdge(g,pint3RONew,pLink3RONew);
+        //End physical-links tests
+
+
+//sc3
+        edgeSerializer.addTreeEdge(g,pserverOldScn3,pintOldScn3);
+        edgeSerializer.addTreeEdge(g,pserverNewScn3,pintNewScn3);
+        edgeSerializer.addEdge(g,pintNewScn3,pLinkNewScn3);
+        edgeSerializer.addEdge(g,pintOldScn3,pLinkOldScn3);
+
+//sc2
+        edgeSerializer.addTreeEdge(g,pserverOldScn2,pintOldScn2);
+        edgeSerializer.addTreeEdge(g,pserverOldScn2,pintOld2Scn2);
+        edgeSerializer.addTreeEdge(g,pserverNewScn2,pintNewScn2);
+        edgeSerializer.addEdge(g,pintOldScn2,pLinkOldScn2);
+
+// RCT: new tests for pservers where fqdn is not set
+    
+        Vertex rctP1 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","rctP1")
+                .property("source-of-truth","RCT")
+                .property("fqdn","sameFqdnScn1.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP1")
+                .property("resource-version","1")
+                .next();
+        
+        // Don't throw null pointer with fqdn not set
+        Vertex rctP2 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","rctP2")
+                .property("source-of-truth","RCT")
+//                .property("fqdn","sameScn1.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP2")
+                .property("resource-version","2")
+                .next();
+        
+        Vertex rctP3 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","rctP3")
+                .property("source-of-truth","RCT")
+                .property("fqdn","sameFqdnScn1.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP3")
+                .property("resource-version","3")
+                .next();
+
+        Vertex rctP4 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","rctP4")
+                .property("source-of-truth","RCT")
+                .property("fqdn","")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP4")
+                .property("resource-version","4")
+                .next();
+        
+        Vertex rctP5 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","rctP5")
+                .property("source-of-truth","RCT")
+                .property("fqdn","")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP5")
+                .property("resource-version","5")
+                .next();
+        
+        //pint11 does not have a match on rctP3. So, expect this to move to rctP3. Add test
+        Vertex pint11= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint11")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP1/p-interfaces/p-interface/pint11")
+                .next();
+        
+        // matching interface-name on pint12 and pint31. Delete pint12. Don't move it to rctP3. Add test
+        // interface-name matches b/w vertices pint12 and pint31
+        Vertex pint12= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint12")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP1/p-interfaces/p-interface/pint12")
+                .next();
+
+        // int-name on pint31 is same as pint12
+        Vertex pint31= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint12")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP3/p-interfaces/p-interface/pint12")
+                .next();
+        //End matching interface-name on pint12 and pint31. Delete pint12. Don't move it to rctP3.
+        
+        
+        // Plink exists on both matching pints. Delete old pint, old plink, and edge b/w them - add test
+        // Vertex pint23 has physical link connected to pint14
+        Vertex pint14= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint14")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP1/p-interfaces/p-interface/pint14")
+                .next();
+        Vertex pint23= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint3RCTFqdn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP2/p-interfaces/p-interface/pint23")
+                .next();
+        Vertex plink1423 = g.addV()
+                .property("aai-node-type", "physical-link")
+                .property("link-name", "rctP1:pint14|rctP2:pint23")
+                .property("aai-uri","/network/physical-links/physical-link/plink1423")
+                .next();
+
+        // Vertex pint24 has physical link connected to pint33 (Plink exists on both old and new p-int, no change)
+        // Vertex pint33 has same interface-name as pint14
+        Vertex pint33= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint14")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pint33/p-interfaces/p-interface/pint14")
+                .next();
+        Vertex pint24= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint24")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP2/p-interfaces/p-interface/pint24")
+                .next();
+        Vertex plink2433 = g.addV()
+                .property("aai-node-type", "physical-link")
+                .property("link-name", "rctP2:pint24|rctP3:pint14")
+                .property("aai-uri","/network/physical-links/physical-link/plinkFqdn2443")
+                .next();
+        // End Plink exists on both matching pints. Delete old pint, old plink, and edge b/w them - add test
+        
+        Vertex pint41= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pserver1RCTFqdn4")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver1RCTFqdn4/p-interfaces/p-interface/pserver1RCTFqdn4")
+                .next();
+        
+        Vertex pint51= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pserver1RCTFqdn5")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver1RCTFqdn4/p-interfaces/p-interface/pserver1RCTFqdn5")
+                .next();
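+        // rctP4 and rctP5 both have an empty fqdn, so the migration is expected to ignore this pair and
+        // not move pint41 over to rctP5 (see check 4 in RCThandleNullFqdnSamePints).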
+        
+        // Case: physical link moves from pint13 on rctP1 to pint32 on rctP3 since the latest pserver does not have a plink - Add test
+        // Vertex pint13 has plink connected to pint21
+        Vertex pint21= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint21")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP2/p-interfaces/p-interface/pint21")
+                .next();
+        Vertex pint13= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint13")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP1/p-interfaces/p-interface/pint13")
+                .next();
+        Vertex plink1321 = g.addV()
+                .property("aai-node-type", "physical-link")
+                .property("link-name", "rctP1:pint13|rctP2:pint21")
+                .property("aai-uri","/network/physical-links/physical-link/plink1321")
+                .next();
+
+        // int-name on pint32 is the same as on pint13
+        Vertex pint32= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pint13")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP3/p-interfaces/p-interface/pint13")
+                .next();
+
+        edgeSerializer.addTreeEdge(g,rctP1,pint13);
+        edgeSerializer.addTreeEdge(g,rctP2,pint21);
+        edgeSerializer.addTreeEdge(g, rctP3, pint32);
+        edgeSerializer.addEdge(g, plink1321, pint13);
+        edgeSerializer.addEdge(g, plink1321, pint21);
+
+        // End Case: physical link moves from pint13 on rctP1 to pint32 on rctP3 since the latest pserver does not have a plink
+        
+        
+        edgeSerializer.addTreeEdge(g,rctP1,pint11);
+        edgeSerializer.addTreeEdge(g,rctP1,pint12);
+        edgeSerializer.addTreeEdge(g,rctP3,pint31);
+        edgeSerializer.addTreeEdge(g,rctP3,pint33);
+        edgeSerializer.addTreeEdge(g,rctP4,pint41);
+        edgeSerializer.addTreeEdge(g,rctP5,pint51);
+        
+        edgeSerializer.addTreeEdge(g, rctP2, pint23);
+        edgeSerializer.addTreeEdge(g, rctP1, pint14);
+        edgeSerializer.addTreeEdge(g, rctP2, pint24);
+        
+        
+        edgeSerializer.addEdge(g, plink1423, pint14);
+        edgeSerializer.addEdge(g, plink1423, pint23);
+        
+        edgeSerializer.addEdge(g, plink2433, pint24);
+        edgeSerializer.addEdge(g, plink2433, pint33);
+
+ //lag-interfaces
+      //lagint11 does not have a match on rctP3. So, expect this to move to rctP3. Add test
+        Vertex lagint11= g.addV()
+                .property("aai-node-type", "lag-interface")
+                .property("interface-name", "lagint11")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP1/lag-interfaces/lag-interface/lagint11")
+                .next();
+        edgeSerializer.addTreeEdge(g, rctP1, lagint11);
+        
+        //lagint12 matches with lagint31 on rctP3
+        Vertex lagint12= g.addV()
+                .property("aai-node-type", "lag-interface")
+                .property("interface-name", "lagint12")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP1/lag-interfaces/lag-interface/lagint12")
+                .next();
+        Vertex lagint31= g.addV()
+                .property("aai-node-type", "lag-interface")
+                .property("interface-name", "lagint12")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP3/lag-interfaces/lag-interface/lagint12")
+                .next();
+        edgeSerializer.addTreeEdge(g, rctP1, lagint12);
+        edgeSerializer.addTreeEdge(g, rctP3, lagint31);
+        
+        //Verify manyToOne edge scenario
+        pserverRCTScn6 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverRCTScn6")
+                .property("source-of-truth","RCT")
+                .property("fqdn","Scn6.pserverRCTScn6")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRCTScn6")
+                .property("resource-version","1")
+                .next();
+        
+        zoneScn61 = g.addV().property("aai-node-type", "zone")
+                       .property("zone-id", "zone-61")
+                       .next();
+        
+        pserverRCT1Scn6 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","Scn6.pserverRCT1Scn6")
+                .property("source-of-truth","RCT")
+                .property("fqdn","Scn6.pserverRCT1Scn6")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRCT1Scn6")
+                .property("resource-version","4")
+                .next();
+        
+        zoneScn62 = g.addV().property("aai-node-type", "zone")
+                       .property("zone-id", "zone-62")
+                       .next();
+        
+        edgeSerializer.addEdge(g,  pserverRCTScn6, zoneScn61);
+        edgeSerializer.addEdge(g,  pserverRCT1Scn6, zoneScn62);
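+        // Expected manyToOne behaviour (see checkRCTPserverHasRelnToOnly1Zone): after the migration only
+        // the higher resource-version pserver should keep a zone edge, roughly:
+        //   g.V().has("aai-node-type","pserver").has("hostname","Scn6.pserverRCT1Scn6")
+        //        .out("org.onap.relationships.inventory.LocatedIn").has("aai-node-type","zone").count()  // expected 1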
+        
+        //Verify manyToMany edge scenario
+        Vertex gvnf1 = g.addV().property("aai-node-type", "generic-vnf")
+                       .property("vnf-id", "vnf-1")
+                       .next();
+        
+        Vertex gvnf2 = g.addV().property("aai-node-type", "generic-vnf")
+                       .property("vnf-id", "vnf-2")
+                       .next();
+        
+        edgeSerializer.addEdge(g,  pserverRCTScn6, gvnf1);
+        edgeSerializer.addEdge(g,  pserverRCT1Scn6, gvnf2);
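+        // Expected manyToMany behaviour (see checkRCTPserverHasRelnTo2GenericVnfs): both generic-vnf edges
+        // should end up on the surviving pserver, roughly:
+        //   g.V().has("aai-node-type","pserver").has("hostname","Scn6.pserverRCT1Scn6")
+        //        .in("tosca.relationships.HostedOn").has("aai-node-type","generic-vnf").count()  // expected 2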
+        
+        
+        // Empty string first token test
+        Vertex pserver1EmptyFirstToken = g.addV().property("aai-node-type", "pserver")
+                .property("hostname",".pserver1EmptyFirstToken")
+                .property("source-of-truth","RO")
+                .property("fqdn","sameScn1.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/.pserver1EmptyFirstToken")
+                .property("resource-version","1")
+                .next();
+        Vertex pserver2EmptyFirstToken= g.addV().property("aai-node-type", "pserver")
+                .property("hostname",".pserver2EmptyFirstToken.1")
+                .property("source-of-truth","RO")
+                .property("fqdn","sameScn1.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/.pserver2EmptyFirstToken")
+                .property("resource-version","2")
+                .next();
+        
+        Vertex pserver1EmptyFirstTokenFqdn = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserver1EmptyFirstTokenFqdn")
+                .property("source-of-truth","RCT")
+                .property("fqdn",".rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver1EmptyFirstTokenFqdn")
+                .property("resource-version","1")
+                .next();
+        Vertex pserver2EmptyFirstTokenFqdn= g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserver2EmptyFirstTokenFqdn")
+                .property("source-of-truth","RCT")
+                .property("fqdn",".rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver2EmptyFirstTokenFqdn")
+                .property("resource-version","2")
+                .next();
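+        // Per ignoreEmptyStringFirstTokenFqdn below, pservers whose hostname or fqdn begins with an empty
+        // first token are expected to be left alone: both pserver1EmptyFirstTokenFqdn and
+        // .pserver1EmptyFirstToken should still exist after the migration.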
+        
+        TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+        GraphTraversalSource traversal = g;
+        GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+        when (spy.tx()).thenReturn(tx);
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
+        when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+
+        migration = new MigrateSameSourcedRCTROPserverData(spy,loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        migration.run();
+    }
+
+    @After
+    public void cleanUp() {
+        tx.tx().rollback();
+        graph.close();
+    }
+
+    @Test
+    public void RCT() throws Exception {
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverNew").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").hasNext());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverNew/p-interfaces/p-interface/pintOld", pintOld.property("aai-uri").value().toString());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverNew/p-interfaces/p-interface/pintOld/l-interfaces/l-interface/linterfaceold", lInterfaceold.property("aai-uri").value().toString());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverNew/p-interfaces/p-interface/pintOld2/l-interfaces/l-interface/linterfaceold2", lInterfaceold2.property("aai-uri").value().toString());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverNew/p-interfaces/p-interface/pintOld2/sriov-pfs/sriov-pf/sriovPfOld",sriovPfOld.property("aai-uri").value().toString());
+        assertEquals(true, g.V().has("aai-node-type", "sriov-pf").has("pf-pci-id","sriovPfOld").in("org.onap.relationships.inventory.Uses").has("aai-node-type","sriov-vf").hasNext());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverNew/p-interfaces/p-interface/pintOld2/l-interfaces/l-interface/linterfaceold2/sriov-vfs/sriov-vf/sriovVfOld",sriovVfOld.property("aai-uri").value().toString());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverNew").out("org.onap.relationships.inventory.LocatedIn").has("aai-node-type","complex").hasNext());
+
+    }
+    @Test
+    public void RO() throws Exception {
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").has("interface-name","pintOldRo").hasNext());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRo.NewOne.aaaa.ccccccccccc/p-interfaces/p-interface/pintOldRo", pintOldRo.property("aai-uri").value().toString());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRo.NewOne.aaaa.ccccccccccc/p-interfaces/p-interface/pintOldRo/l-interfaces/l-interface/linterfaceoldRo", lInterfaceoldRo.property("aai-uri").value().toString());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").out("org.onap.relationships.inventory.LocatedIn").has("aai-node-type","complex").hasNext());
+//        System.out.println("************** SRIOV-PF *************"+g.V().has("aai-node-type","pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo")
+//                     .has("aai-node-type","p-interface").has("interface-name","pintRo1").in().has("aai-node-type", "sriov-pf").toList().get(0).property("pf-pci-id").value().toString());
+        assertEquals(true, g.V().has("aai-node-type","pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pintRo1").in().has("aai-node-type", "sriov-pf").hasNext());
+        //.has("pf-pci-id","sriovpfOldRo1")
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRo.NewOne.aaaa.ccccccccccc/p-interfaces/p-interface/pintRo1/sriov-pfs/sriov-pf/sriovpfOldRo1",
+                       g.V().has("aai-node-type","sriov-pf").has("pf-pci-id","sriovpfOldRo1").toList().get(0).property("aai-uri").value().toString());
+        assertNotEquals("/cloud-infrastructure/pservers/pserver/pserverRo.OldOne.aaaa.bbbbb/p-interfaces/p-interface/pintRo1/sriov-pfs/sriov-pf/sriovpfOldRo1",
+                       g.V().has("aai-node-type","sriov-pf").has("pf-pci-id","sriovpfOldRo1").toList().get(0).property("aai-uri").value().toString());
+    }
+
+    @Test
+    public void RCTplinkScenario3() throws Exception {
+        assertEquals(false, g.V().has("aai-node-type", "pserver").has("hostname","pserverNewplink").in("tosca.relationships.network.BindsTo")
+                .has("aai-node-type","p-interface").out("tosca.relationships.network.LinksTo").has("aai-node-type","physical-link").has("link-name","pLinkOld").hasNext());
+
+    }
+
+    @Test
+    public void RCTpLinkMoveScn2NameChange() throws Exception {
+        assertEquals("pserver2Scn2:pint1Scn2|pserver3Scn2:pint3Scn2", pLinkMoveScn2.property("link-name").value().toString());
+
+    }
+
+    @Test
+    public void RCTpLinkMoveScn1NameChange() throws Exception {
+        assertEquals("pserver2Scn1:pint1Scn1|pserver3Scn1:pint3Scn1", pLinkMoveScn1.property("link-name").value().toString());
+
+    }
+
+    @Test
+    public void RCTplinkScenario2() throws Exception {
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverNewScn2").in("tosca.relationships.network.BindsTo")
+                .has("aai-node-type","p-interface").out("tosca.relationships.network.LinksTo").has("aai-node-type","physical-link").has("link-name","pLinkOldScn2").hasNext());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverNewScn2/p-interfaces/p-interface/pintOld2Scn2", pintOld2Scn2.property("aai-uri").value().toString());
+
+    }
+    
+    @Test
+    public void roSuccessfulMovePlinkScn1() throws Exception {
+        assertEquals("aaaa.cccc.cccc.dddd",g.V().has("aai-node-type","pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").next().value("fqdn").toString());
+        assertEquals(false, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.OldOne.aaaa.bbbbb").hasNext());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").has("interface-name","pint1ROOld").hasNext());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRo.NewOne.aaaa.ccccccccccc/p-interfaces/p-interface/pint1ROOld", pint1ROOld.property("aai-uri").value().toString());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pint1ROOld").out("tosca.relationships.network.LinksTo").has("link-name","pLink1ROOld").hasNext());
+    }
+    
+    @Test
+    public void roSuccessfulSamePIntScn() throws Exception {
+        assertEquals("aaaa.cccc.cccc.dddd",g.V().has("aai-node-type","pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").next().value("fqdn").toString());
+        assertEquals(false, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.OldOne.aaaa.bbbbb").hasNext());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").has("interface-name","pint2RO").hasNext());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRo.NewOne.aaaa.ccccccccccc/p-interfaces/p-interface/pint2RO", pint2RONew.property("aai-uri").value().toString());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pint2RO").out("tosca.relationships.network.LinksTo").has("link-name","pLink2ROOld").hasNext());
+    }
+    
+    @Test
+    public void roSuccessfulSamePIntScnPlinkExistsOnBoth() throws Exception {
+        assertEquals("aaaa.cccc.cccc.dddd",g.V().has("aai-node-type","pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").next().value("fqdn").toString());
+        assertEquals(false, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.OldOne.aaaa.bbbbb").hasNext());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").has("interface-name","pint3RO").hasNext());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRo.NewOne.aaaa.ccccccccccc/p-interfaces/p-interface/pint3RO", pint3RONew.property("aai-uri").value().toString());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pint3RO").out("tosca.relationships.network.LinksTo").has("link-name","pLink3RONew").hasNext());
+    }
+
+    @Test
+    public void RCThandleNullFqdnSamePints() throws Exception {
+       //1. pint11 from rctP1 moves to rctP3
+       assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","rctP3").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pint11").hasNext());
+       assertEquals(false, g.V().has("aai-node-type", "p-interface").has("aai-uri","/cloud-infrastructure/pservers/pserver/rctP1/p-interfaces/p-interface/pint11").hasNext());
+       assertEquals(true, g.V().has("aai-node-type", "p-interface").has("aai-uri","/cloud-infrastructure/pservers/pserver/rctP3/p-interfaces/p-interface/pint11").hasNext());
+       
+       //2. pint12 int-name matches with pint31. So, verify that p-int does not move from rctP1 to rctP3
+       assertEquals("rctP3 has only 1 pint with name pint12", new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP3").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pint12").count().next());
+       
+       //3. Verify that the p-interface from pserver is not moved to another pserver that has null fqdn
+       assertEquals(false, g.V().has("aai-node-type", "pserver").has("hostname","rctP2").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pint11").hasNext());
+                       
+       //4. If the fqdn is "" within 2 RCT pservers, ignore that case. Don't move the p-int from old resource-version to new resource-version pserver
+       assertEquals(false, g.V().has("aai-node-type", "pserver").has("hostname","rctP5").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pint41").hasNext());
+       assertEquals("rctP5 has only 1 p-interface", new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP5").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").count().next());            
+       
+       //5. plink is moved from pint13 on rctP1 to pint32 on rctP3. Both p-ints have the same interface-name
+       assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","rctP3").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pint13").out().has("aai-node-type", "physical-link").hasNext());
+       System.out.println("plink on pint13 is "+  g.V().has("aai-node-type", "pserver").has("hostname","rctP3").in("tosca.relationships.network.BindsTo")
+               .has("aai-node-type","p-interface").has("interface-name","pint13").out().has("aai-node-type", "physical-link").next().property("link-name").value().toString());
+       
+       assertEquals(true, g.V().has("aai-node-type","physical-link").has("link-name","rctP2:pint21|rctP3:pint13").hasNext());
+       
+       //6. plink is not moved from pint14 on rctP1 to pint33 on rctP3 (both have interface-name pint14); since pint33 already has its own plink, the old plink is deleted
+       assertEquals(true, g.V().has("aai-node-type","physical-link").has("link-name","rctP2:pint24|rctP3:pint14").hasNext());
+       assertEquals(false, g.V().has("aai-node-type","physical-link").has("link-name","rctP1:pint14|rctP2:pint23").hasNext());
+       
+    }
+    
+    @Test
+    public void testRCTLagInterfaces() throws Exception {
+       //1. lagint11 from rctP1 moves to rctP3
+       assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","rctP3").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","lag-interface").has("interface-name","lagint11").hasNext());
+       assertEquals(false, g.V().has("aai-node-type", "lag-interface").has("aai-uri","/cloud-infrastructure/pservers/pserver/rctP1/lag-interfaces/lag-interface/lagint11").hasNext());
+       assertEquals(true, g.V().has("aai-node-type", "lag-interface").has("aai-uri","/cloud-infrastructure/pservers/pserver/rctP3/lag-interfaces/lag-interface/lagint11").hasNext());
+
+       
+       //2. lagint12 int-name matches with lagint31. So, verify that lag-int does not move from rctP1 to rctP3
+       assertEquals("rctP3 has only 1 lag-interface with name lagint12", new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP3").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","lag-interface").has("interface-name","lagint12").count().next());
+       
+    }
+    
+    @Test
+    public void checkRCTPserverHasRelnToOnly1Zone() throws Exception {
+
+               assertEquals("Edge to only 1 Zone exists", new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","Scn6.pserverRCT1Scn6").out("org.onap.relationships.inventory.LocatedIn")
+                .has("aai-node-type","zone").count().next());
+               assertEquals(true, g.V().has("aai-node-type", "zone").has("zone-id","zone-62").hasNext());
+               //Verify no edge exists from zone61 to lower resource-version RCT pserver
+               assertEquals(false, g.V().has("aai-node-type", "zone").has("zone-id","zone-61").in().has("aai-node-type", "pserver").hasNext());
+    }
+    
+    @Test
+    public void checkRCTPserverHasRelnTo2GenericVnfs() throws Exception {
+
+               assertEquals("Edge to 2 generic-vnfs exists", new Long(2L), g.V().has("aai-node-type", "pserver").has("hostname","Scn6.pserverRCT1Scn6").in("tosca.relationships.HostedOn")
+                .has("aai-node-type","generic-vnf").count().next());
+               assertEquals(true, g.V().has("aai-node-type", "generic-vnf").has("vnf-id","vnf-1").out().has("aai-node-type", "pserver").has("hostname", "Scn6.pserverRCT1Scn6").hasNext());
+               //Verify no edge exists from vnf-1 to lower resource-version pserver
+               assertEquals(false, g.V().has("aai-node-type", "generic-vnf").has("vnf-id","vnf-1").out().has("aai-node-type", "pserver").has("hostname", "Scn6.pserverRCTScn6").hasNext());
+    }
+    
+    @Test
+    public void roPlinkNewMovesToLongerHostNameROPserver() throws Exception {
+
+       assertEquals(false, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").has("interface-name","pintOldRo1").hasNext());
+       assertEquals(false, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.OldOne.aaaa.bbbbb").hasNext());
+       assertEquals(false, g.V().has("id", "pintOldRo1").hasNext());
+       //Verify that the physical link moves to the new pserver
+       assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRo.NewOne.aaaa.ccccccccccc").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pintRo1").out().has("link-name","plinkROonOldRo1").hasNext());
+       //Verify complex does not get attached to pserverRO5
+       assertEquals("Complex is related to only 1 pserver", new Long(1L), g.V().has("physical-location-id", "complexOldRO").in("org.onap.relationships.inventory.LocatedIn").count().next());
+    }
+    
+    @Test
+    public void ignoreEmptyStringFirstTokenFqdn() throws Exception {
+       List<Vertex> pserverList = g.V().has("aai-node-type", "pserver").has("hostname").toList();
+       pserverList.forEach(v ->System.out.println(v.property("hostname").value().toString()));
+       assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserver1EmptyFirstTokenFqdn").hasNext());
+       assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname",".pserver1EmptyFirstToken").hasNext());
+       
+       
+       System.out.println(UUID.randomUUID().toString());
+       System.out.println(UUID.randomUUID().toString());
+       
+    }
+    
+}
diff --git a/src/test/java/org/onap/aai/migration/v14/MigrateSdnaIvlanDataTest.java b/src/test/java/org/onap/aai/migration/v14/MigrateSdnaIvlanDataTest.java
new file mode 100644 (file)
index 0000000..c7b11b9
--- /dev/null
@@ -0,0 +1,283 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v14;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+public class MigrateSdnaIvlanDataTest extends AAISetup {
+       
+       private final String CONFIGURATION_NODE_TYPE = "configuration";
+       private final String EVC_NODE_TYPE = "evc";
+       private final String FORWARDER_NODE_TYPE = "forwarder";
+       private final String FORWRDER_EVC_NODE_TYPE = "forwarder-evc";  
+       private final String FORWARDING_PATH_NODE_TYPE = "forwarding-path";
+       private final String LAG_INTERFACE_NODE_TYPE = "lag-interface";
+       private final String P_INTERFACE_NODE_TYPE = "p-interface";
+       private final String PNF_NODE_TYPE = "pnf";
+       private final String SERVICE_INSTANCE_NODE_TYPE = "service-instance";
+       
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private MigrateSdnaIvlanData migration;
+       private GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+               
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
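+               // Assumption: MigrateSdnaIvlanData reads its SDN-A ivlan input relative to BUNDLECONFIG_DIR,
+               // which is why the property is pointed at src/test/resources for this test.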
+               
+               //PNF -  pnf1
+               Vertex pnf1 = g.addV()
+                               .property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf1")
+                               .next();
+               
+               //P-INTERFACE - "11111.1"
+               Vertex pInterface1 = g.addV()
+                               .property("aai-node-type", "p-interface")
+                               .property("interface-name", "11111.1")
+                               .next();
+               
+               //LAG-INTERFACE - lag-interface1
+               Vertex lagInterface1 = g.addV()
+                               .property("aai-node-type", "lag-interface")
+                               .property("interface-name", "lag-interface1")
+                               .next();
+               
+               //CONFIGURATION - "test/evc/one"
+               Vertex configuration1 = g.addV()
+                                               .property("aai-node-type", "configuration")
+                                               .property("configuration-id", "test/evc/one")
+                                               .next();
+               
+               //CONFIGURATION - "test/evc/one-1"
+               Vertex configuration1_1 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "test/evc/one-1")
+                               .next();
+               //CONFIGURATION - "test/evc/one-2"
+               Vertex configuration1_2 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "test/evc/one-2")
+                               .next();
+               
+               //FORWARDER - "test/evc/one" sequence 1
+               Vertex forwarder1_1 = g.addV()
+                               .property("aai-node-type", "forwarder")
+                               .property("sequence", 1)
+                               .property("forwarder-role", "ingress")
+                               .next();
+
+               //FORWARDER - "test/evc/one"  sequence 2
+               Vertex forwarder1_2 = g.addV()
+                               .property("aai-node-type", "forwarder")
+                               .property("sequence", 2)
+                               .property("forwarder-role", "ingress")
+                               .next();
+       
+               //FORWARDING-PATH - "test/evc/one"
+               Vertex forwardingPath1 = g.addV()
+                               .property("aai-node-type", "forwarding-path")
+                               .property("forwarding-path-id", "test/evc/one")
+                               .property("forwarding-path-name", "test/evc/one")
+                               .next();
+               
+               //EVC - "test/evc/one"
+               Vertex evc = g.addV()
+                               .property("aai-node-type", "evc")
+                               .property("evc-id", "test/evc/one")
+                               .next();                
+               
+               //FORWARDER-EVC - "test/evc/one-1"
+               Vertex fwdEvc1_1 = g.addV()
+                               .property("aai-node-type", "forwarder-evc")
+                               .property("forwarder-evc-id", "test/evc/one-1")
+                               .next();
+       
+               //FORWARDER-EVC - "test/evc/one-2"
+               Vertex fwdEvc1_2 = g.addV()
+                               .property("aai-node-type", "forwarder-evc")
+                               .property("forwarder-evc-id", "test/evc/one-2")
+                               .next();
+               
+               //pnf -> p-interface -> forwarder -> configuration -> forwarder-evc
+               //pnf1 -> pInterface1 -> forwarder1_1 -> configuration1_1 -> fwdEvc1_1
+               edgeSerializer.addTreeEdge(g, pnf1, pInterface1);
+               edgeSerializer.addEdge(g, pInterface1,forwarder1_1);
+               edgeSerializer.addEdge(g, forwarder1_1, configuration1_1);
+               
+               edgeSerializer.addEdge(g, forwardingPath1, configuration1);
+               edgeSerializer.addTreeEdge(g, forwarder1_1, forwardingPath1);
+               edgeSerializer.addTreeEdge(g, forwarder1_2, forwardingPath1);   
+               
+               edgeSerializer.addTreeEdge(g, configuration1_1, fwdEvc1_1);
+               
+               //pnf -> lag-interface -> forwarder -> configuration -> forwarder-evc
+               //pnf1 -> lagInterface1 -> forwarder1_2 -> configuration1_2 -> fwdEvc1_2
+               edgeSerializer.addTreeEdge(g, pnf1, lagInterface1);
+               edgeSerializer.addEdge(g, forwarder1_2, configuration1_2);
+               edgeSerializer.addEdge(g, lagInterface1, forwarder1_2);
+               edgeSerializer.addTreeEdge(g, configuration1_2, fwdEvc1_2);
+               
+               
+               
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when(spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+
+               migration = new MigrateSdnaIvlanData(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
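+               // After run(), the forwarder-evc vertices are expected to carry the ivlan values asserted in
+               // testSdnaIvlanMigration ("111" for test/evc/one-1 and "222" for test/evc/one-2).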
+                       
+       }
+
+       @Test
+       public void testSdnaIvlanMigration() {          
+       
+               assertTrue("Value of node-type forwarder-evc, forwarder-evc-id of test/evc/one-1 has been updated with the ivlan property value of 111",
+                               g.V()
+                               .has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).has("pnf-name", "pnf1")
+                               .in("tosca.relationships.network.BindsTo")
+                               .has(AAIProperties.NODE_TYPE, P_INTERFACE_NODE_TYPE).has("interface-name", "11111.1")
+                               .in("org.onap.relationships.inventory.ForwardsTo")
+                               .where(__.out("org.onap.relationships.inventory.BelongsTo").has("forwarding-path-id", "test/evc/one"))
+                               .out("org.onap.relationships.inventory.Uses")
+                               .in("org.onap.relationships.inventory.BelongsTo")
+                               .has("ivlan", "111").hasNext());
+               
+               assertTrue("Value of node-type forwarder-evc, forwarder-evc-id of test/evc/one-2 has been updated with the ivlan property value of 222",
+                               g.V()
+                               .has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).has("pnf-name", "pnf1")
+                               .in("tosca.relationships.network.BindsTo")
+                               .has(AAIProperties.NODE_TYPE, LAG_INTERFACE_NODE_TYPE).has("interface-name", "lag-interface1")
+                               .in("org.onap.relationships.inventory.ForwardsTo")
+                               .where(__.out("org.onap.relationships.inventory.BelongsTo").has("forwarding-path-id", "test/evc/one"))
+                               .out("org.onap.relationships.inventory.Uses")
+                               .in("org.onap.relationships.inventory.BelongsTo")
+                               .has("ivlan", "222").hasNext());
+               
+               assertTrue("Value of node-type P-INTERFACE with an interface-name of 11111.2 does not exist in Graph. Ivlan not Updated",
+                               !g.V()
+                               .has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).has("pnf-name", "pnf1")
+                               .in("tosca.relationships.network.BindsTo")
+                               .has(AAIProperties.NODE_TYPE, P_INTERFACE_NODE_TYPE).has("interface-name", "11111.2")
+                               .in("org.onap.relationships.inventory.ForwardsTo")
+                               .where(__.out("org.onap.relationships.inventory.BelongsTo").has("forwarding-path-id", "test/evc/one"))
+                               .out("org.onap.relationships.inventory.Uses")
+                               .in("org.onap.relationships.inventory.BelongsTo")
+                               .has("ivlan", "333").hasNext());
+               
+               assertTrue("Value of node-type LAG-INTERFACE with an interface-name of lag-interface2 does not exist in Graph. Ivlan not Updated ",
+                               !g.V()
+                               .has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).has("pnf-name", "pnf1")
+                               .in("tosca.relationships.network.BindsTo")
+                               .has(AAIProperties.NODE_TYPE, LAG_INTERFACE_NODE_TYPE).has("interface-name", "lag-interface2")
+                               .in("org.onap.relationships.inventory.ForwardsTo")
+                               .where(__.out("org.onap.relationships.inventory.BelongsTo").has("forwarding-path-id", "test/evc/one"))
+                               .out("org.onap.relationships.inventory.Uses")
+                               .in("org.onap.relationships.inventory.BelongsTo")
+                               .has("ivlan", "444").hasNext());
+               
+               
+               assertTrue("Value of node-type P-INTERFACE with an interface-name of 11111.3 and evc of test/evc/one_2 does not exist in Graph. Ivlan not Updated ",
+                               !g.V()
+                               .has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).has("pnf-name", "pnf1")
+                               .in("tosca.relationships.network.BindsTo")
+                               .has(AAIProperties.NODE_TYPE, P_INTERFACE_NODE_TYPE).has("interface-name", "11111.3")
+                               .in("org.onap.relationships.inventory.ForwardsTo")
+                               .where(__.out("org.onap.relationships.inventory.BelongsTo").has("forwarding-path-id", "test/evc/one_2"))
+                               .out("org.onap.relationships.inventory.Uses")
+                               .in("org.onap.relationships.inventory.BelongsTo")
+                               .has("ivlan", "555").hasNext());
+               
+               assertTrue("Value of node-type LAG-INTERFACE with an interface-name of lag-interface3 and evc of test/evc/one_2 does not exist in Graph. Ivlan not Updated ",
+                               !g.V()
+                               .has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).has("pnf-name", "pnf1")
+                               .in("tosca.relationships.network.BindsTo")
+                               .has(AAIProperties.NODE_TYPE, LAG_INTERFACE_NODE_TYPE).has("interface-name", "lag-interface3")
+                               .in("org.onap.relationships.inventory.ForwardsTo")
+                               .where(__.out("org.onap.relationships.inventory.BelongsTo").has("forwarding-path-id", "test/evc/one_2"))
+                               .out("org.onap.relationships.inventory.Uses")
+                               .in("org.onap.relationships.inventory.BelongsTo")
+                               .has("ivlan", "666").hasNext());        
+               
+               assertTrue("Value of node-type PNF with a pnf-name of pnf2 does not exist in Graph. Ivlan not Updated ",
+                               !g.V()
+                               .has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).has("pnf-name", "pnf2")
+                               .in("tosca.relationships.network.BindsTo")
+                               .has(AAIProperties.NODE_TYPE, P_INTERFACE_NODE_TYPE).has("interface-name", "22222.2")
+                               .in("org.onap.relationships.inventory.ForwardsTo")
+                               .where(__.out("org.onap.relationships.inventory.BelongsTo").has("forwarding-path-id", "test/evc/two"))
+                               .out("org.onap.relationships.inventory.Uses")
+                               .in("org.onap.relationships.inventory.BelongsTo")
+                               .has("ivlan", "777").hasNext());
+               
+               assertTrue("Value of node-type PNF with a pnf-name of pnf2 (lag-interface path) does not exist in Graph. Ivlan not Updated ",
+                               !g.V()
+                               .has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).has("pnf-name", "pnf2")
+                               .in("tosca.relationships.network.BindsTo")
+                               .has(AAIProperties.NODE_TYPE, LAG_INTERFACE_NODE_TYPE).has("interface-name", "lag-interface2")
+                               .in("org.onap.relationships.inventory.ForwardsTo")
+                               .where(__.out("org.onap.relationships.inventory.BelongsTo").has("forwarding-path-id", "test/evc/two"))
+                               .out("org.onap.relationships.inventory.Uses")
+                               .in("org.onap.relationships.inventory.BelongsTo")
+                               .has("ivlan", "888").hasNext());
+               
+       }
+       
+}
diff --git a/src/test/java/org/onap/aai/migration/v14/PserverDedupWithDifferentSourcesOfTruthTest.java b/src/test/java/org/onap/aai/migration/v14/PserverDedupWithDifferentSourcesOfTruthTest.java
new file mode 100644 (file)
index 0000000..a541eca
--- /dev/null
@@ -0,0 +1,496 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v14;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.List;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+public class PserverDedupWithDifferentSourcesOfTruthTest extends AAISetup {
+
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+    private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+    Loader loader;
+    TransactionalGraphEngine dbEngine;
+    JanusGraph graph;
+    PserverDedupWithDifferentSourcesOfTruth migration;
+    JanusGraphTransaction tx;
+    GraphTraversalSource g;
+
+//scn1
+    Vertex pIntRo;
+    Vertex lInterfaceRo;
+    Vertex pserverRCT;
+    Vertex complexRO;
+//scn2
+
+    Vertex pserverRCTScn2;
+    Vertex pIntRoScn2;
+    Vertex lInterfaceRoScn2;
+    Vertex complexROScn2;
+    Vertex lInterfaceRctScn2;
+    Vertex pIntRctScn2;
+    Vertex complexRctScn2;
+    
+    //physical link
+    Vertex pintPlinkScn1;
+    Vertex samePintScn4RO;
+    Vertex samePintScn4RCT;
+    Vertex pserverRCTPlinkScn4;
+    
+    //Scn3
+    Vertex pserverRCTScn3;
+    Vertex complexScn3;
+    Vertex pserverROScn3;
+    
+    //ManyToOne edge scenario
+    Vertex pserverRCTScn6;
+    Vertex pserverROScn6;
+    Vertex zoneScn61;
+    Vertex zoneScn62;
+
+    @Before
+    public void setUp() throws Exception {
+        graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+        tx = graph.newTransaction();
+        g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType,schemaVersions.getDefaultVersion());
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                type,
+                loader);
+//Scn1 empty RCT move everything over
+        pserverRCT = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverRCT")
+                .property("source-of-truth","RCT")
+                .property("fqdn","tttt.bbbb.cccc.dddd")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRCT")
+                .property("resource-version","1")
+                .next();
+
+        Vertex pserverRO = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","tttt.RoHostname")
+                .property("source-of-truth","RO")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/tttt.RoHostname")
+                .property("resource-version","2")
+                .next();
+        pIntRo = g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pIntRo")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/tttt.RoHostname/p-interfaces/p-interface/pIntRo")
+                .next();
+        lInterfaceRo = g.addV().property("aai-node-type", "l-interface")
+                .property("interface-name", "lInterfaceRo")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/tttt.RoHostname/p-interfaces/p-interface/pIntRo/l-interfaces/l-interface/lInterfaceRo")
+                .next();
+        complexRO = g.addV().property("aai-node-type", "complex")
+                .property("physical-location-id","complexRO")
+                .property("aai-uri","/cloud-infrastructure/complexes/complex/complexRO")
+                .next();
+        
+        // physical-link tests
+        //1. p-int does not exist on RCT, p-int and p-link moves from RO to RCT
+        pintPlinkScn1= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintPlinkScn1")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/tttt.RoHostname/p-interfaces/p-interface/pintPlinkScn1")
+                .next();
+
+        Vertex pLinkScn1 = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLinkScn1")
+                .property("service-provider-bandwidth-up-value", 0)
+                .property("service-provider-bandwidth-up-units", "empty")
+                .property("service-provider-bandwidth-down-value", 0)
+                .property("service-provider-bandwidth-down-units", "empty")
+                .property("aai-uri","/network/physical-links/physical-link/pLinkScn1")
+                .next();
+        edgeSerializer.addTreeEdge(g,pserverRO,pintPlinkScn1);
+        edgeSerializer.addEdge(g,pintPlinkScn1,pLinkScn1);
+        
+        //2. p-int matches on RCT, p-int and p-link don't move from RO to RCT
+        
+        Vertex pserverROSPlinkScn4 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","Scn4.pserverROSPlinkScn4")
+                .property("source-of-truth","RO")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/Scn4.pserverROSPlinkScn4")
+                .property("resource-version","4")
+                .next();
+        
+        pserverRCTPlinkScn4 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverRCTPlinkScn4")
+                .property("source-of-truth","RCT")
+                .property("fqdn","Scn4.pserverRCTPlinkScn4")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRCTPlinkScn4")
+                .property("resource-version","3")
+                .next();
+        
+        samePintScn4RO= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintPlinkScn4")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/Scn4.pserverROSPlinkScn4/p-interfaces/p-interface/pintPlinkScn4")
+                .next();
+
+        Vertex plinkScn2 = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "plinkScn2")
+                .property("service-provider-bandwidth-up-value", 0)
+                .property("service-provider-bandwidth-up-units", "empty")
+                .property("service-provider-bandwidth-down-value", 0)
+                .property("service-provider-bandwidth-down-units", "empty")
+                .property("aai-uri","/network/physical-links/physical-link/pLinkScn2")
+                .next();
+        
+        samePintScn4RCT= g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pintPlinkScn4")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRCTPlinkScn4/p-interfaces/p-interface/pintPlinkScn4")
+                .next();
+        
+        edgeSerializer.addTreeEdge(g,pserverROSPlinkScn4,samePintScn4RO);
+        edgeSerializer.addEdge(g,samePintScn4RO,plinkScn2);
+        edgeSerializer.addTreeEdge(g,pserverRCTPlinkScn4,samePintScn4RCT);
+        //End physical-links tests
+
+//Scn2 RCT has children already
+
+        pserverRCTScn2 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverRCTScn2")
+                .property("source-of-truth","RCT")
+                .property("fqdn","Scn2.pserverRCTScn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRCTScn2")
+                .property("resource-version","3")
+                .next();
+        pIntRctScn2 = g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pIntRctScn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRCTScn2/p-interfaces/p-interface/pIntRctScn2")
+                .next();
+        lInterfaceRctScn2 = g.addV().property("aai-node-type", "l-interface")
+                .property("interface-name", "lInterfaceRctScn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRCTScn2/p-interfaces/p-interface/pIntRctScn2/l-interfaces/l-interface/lInterfaceRctScn2")
+                .next();
+        complexRctScn2 = g.addV().property("aai-node-type", "complex")
+                .property("physical-location-id","complexRctScn2")
+                 .property("aai-uri","/cloud-infrastructure/complexes/complex/complexRctScn2")
+                .next();
+        Vertex pserverROScn2 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","Scn2.pserverROScn2")
+                .property("source-of-truth","RO")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/Scn2.pserverROScn2")
+                .property("resource-version","4")
+                .next();
+        pIntRoScn2 = g.addV()
+                .property("aai-node-type", "p-interface")
+                .property("interface-name", "pIntRoScn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/Scn2.pserverROScn2/p-interfaces/p-interface/pIntRoScn2")
+                .next();
+        lInterfaceRoScn2 = g.addV().property("aai-node-type", "l-interface")
+                .property("interface-name", "lInterfaceRoScn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/Scn2.pserverROScn2/p-interfaces/p-interface/pIntRoScn2/l-interfaces/l-interface/lInterfaceRoScn2")
+                .next();
+        complexROScn2 = g.addV().property("aai-node-type", "complex")
+                .property("physical-location-id","complexROScn2")
+                 .property("aai-uri","/cloud-infrastructure/complexes/complex/complexROScn2")
+                .next();
+
+
+        //Scn1
+        edgeSerializer.addTreeEdge(g,pserverRO,pIntRo);
+        edgeSerializer.addTreeEdge(g,pIntRo,lInterfaceRo);
+        edgeSerializer.addEdge(g,pserverRO,complexRO);
+        
+        
+
+        //Scn2
+        edgeSerializer.addTreeEdge(g,pserverRCTScn2,pIntRctScn2);
+        edgeSerializer.addTreeEdge(g,pIntRctScn2,lInterfaceRctScn2);
+        edgeSerializer.addEdge(g,pserverRCTScn2,complexRctScn2);
+        edgeSerializer.addTreeEdge(g,pserverROScn2,pIntRoScn2);
+        edgeSerializer.addTreeEdge(g,pIntRoScn2,lInterfaceRoScn2);
+        edgeSerializer.addEdge(g,pserverROScn2,complexROScn2);
+        
+        //Scn3
+        pserverRCTScn3 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverRCTScn3")
+                .property("source-of-truth","RCT")
+                .property("fqdn","Scn3.pserverRCTScn3")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRCTScn3")
+                .property("resource-version","3")
+                .next();
+        
+        complexScn3 = g.addV().property("aai-node-type", "complex")
+                .property("physical-location-id","complexScn3")
+                .property("aai-uri","/cloud-infrastructure/complexes/complex/complexScn3")
+                .next();
+        
+        pserverROScn3 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","Scn3.pserverROScn3")
+                .property("source-of-truth","RO")
+                .property("fqdn","Scn2.pserverRCTScn2")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/Scn3.pserverROScn3")
+                .property("resource-version","4")
+                .next();
+        
+        edgeSerializer.addEdge(g, pserverRCTScn3, complexScn3);
+        edgeSerializer.addEdge(g, pserverROScn3, complexScn3);
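+        // Both pservers share the same complex; after dedup the RCT pserver is expected to keep a single LocatedIn edge (see checkRCTPserverHasRelnToOnly1Complex).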
+        
+        //Verify manytoOne edge scenario
+        pserverRCTScn6 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserverRCTScn6")
+                .property("source-of-truth","RCT")
+                .property("fqdn","Scn6.pserverRCTScn6")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserverRCTScn6")
+                .property("resource-version","1")
+                .next();
+        
+        zoneScn61 = g.addV().property("aai-node-type", "zone")
+                       .property("zone-id", "zone-61")
+                       .property("aai-uri","/network/zones/zone/zone-61")
+                       .next();
+        
+        pserverROScn6 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","Scn6.pserverROScn6")
+                .property("source-of-truth","RO")
+                .property("fqdn","Scn6.pserverRCTScn6")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/Scn6.pserverROScn6")
+                .property("resource-version","4")
+                .next();
+        
+        zoneScn62 = g.addV().property("aai-node-type", "zone")
+                       .property("zone-id", "zone-62")
+                       .property("aai-uri","/network/zones/zone/zone-62")
+                       .next();
+        
+        edgeSerializer.addEdge(g,  pserverRCTScn6, zoneScn61);
+        edgeSerializer.addEdge(g,  pserverROScn6, zoneScn62);
+        
+        //Verify manyToMany edge scenario
+        Vertex gvnf1 = g.addV().property("aai-node-type", "generic-vnf")
+                       .property("vnf-id", "vnf-1")
+                       .property("aai-uri","/cloud-infrastructure/pservers/pserver/Scn6.pserverROScn6")
+                       .next();
+        
+        Vertex gvnf2 = g.addV().property("aai-node-type", "generic-vnf")
+                       .property("vnf-id", "vnf-2")
+                       .property("aai-uri","/network/generic-vnfs/generic-vnf/vnf-2")
+                       .next();
+        
+        edgeSerializer.addEdge(g,  pserverRCTScn6, gvnf1);
+        edgeSerializer.addEdge(g,  pserverROScn6, gvnf2);
+        
+        // Verify empty string scenario
+        Vertex pserver1EmptyFirstToken = g.addV().property("aai-node-type", "pserver")
+                .property("hostname",".pserver1EmptyFirstToken")
+                .property("source-of-truth","RO")
+                .property("fqdn","sameScn1.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/.pserver1EmptyFirstToken")
+                .property("resource-version","1")
+                .next();
+        
+        Vertex pserver1EmptyFirstTokenFqdn = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserver1EmptyFirstTokenFqdn")
+                .property("source-of-truth","RCT")
+                .property("fqdn",".rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver1EmptyFirstTokenFqdn")
+                .property("resource-version","1")
+                .next();
+
+      //lag-interfaces
+        
+        Vertex roP1 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","pserver.ro")
+                .property("source-of-truth","RO")
+                .property("fqdn","pserver.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/pserver.ro")
+                .property("resource-version","1")
+                .next();
+        
+        Vertex rctP1 = g.addV().property("aai-node-type", "pserver")
+                .property("hostname","rctP1")
+                .property("source-of-truth","RCT")
+                .property("fqdn","pserver.rrrr.tttt.yyyy")
+                .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP1")
+                .property("resource-version","3")
+                .next();
+        
+        //lagint11 does not have a match on rctP1, so expect it to move to rctP1 (covered by testLagInterfaces below)
+          Vertex lagint11= g.addV()
+                  .property("aai-node-type", "lag-interface")
+                  .property("interface-name", "lagint11")
+                  .property("aai-uri","/cloud-infrastructure/pservers/pserver/roP1/lag-interfaces/lag-interface/lagint11")
+                  .next();
+          edgeSerializer.addTreeEdge(g, roP1, lagint11);
+          
+          //lagint12 matches with lagint31 on rctP3
+          Vertex lagint12= g.addV()
+                  .property("aai-node-type", "lag-interface")
+                  .property("interface-name", "lagint12")
+                  .property("aai-uri","/cloud-infrastructure/pservers/pserver/roP1/lag-interfaces/lag-interface/lagint12")
+                  .next();
+          Vertex lagint31= g.addV()
+                  .property("aai-node-type", "lag-interface")
+                  .property("interface-name", "lagint12")
+                  .property("aai-uri","/cloud-infrastructure/pservers/pserver/rctP3/lag-interfaces/lag-interface/lagint12")
+                  .next();
+          edgeSerializer.addTreeEdge(g, roP1, lagint12);
+          edgeSerializer.addTreeEdge(g, rctP1, lagint31);
+          
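+        // Wire the migration to this in-memory JanusGraph through spies so run() operates on the scenario vertices created above.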
+        TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+        GraphTraversalSource traversal = g;
+        GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+        when(spy.tx()).thenReturn(tx);
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
+        when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+
+        migration = new PserverDedupWithDifferentSourcesOfTruth(spy,loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        migration.run();
+    }
+
+    @After
+    public void cleanUp() {
+        tx.tx().rollback();
+        graph.close();
+    }
+
+    @Test
+    public void rctSuccessfulMoveScn1() throws Exception {
+
+        assertEquals("tttt.RoHostname",pserverRCT.property("fqdn").value().toString());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRCT").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").hasNext());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRCT/p-interfaces/p-interface/pIntRo", pIntRo.property("aai-uri").value().toString());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRCT/p-interfaces/p-interface/pIntRo/l-interfaces/l-interface/lInterfaceRo", lInterfaceRo.property("aai-uri").value().toString());
+        assertEquals(true,pserverRCT.property("pserver-id").isPresent());
+    }
+    
+    @Test
+    public void rctSuccessfulMovePlink() throws Exception {
+
+        assertEquals("tttt.RoHostname",pserverRCT.property("fqdn").value().toString());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRCT").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pintPlinkScn1").hasNext());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRCT/p-interfaces/p-interface/pintPlinkScn1", pintPlinkScn1.property("aai-uri").value().toString());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRCT").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pintPlinkScn1").out("tosca.relationships.network.LinksTo").has("link-name","pLinkScn1").hasNext());
+    }
+    
+    @Test
+    public void rctNoChangeSamePIntScenario() throws Exception {
+
+        assertEquals("Scn4.pserverROSPlinkScn4",pserverRCTPlinkScn4.property("fqdn").value().toString());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTPlinkScn4").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pintPlinkScn4").hasNext());
+        assertEquals("only 1 p-int is present on RCT pserver", new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTPlinkScn4").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pintPlinkScn4").count().next());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRCTPlinkScn4/p-interfaces/p-interface/pintPlinkScn4", samePintScn4RCT.property("aai-uri").value().toString());
+        //plink is not  moved from RO to RCT when p-int matches
+        assertEquals(false, g.V().has("aai-node-type", "pserver").has("hostname","pserverRCT").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","p-interface").has("interface-name","pintPlinkScn4").out("tosca.relationships.network.LinksTo").hasNext());
+        assertEquals(false, g.V().has("aai-node-type", "pserver").has("hostname","pserverROPlinkScn4").hasNext());
+        //Verify that no orphan nodes are present in the graph
+        assertEquals(false, g.V().has("aai-node-type","p-interface").has("interface-name","pintPlinkScn4").out("tosca.relationships.network.LinksTo").has("link-name","pLinkScn2").hasNext());
+        assertEquals(false, g.V().has("aai-node-type","physical-link").has("link-name","pLinkScn2").hasNext());
+    }
+
+    @Test
+    public void rctSuccessfulMoveScn2() throws Exception {
+
+        assertEquals("Scn2.pserverROScn2",pserverRCTScn2.property("fqdn").value().toString());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTScn2").in("tosca.relationships.network.BindsTo").has("aai-node-type","p-interface").hasNext());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRCTScn2/p-interfaces/p-interface/pIntRoScn2", pIntRoScn2.property("aai-uri").value().toString());
+        assertEquals("/cloud-infrastructure/pservers/pserver/pserverRCTScn2/p-interfaces/p-interface/pIntRoScn2/l-interfaces/l-interface/lInterfaceRoScn2", lInterfaceRoScn2.property("aai-uri").value().toString());
+        assertEquals(true,pserverRCTScn2.property("pserver-id").isPresent());
+        assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTScn2").out("org.onap.relationships.inventory.LocatedIn")
+                .has("aai-node-type","complex").has("physical-location-id","complexRctScn2").hasNext());
+    }
+    
+    @Test
+    public void checkRCTPserverHasRelnToOnly1Complex() throws Exception {
+
+               assertEquals("Edge to only 1 complex exists", new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTScn3").out("org.onap.relationships.inventory.LocatedIn")
+                .has("aai-node-type","complex").count().next());
+    }
+    
+    @Test
+    public void checkRCTPserverHasRelnToOnly1Zone() throws Exception {
+
+               assertEquals("Edge to only 1 Zone exists", new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTScn6").out("org.onap.relationships.inventory.LocatedIn")
+                .has("aai-node-type","zone").count().next());
+               assertEquals(true, g.V().has("aai-node-type", "zone").has("zone-id","zone-62").hasNext());
+               //Verify no edge exists from zone62 to RO pserver
+               assertEquals(false, g.V().has("aai-node-type", "zone").has("zone-id","zone-62").in().has("aai-node-type", "pserver").hasNext());
+               
+    }
+    
+    @Test
+    public void checkRCTPserverHasRelnTo2GenericVnfs() throws Exception {
+
+               assertEquals("Edge to 2 generic-vnfs exists", new Long(2L), g.V().has("aai-node-type", "pserver").has("hostname","pserverRCTScn6").in("tosca.relationships.HostedOn")
+                .has("aai-node-type","generic-vnf").count().next());
+               assertEquals(true, g.V().has("aai-node-type", "generic-vnf").has("vnf-id","vnf-2").out().has("aai-node-type", "pserver").has("hostname", "pserverRCTScn6").hasNext());
+               //Verify no edge exists from zone62 to RO pserver
+               assertEquals(false, g.V().has("aai-node-type", "generic-vnf").has("vnf-id","vnf-2").out().has("aai-node-type", "pserver").has("hostname", "pserverROScn6").hasNext());
+    }
+    
+    @Test
+    public void ignoreEmptyStringFirstTokenFqdn() throws Exception {
+//     List<Vertex> pserverList = g.V().has("aai-node-type", "pserver").has("hostname").toList();
+//     pserverList.forEach(v ->System.out.println(v.property("hostname").value().toString()));
+       assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","pserver1EmptyFirstTokenFqdn").hasNext());
+       assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname",".pserver1EmptyFirstToken").hasNext());
+       
+    }
+    
+    @Test
+    public void testLagInterfaces() throws Exception {
+       //1. lagint11 from roP1 moves to rctP1
+       assertEquals(true, g.V().has("aai-node-type", "pserver").has("hostname","rctP1").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","lag-interface").has("interface-name","lagint11").hasNext());
+       assertEquals(false, g.V().has("aai-node-type", "lag-interface").has("aai-uri","/cloud-infrastructure/pservers/pserver/pserver.ro/lag-interfaces/lag-interface/lagint11").hasNext());
+       assertEquals(true, g.V().has("aai-node-type", "lag-interface").has("aai-uri","/cloud-infrastructure/pservers/pserver/rctP1/lag-interfaces/lag-interface/lagint11").hasNext());
+
+       
+       //2. lagint12's interface-name matches lagint31 on rctP1, so verify the RO lag-interface is not moved over (rctP1 keeps a single lagint12)
+       assertEquals("rctP1 has only 1 lag-interface with name lagint12", new Long(1L), g.V().has("aai-node-type", "pserver").has("hostname","rctP1").in("tosca.relationships.network.BindsTo")
+                       .has("aai-node-type","lag-interface").has("interface-name","lagint12").count().next());
+       
+    }
+
+}
diff --git a/src/test/java/org/onap/aai/migration/v15/MigrateBooleanDefaultsToFalseTest.java b/src/test/java/org/onap/aai/migration/v15/MigrateBooleanDefaultsToFalseTest.java
new file mode 100644 (file)
index 0000000..1474ca1
--- /dev/null
@@ -0,0 +1,385 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v15;\r
+\r
+import org.onap.aai.AAISetup;\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+\r
+\r
+import org.janusgraph.core.JanusGraph;\r
+import org.janusgraph.core.JanusGraphFactory;\r
+import org.janusgraph.core.JanusGraphTransaction;\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;\r
+import org.junit.Before;\r
+import org.junit.Test;\r
+import org.onap.aai.dbmap.DBConnectionType;\r
+import org.onap.aai.introspection.Loader;\r
+import org.onap.aai.introspection.ModelType;\r
+import org.onap.aai.setup.SchemaVersions;\r
+import org.onap.aai.setup.SchemaVersion;\r
+import org.onap.aai.migration.Status;\r
+import org.onap.aai.migration.v15.MigrateBooleanDefaultsToFalse;\r
+import org.onap.aai.serialization.engines.QueryStyle;\r
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;\r
+\r
+import static org.mockito.Mockito.spy;\r
+import static org.mockito.Mockito.when;\r
+\r
+import java.util.Optional;\r
+\r
+import static org.junit.Assert.assertTrue;\r
+\r
+public class MigrateBooleanDefaultsToFalseTest extends AAISetup {\r
+\r
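+       // Test-only subclass: it pins getStatus() and getMigrationName() so the boolean-default logic of MigrateBooleanDefaultsToFalse can be exercised directly.\r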
+       public static class BooleanDefaultMigrator extends MigrateBooleanDefaultsToFalse {\r
+        public BooleanDefaultMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){\r
+            super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+        }\r
+        @Override\r
+        public Status getStatus() {\r
+            return Status.SUCCESS;\r
+        }\r
+        @Override\r
+        public Optional<String[]> getAffectedNodeTypes() {\r
+               return Optional.of(new String[]{VNF_NODE_TYPE,VSERVER_NODE_TYPE,VNFC_NODE_TYPE,L3NETWORK_NODE_TYPE,SUBNET_NODE_TYPE,LINTERFACE_NODE_TYPE,VFMODULE_NODE_TYPE});\r
+        }\r
+        @Override\r
+        public String getMigrationName() {\r
+            return "MockBooleanDefaultMigrator";\r
+        }\r
+    }\r
+\r
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;\r
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;\r
+    private final static DBConnectionType type = DBConnectionType.REALTIME;\r
+    private Loader loader;\r
+    private TransactionalGraphEngine dbEngine;\r
+    private BooleanDefaultMigrator migration;\r
+    private GraphTraversalSource g;\r
+\r
+    @Before\r
+    public void setup() throws Exception{\r
+        g = tx.traversal();\r
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());\r
+        dbEngine = new JanusGraphDBEngine(\r
+                queryStyle,\r
+                type,\r
+                loader);\r
+\r
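+        // Each node type below gets four vertices: boolean property missing, set to an empty string, explicitly true, and explicitly false.\r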
+        //generic-vnf\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf1")\r
+                .property("is-closed-loop-disabled", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf2")\r
+                .property("is-closed-loop-disabled", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                       .property("vnf-id", "generic-vnf3")\r
+                       .property("is-closed-loop-disabled", false)\r
+                       .next();\r
+        //vnfc\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc1")\r
+                .property("is-closed-loop-disabled", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc2")\r
+                .property("is-closed-loop-disabled", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                       .property("vnfc-name", "vnfc3")\r
+                       .property("is-closed-loop-disabled", false)\r
+                       .next();\r
+        //vserver\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver1")\r
+                .property("is-closed-loop-disabled", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver2")\r
+                .property("is-closed-loop-disabled", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                       .property("vserver-id", "vserver3")\r
+                       .property("is-closed-loop-disabled", false)\r
+                       .next();        \r
+      //l3-network\r
+        g.addV().property("aai-node-type", "l3-network")\r
+                .property("network-id", "l3-network0")\r
+                               .property("network-name", "l3-network-name0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "l3-network")\r
+                .property("network-id", "l3-network1")\r
+                               .property("network-name", "l3-network-name1")\r
+                .property("is-bound-to-vpn", "")\r
+                .property("is-provider-network", "")\r
+                               .property("is-shared-network", "")\r
+                               .property("is-external-network", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "l3-network")\r
+                .property("network-id", "l3-network2")\r
+                               .property("network-name", "l3-network-name2")\r
+                .property("is-bound-to-vpn", true)\r
+                .property("is-provider-network", true)\r
+                               .property("is-shared-network", true)\r
+                               .property("is-external-network", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "l3-network")\r
+                       .property("network-id", "l3-network3")\r
+                               .property("network-name", "l3-network-name3")\r
+                       .property("is-bound-to-vpn", false)\r
+                       .property("is-provider-network", false)\r
+                               .property("is-shared-network", false)\r
+                               .property("is-external-network", false)\r
+                       .next();       \r
+        //subnet\r
+        g.addV().property("aai-node-type", "subnet")\r
+                .property("subnet-id", "subnet0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "subnet")\r
+                .property("subnet-id", "subnet1")\r
+                .property("dhcp-enabled", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "subnet")\r
+                .property("subnet-id", "subnet2")\r
+                .property("dhcp-enabled", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "subnet")\r
+                       .property("subnet-id", "subnet3")\r
+                       .property("dhcp-enabled", false)\r
+                       .next();\r
+      //l-interface\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface0")\r
+                               .property("in-maint", false)\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface1")\r
+                .property("in-maint", false)\r
+                               .property("is-port-mirrored", "")\r
+                               .property("is-ip-unnumbered", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface2")\r
+                .property("in-maint", false)\r
+                               .property("is-port-mirrored", true)\r
+                               .property("is-ip-unnumbered", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                       .property("interface-name", "l-interface3")\r
+                       .property("in-maint", false)\r
+                               .property("is-port-mirrored", false)\r
+                               .property("is-ip-unnumbered", false)\r
+                       .next(); \r
+      //vf-module\r
+        g.addV().property("aai-node-type", "vf-module")\r
+                .property("vf-module-id", "vf-module0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vf-module")\r
+                .property("vf-module-id", "vf-module1")\r
+                               .property("is-base-vf-module", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vf-module")\r
+                .property("vf-module-id", "vf-module2")\r
+                               .property("is-base-vf-module", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vf-module")\r
+                       .property("vf-module-id", "vf-module3")\r
+                               .property("is-base-vf-module", false)                           \r
+                       .next(); \r
+                     \r
+      //vlan\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan1")\r
+                               .property("is-ip-unnumbered", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan2")\r
+                               .property("is-ip-unnumbered", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                       .property("vlan-interface", "vlan3")\r
+                               .property("is-ip-unnumbered", false)                            \r
+                       .next();\r
+        \r
+        TransactionalGraphEngine spy = spy(dbEngine);\r
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());\r
+        GraphTraversalSource traversal = g;\r
+        when(spy.asAdmin()).thenReturn(adminSpy);\r
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);\r
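+        // Only the writable traversal source is stubbed here; the migrator reads and updates the vertices directly through it.\r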
+        migration = new BooleanDefaultMigrator(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+        migration.run();\r
+        \r
+    }\r
+\r
+    @Test\r
+    public void testMissingProperty(){\r
+       //is-closed-loop-disabled\r
+        assertTrue("Value of generic-vnf should be updated since the property is-closed-loop-disabled doesn't exist",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf0").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vnfc should be updated since the property is-closed-loop-disabled doesn't exist",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc0").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vserver should be updated since the property is-closed-loop-disabled doesn't exist",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver0").has("is-closed-loop-disabled", false).hasNext());\r
+        //dhcp-enabled\r
+        assertTrue("Value of subnet should be updated since the property dhcp-enabled doesn't exist",\r
+                g.V().has("aai-node-type", "subnet").has("subnet-id", "subnet0").has("dhcp-enabled", false).hasNext());\r
+        //l3-network: is-bound-to-vpn, is-shared-network, is-external-network\r
+        assertTrue("Value of l3-network should be updated since the property is-bound-to-vpn doesn't exist",\r
+                g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network0").has("network-name", "l3-network-name0").has("is-bound-to-vpn", false).hasNext());  \r
+        assertTrue("Value of l3-network should be updated since the property is-provider-network doesn't exist",\r
+                g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network0").has("network-name", "l3-network-name0").has("is-provider-network", false).hasNext());  \r
+        assertTrue("Value of l3-network should be updated since the property is-shared-network doesn't exist",\r
+                g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network0").has("network-name", "l3-network-name0").has("is-shared-network", false).hasNext());  \r
+               assertTrue("Value of l3-network should be updated since the property is-external-network doesn't exist",\r
+                g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network0").has("network-name", "l3-network-name0").has("is-external-network", false).hasNext()); \r
+               //l-interface: is-port-mirrored, is-ip-unnumbered\r
+               assertTrue("Value of l-interface should be updated since the property is-port-mirrored doesn't exist",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface0").has("is-port-mirrored", false).hasNext());  \r
+               assertTrue("Value of l-interface should be updated since the property is-ip-unnumbered doesn't exist",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface0").has("is-ip-unnumbered", false).hasNext());\r
+               //vf-module: is-base-vf-module\r
+               assertTrue("Value of vf-module should be updated since the property is-base-vf-module doesn't exist",\r
+                g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module0").has("is-base-vf-module", false).hasNext());  \r
+               //vlan: is-ip-unnumbered\r
+               assertTrue("Value of vlan should be updated since the property is-ip-unnumbered doesn't exist",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan0").has("is-ip-unnumbered", false).hasNext());\r
+    }\r
+\r
+    @Test\r
+    public void testEmptyValue() {                         \r
+      //is-closed-loop-disabled\r
+        assertTrue("Value of generic-vnf should be updated since the value for is-closed-loop-disabled is an empty string",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf1").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vnfc should be updated since the value for is-closed-loop-disabled is an empty string",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc1").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vserver should be updated since the value for is-closed-loop-disabled is an empty string",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver1").has("is-closed-loop-disabled", false).hasNext());\r
+        //dhcp-enabled\r
+        assertTrue("Value of subnet should be updated since the value for dhcp-enabled is an empty string",\r
+                g.V().has("aai-node-type", "subnet").has("subnet-id", "subnet1").has("dhcp-enabled", false).hasNext());\r
+        //l3-network: is-bound-to-vpn, is-shared-network, is-external-network\r
+        assertTrue("Value of l3-network should be updated since the value for is-bound-to-vpn is an empty string",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network1").has("network-name", "l3-network-name1").has("is-bound-to-vpn", false).hasNext());         \r
+        assertTrue("Value of l3-network should be updated since the value for is-provider-network is an empty string",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network1").has("network-name", "l3-network-name1").has("is-provider-network", false).hasNext());        \r
+               assertTrue("Value of l3-network should be updated since the value for is-shared-network is an empty string",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network1").has("network-name", "l3-network-name1").has("is-shared-network", false).hasNext());\r
+               assertTrue("Value of l3-network should be updated since the value for is-external-network is an empty string",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network1").has("network-name", "l3-network-name1").has("is-external-network", false).hasNext());\r
+               //l-interface: is-port-mirrored, is-ip-unnumbered\r
+               assertTrue("Value of l-interface should be updated since the value for is-port-mirrored is an empty string",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface1").has("is-port-mirrored", false).hasNext());\r
+               assertTrue("Value of l-interface should be updated since the value for is-ip-unnumbered is an empty string",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface1").has("is-ip-unnumbered", false).hasNext());\r
+               //vf-module: is-base-vf-module\r
+               assertTrue("Value of vf-module should be updated since the value for is-base-vf-module is an empty string",\r
+                g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module1").has("is-base-vf-module", false).hasNext());\r
+               //vlan: is-ip-unnumbered\r
+               assertTrue("Value of vlan should be updated since the value for is-ip-unnumbered is an empty string",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan1").has("is-ip-unnumbered", false).hasNext());\r
+    }\r
+    \r
+    @Test\r
+    public void testExistingTrueValues() {\r
+      //is-closed-loop-disabled\r
+        assertTrue("Value of generic-vnf shouldn't be updated since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf2").has("is-closed-loop-disabled", true).hasNext());\r
+        assertTrue("Value of vnfc shouldn't be updated since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc2").has("is-closed-loop-disabled", true).hasNext());\r
+        assertTrue("Value of vserver shouldn't be updated since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver2").has("is-closed-loop-disabled", true).hasNext());\r
+       //dhcp-enabled\r
+        assertTrue("Value of subnet shouldn't be updated since dhcp-enabled already exists",\r
+                g.V().has("aai-node-type", "subnet").has("subnet-id", "subnet2").has("dhcp-enabled", true).hasNext()); \r
+      //l3-network: is-bound-to-vpn, is-shared-network, is-external-network\r
+        assertTrue("Value of l3-network shouldn't be updated since is-bound-to-vpn already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network2").has("network-name", "l3-network-name2").has("is-bound-to-vpn", true).hasNext());\r
+        assertTrue("Value of l3-network shouldn't be updated since is-provider-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network2").has("network-name", "l3-network-name2").has("is-provider-network", true).hasNext());\r
+               assertTrue("Value of l3-network shouldn't be updated since is-shared-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network2").has("network-name", "l3-network-name2").has("is-shared-network", true).hasNext());\r
+               assertTrue("Value of l3-network shouldn't be updated since is-external-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network2").has("network-name", "l3-network-name2").has("is-external-network", true).hasNext());                          \r
+               //l-interface: is-port-mirrored, is-ip-unnumbered\r
+               assertTrue("Value of l-interface shouldn't be updated since is-port-mirrored already exists",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface2").has("is-port-mirrored", true).hasNext());  \r
+               assertTrue("Value of l-interface shouldn't be updated since is-ip-unnumbered already exists",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface2").has("is-ip-unnumbered", true).hasNext());              \r
+               //vf-module: is-base-vf-module\r
+               assertTrue("Value of vf-module shouldn't be updated since is-base-vf-module already exists",\r
+                g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module2").has("is-base-vf-module", true).hasNext());  \r
+               //vlan: is-ip-unnumbered\r
+               assertTrue("Value of vlan shouldn't be updated since is-ip-unnumbered already exists",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan2").has("is-ip-unnumbered", true).hasNext());\r
+        \r
+    }\r
+    \r
+    @Test\r
+    public void testExistingFalseValues() {\r
+       //is-closed-loop-disabled\r
+        assertTrue("Value of generic-vnf shouldn't be update since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf3").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vnfc shouldn't be update since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc3").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vserver shouldn't be update since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver3").has("is-closed-loop-disabled", false).hasNext());\r
+        //dhcp-enabled\r
+        assertTrue("Value of subnet shouldn't be update since dhcp-enabled already exists",\r
+                g.V().has("aai-node-type", "subnet").has("subnet-id", "subnet3").has("dhcp-enabled", false).hasNext());\r
+        //l3-network: is-bound-to-vpn, is-provider-network, is-shared-network, is-external-network\r
+        assertTrue("Value of l3-network shouldn't be updated since is-bound-to-vpn already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network3").has("network-name", "l3-network-name3").has("is-bound-to-vpn", false).hasNext());  \r
+        assertTrue("Value of l3-network shouldn't be updated since is-provider-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network3").has("network-name", "l3-network-name3").has("is-provider-network", false).hasNext());  \r
+        assertTrue("Value of l3-network shouldn't be updated since is-shared-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network3").has("network-name", "l3-network-name3").has("is-shared-network", false).hasNext());\r
+               assertTrue("Value of l3-network shouldn't be updated since is-external-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network3").has("network-name", "l3-network-name3").has("is-external-network", false).hasNext());                 \r
+               //l-interface: is-port-mirrored, is-ip-unnumbered\r
+               assertTrue("Value of l-interface shouldn't be updated since is-port-mirrored already exists",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface3").has("is-port-mirrored", false).hasNext());  \r
+               assertTrue("Value of ll-interface shouldn't be updated since is-ip-unnumbered already exists",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface3").has("is-ip-unnumbered", false).hasNext());                             \r
+               //vf-module: is-base-vf-module\r
+               assertTrue("Value of vf-module shouldn't be updated since is-base-vf-module already exists",\r
+                g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module3").has("is-base-vf-module", false).hasNext());  \r
+               //vlan: is-ip-unnumbered\r
+               assertTrue("Value of vlan shouldn't be updated since is-ip-unnumbered already exists",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan3").has("is-ip-unnumbered", false).hasNext());\r
+    } \r
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v15/MigrateCloudRegionUpgradeCycleTest.java b/src/test/java/org/onap/aai/migration/v15/MigrateCloudRegionUpgradeCycleTest.java
new file mode 100644 (file)
index 0000000..1b84d86
--- /dev/null
@@ -0,0 +1,122 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v15;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+
+public class MigrateCloudRegionUpgradeCycleTest extends AAISetup{
+
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+    private final static DBConnectionType type = DBConnectionType.REALTIME;
+    private Loader loader;
+    private TransactionalGraphEngine dbEngine;
+    private JanusGraph graph;
+    private MigrateCloudRegionUpgradeCycle migration;
+    private GraphTraversalSource g;
+    private JanusGraphTransaction tx;
+    Vertex cloudRegion1;
+    Vertex cloudRegion2;
+    Vertex cloudRegion3;
+   
+
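+    /**
+     * Builds an in-memory JanusGraph with three cloud-region vertices (with and without an
+     * existing upgrade-cycle value) and runs MigrateCloudRegionUpgradeCycle against a spied
+     * TransactionalGraphEngine backed by that graph.
+     */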
+    @Before
+    public void setUp() throws Exception {
+       
+       graph = JanusGraphFactory.build().set("storage.backend", "inmemory").open();
+        tx = graph.newTransaction();
+        g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                type,
+                loader);
+        
+        System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+               
+        cloudRegion1 = g.addV().property("aai-node-type", MigrateCloudRegionUpgradeCycle.CLOUD_REGION_NODE_TYPE)
+                       .property(MigrateCloudRegionUpgradeCycle.CLOUD_REGION_ID, "akr1")
+                       .property(MigrateCloudRegionUpgradeCycle.CLOUD_OWNER, "att-aic")
+                .property( MigrateCloudRegionUpgradeCycle.UPGRADE_CYCLE, "Test")
+                .next();
+         
+         cloudRegion2 = g.addV().property("aai-node-type", MigrateCloudRegionUpgradeCycle.CLOUD_REGION_NODE_TYPE)
+                        .property(MigrateCloudRegionUpgradeCycle.CLOUD_REGION_ID, "amsnl1b")
+                        .property(MigrateCloudRegionUpgradeCycle.CLOUD_OWNER, "att-aic")
+                 //.property( MigrateCloudRegionUpgradeCycle.UPGRADE_CYCLE, "server")
+                 .next();
+         
+         cloudRegion3 = g.addV().property("aai-node-type", MigrateCloudRegionUpgradeCycle.CLOUD_REGION_NODE_TYPE)
+                        .property(MigrateCloudRegionUpgradeCycle.CLOUD_REGION_ID, "alp1")
+                        .property(MigrateCloudRegionUpgradeCycle.CLOUD_OWNER, "Test")
+                 .property( MigrateCloudRegionUpgradeCycle.UPGRADE_CYCLE, "server1")
+                 .next();
+        
+        TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+        GraphTraversalSource traversal = g;
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
+        migration = new MigrateCloudRegionUpgradeCycle(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        migration.run();
+    }
+
+    @After
+    public void cleanUp() {
+        tx.rollback();
+        graph.close();
+    }
+
+
+    /**
+     * Verifies the upgrade-cycle values after the migration: the regions expected to be
+     * migrated carry their new values, while the non-matching region keeps its original value.
+     */
+
+    @Test
+    public void confirmUpgradeCycleChanged() {
+
+        assertEquals("E",cloudRegion1.property(MigrateCloudRegionUpgradeCycle.UPGRADE_CYCLE).value());
+        assertEquals("B",cloudRegion2.property(MigrateCloudRegionUpgradeCycle.UPGRADE_CYCLE).value());
+        assertEquals("server1",cloudRegion3.property(MigrateCloudRegionUpgradeCycle.UPGRADE_CYCLE).value());//Not changed
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v15/MigrateInMaintDefaultToFalseTest.java b/src/test/java/org/onap/aai/migration/v15/MigrateInMaintDefaultToFalseTest.java
new file mode 100644 (file)
index 0000000..1bf1344
--- /dev/null
@@ -0,0 +1,411 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v15;\r
+\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+\r
+import com.att.eelf.configuration.EELFLogger;\r
+import com.att.eelf.configuration.EELFManager;\r
+\r
+import org.janusgraph.core.JanusGraph;\r
+import org.janusgraph.core.JanusGraphFactory;\r
+import org.janusgraph.core.JanusGraphTransaction;\r
+import org.janusgraph.core.schema.JanusGraphManagement;\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;\r
+import org.junit.Before;\r
+import org.junit.Test;\r
+import org.onap.aai.AAISetup;\r
+import org.onap.aai.dbmap.DBConnectionType;\r
+import org.onap.aai.introspection.Loader;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.introspection.ModelType;\r
+import org.onap.aai.setup.SchemaVersions;\r
+import org.onap.aai.setup.SchemaVersion;\r
+import org.onap.aai.migration.Status;\r
+import org.onap.aai.migration.v15.MigrateInMaintDefaultToFalse;\r
+import org.onap.aai.migration.v15.MigrateInMaintDefaultToFalseTest.InMaintDefaultMigrator;\r
+import org.onap.aai.serialization.engines.QueryStyle;\r
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;\r
+\r
+import static org.mockito.Mockito.spy;\r
+import static org.mockito.Mockito.when;\r
+\r
+import java.util.HashMap;\r
+import java.util.Map;\r
+import java.util.Optional;\r
+\r
+import static org.junit.Assert.assertTrue;\r
+import static org.junit.Assert.assertEquals;\r
+\r
+public class MigrateInMaintDefaultToFalseTest extends\r
+               AAISetup {\r
+       protected static final String VNF_NODE_TYPE = "generic-vnf";\r
+       protected static final String LINTERFACE_NODE_TYPE = "l-interface";\r
+       protected static final String LAG_INTERFACE_NODE_TYPE = "lag-interface";\r
+       protected static final String LOGICAL_LINK_NODE_TYPE = "logical-link";\r
+       protected static final String PINTERFACE_NODE_TYPE = "p-interface";\r
+       protected static final String VLAN_NODE_TYPE = "vlan";\r
+       protected static final String VNFC_NODE_TYPE = "vnfc";\r
+       protected static final String VSERVER_NODE_TYPE = "vserver";\r
+       protected static final String PSERVER_NODE_TYPE = "pserver";\r
+       protected static final String PNF_NODE_TYPE = "pnf";\r
+       protected static final String NOS_SERVER_NODE_TYPE = "nos-server";\r
+\r
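+       /**\r
+        * Test double that reuses the production migration logic but reports SUCCESS and a fixed\r
+        * list of affected node types, so the run can be driven entirely from the in-memory graph.\r
+        */\r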
+       public static class InMaintDefaultMigrator extends MigrateInMaintDefaultToFalse {\r
+        public InMaintDefaultMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){\r
+            super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+        }\r
+        @Override\r
+        public Status getStatus() {\r
+            return Status.SUCCESS;\r
+        }\r
+        @Override\r
+        public Optional<String[]> getAffectedNodeTypes() {\r
+               return Optional.of(new String[]{VNF_NODE_TYPE,LINTERFACE_NODE_TYPE,LAG_INTERFACE_NODE_TYPE,LOGICAL_LINK_NODE_TYPE,PINTERFACE_NODE_TYPE,VLAN_NODE_TYPE,VNFC_NODE_TYPE,VSERVER_NODE_TYPE,PSERVER_NODE_TYPE,PNF_NODE_TYPE,NOS_SERVER_NODE_TYPE});\r
+        }\r
+        @Override\r
+        public String getMigrationName() {\r
+            return "MockInMaintDefaultMigrator";\r
+        }\r
+    }\r
+\r
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;\r
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;\r
+    private final static DBConnectionType type = DBConnectionType.REALTIME;\r
+    private Loader loader;\r
+    private TransactionalGraphEngine dbEngine;\r
+    private InMaintDefaultMigrator migration;\r
+    private GraphTraversalSource g;\r
+\r
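+    /**\r
+     * Seeds one vertex per affected node type for each case the migrator handles: in-maint\r
+     * missing, empty, already true, and already false, then runs the mock migrator once.\r
+     */\r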
+    @Before\r
+    public void setup() throws Exception{\r
+        g = tx.traversal();\r
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());\r
+        dbEngine = new JanusGraphDBEngine(\r
+                queryStyle,\r
+                type,\r
+                loader);\r
+\r
+        //generic-vnf\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                       .property("vnf-id", "generic-vnf3")\r
+                       .property("in-maint", false)\r
+                       .next();        \r
+      //l-interface\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                       .property("interface-name", "l-interface3")\r
+                       .property("in-maint", false)\r
+                       .next();         \r
+      //lag-interface\r
+        g.addV().property("aai-node-type", "lag-interface")\r
+                .property("interface-name", "lag-interface0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "lag-interface")\r
+                .property("interface-name", "lag-interface1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "lag-interface")\r
+                .property("interface-name", "lag-interface2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "lag-interface")\r
+                       .property("interface-name", "lag-interface3")\r
+                       .property("in-maint", false)\r
+                       .next();        \r
+      //logical-link\r
+        g.addV().property("aai-node-type", "logical-link")\r
+                .property("link-name", "logical-link0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "logical-link")\r
+                .property("link-name", "logical-link1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "logical-link")\r
+                .property("link-name", "logical-link2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "logical-link")\r
+                       .property("link-name", "logical-link3")\r
+                       .property("in-maint", false)\r
+                       .next();      \r
+      //p-interface\r
+        g.addV().property("aai-node-type", "p-interface")\r
+                .property("interface-name", "p-interface0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "p-interface")\r
+                .property("interface-name", "p-interface1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "p-interface")\r
+                .property("interface-name", "p-interface2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "p-interface")\r
+                       .property("interface-name", "p-interface3")\r
+                       .property("in-maint", false)\r
+                       .next();        \r
+      //pnf\r
+        g.addV().property("aai-node-type", "pnf")\r
+                .property("pnf-name", "pnf0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "pnf")\r
+                .property("pnf-name", "pnf1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "pnf")\r
+                .property("pnf-name", "pnf2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "pnf")\r
+                       .property("pnf-name", "pnf3")\r
+                       .property("in-maint", false)\r
+                       .next();        \r
+      //pserver\r
+        g.addV().property("aai-node-type", "pserver")\r
+                .property("pserver-id", "pserver0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "pserver")\r
+                .property("pserver-id", "pserver1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "pserver")\r
+                .property("pserver-id", "pserver2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "pserver")\r
+                       .property("pserver-id", "pserver3")\r
+                       .property("in-maint", false)\r
+                       .next();       \r
+      //vlan\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                       .property("vlan-interface", "vlan3")\r
+                       .property("in-maint", false)\r
+                       .next();\r
+      //vnfc\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                       .property("vnfc-name", "vnfc3")\r
+                       .property("in-maint", false)\r
+                       .next();\r
+      //vserver\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver2")\r
+                .property("in-maint", true)\r
+                .property("is-closed-loop-disabled", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                       .property("vserver-id", "vserver3")\r
+                       .property("in-maint", false)\r
+                       .next();\r
+      //nos-server\r
+        g.addV().property("aai-node-type", "nos-server")\r
+                .property("nos-server-id", "nos-server0")\r
+                               .property("nos-server-name", "nos-server-name0")\r
+                               .property("vendor", "vendor0")\r
+                               .property("nos-server-selflink", "nos-server-selflink0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "nos-server")\r
+                .property("nos-server-id", "nos-server1")\r
+                               .property("nos-server-name", "nos-server-name1")\r
+                               .property("vendor", "vendor1")\r
+                               .property("nos-server-selflink", "nos-server-selflink1")                                \r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "nos-server")\r
+                .property("nos-server-id", "nos-server2")\r
+                               .property("nos-server-name", "nos-server-name2")\r
+                               .property("vendor", "vendor2")\r
+                               .property("nos-server-selflink", "nos-server-selflink2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "nos-server")\r
+                       .property("nos-server-id", "nos-server3")\r
+                               .property("nos-server-name", "nos-server-name3")\r
+                               .property("vendor", "vendor3")\r
+                               .property("nos-server-selflink", "nos-server-selflink3")\r
+                       .property("in-maint", false)\r
+                       .next();\r
+        \r
+        TransactionalGraphEngine spy = spy(dbEngine);\r
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());\r
+        GraphTraversalSource traversal = g;\r
+        when(spy.asAdmin()).thenReturn(adminSpy);\r
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);\r
+        migration = new InMaintDefaultMigrator(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+        migration.run();\r
+    }\r
+\r
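+    /**\r
+     * Vertices created without an in-maint property should be defaulted to false by the migration.\r
+     */\r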
+    @Test\r
+    public void testMissingProperty(){\r
+        assertTrue("Value of generic-vnf should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of l-interface should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of lag-interface should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "lag-interface").has("interface-name", "lag-interface0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of logical-link should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "logical-link").has("link-name", "logical-link0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of p-interface should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "p-interface").has("interface-name", "p-interface0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of pnf should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of pserver should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "pserver").has("pserver-id", "pserver0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vlan should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vnfc should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vserver should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver0").has("in-maint", false).hasNext());    \r
+        assertTrue("Value of nos-server should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "nos-server").has("nos-server-id", "nos-server0").has("in-maint", false).hasNext()); \r
+    }\r
+\r
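+    /**\r
+     * Vertices created with an empty-string in-maint value should be defaulted to false.\r
+     */\r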
+    @Test\r
+    public void testEmptyValue() {                \r
+        assertTrue("Value of generic-vnf should be updated since the value for in-maint is an empty string",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf1").has("in-maint", false).hasNext());\r
+        assertTrue("Value of l-interface should be updated since the value for in-maint is an empty string",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface1").has("in-maint", false).hasNext());\r
+        assertTrue("Value of lag-interface should be updated since the value for in-maint is an empty string",\r
+                g.V().has("aai-node-type", "lag-interface").has("interface-name", "lag-interface1").has("in-maint", false).hasNext());\r
+        assertTrue("Value of logical-link should be updated since the value for in-maint is an empty string",\r
+                g.V().has("aai-node-type", "logical-link").has("link-name", "logical-link1").has("in-maint", false).hasNext());\r
+        assertTrue("Value of p-interface should be updated since the value for in-maint is an empty string",\r
+                g.V().has("aai-node-type", "p-interface").has("interface-name", "p-interface1").has("in-maint", false).hasNext());\r
+        assertTrue("Value of pnf should be updated since the value for in-maint is an empty string",\r
+                g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf1").has("in-maint", false).hasNext());\r
+        assertTrue("Value of pserver should be updated since the value for in-maint is an empty string",\r
+                g.V().has("aai-node-type", "pserver").has("pserver-id", "pserver1").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vlan should be updated since the value for in-maint is an empty string",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan1").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vnfc should be updated since the value for in-maint is an empty string",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc1").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vserver should be updated since the value for in-maint is an empty string",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver1").has("in-maint", false).hasNext());\r
+        assertTrue("Value of nos-server should be updated since the value for in-maint is an empty string",\r
+                g.V().has("aai-node-type", "nos-server").has("nos-server-id", "nos-server1").has("in-maint", false).hasNext());\r
+    }\r
+    \r
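+    /**\r
+     * Vertices that already carry in-maint=true must be left untouched by the migration.\r
+     */\r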
+    @Test\r
+    public void testExistingTrueValues() {\r
+        assertTrue("Value of generic-vnf shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf2").has("in-maint", true).hasNext());\r
+        assertTrue("Value of l-interface shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface2").has("in-maint", true).hasNext());\r
+        assertTrue("Value of lag-interface shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "lag-interface").has("interface-name", "lag-interface2").has("in-maint", true).hasNext());\r
+        assertTrue("Value of logical-link shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "logical-link").has("link-name", "logical-link2").has("in-maint", true).hasNext());\r
+        assertTrue("Value of p-interface shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "p-interface").has("interface-name", "p-interface2").has("in-maint", true).hasNext());\r
+        assertTrue("Value of pnf shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf2").has("in-maint", true).hasNext());\r
+        assertTrue("Value of pserver shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "pserver").has("pserver-id", "pserver2").has("in-maint", true).hasNext());\r
+        assertTrue("Value of vlan shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan2").has("in-maint", true).hasNext());\r
+        assertTrue("Value of vnfc shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc2").has("in-maint", true).hasNext());\r
+        assertTrue("Value of vserver shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver2").has("in-maint", true).hasNext());\r
+        assertTrue("Value of nos-server shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "nos-server").has("nos-server-id", "nos-server2").has("in-maint", true).hasNext());\r
+    }\r
+    \r
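+    /**\r
+     * Vertices that already carry in-maint=false must be left untouched by the migration.\r
+     */\r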
+    @Test\r
+    public void testExistingFalseValues() {\r
+        assertTrue("Value of generic-vnf shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf3").has("in-maint", false).hasNext());\r
+        assertTrue("Value of l-interface shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface3").has("in-maint", false).hasNext());\r
+        assertTrue("Value of lag-interface shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "lag-interface").has("interface-name", "lag-interface3").has("in-maint", false).hasNext());\r
+        assertTrue("Value of logical-link shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "logical-link").has("link-name", "logical-link3").has("in-maint", false).hasNext());\r
+        assertTrue("Value of p-interface shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "p-interface").has("interface-name", "p-interface3").has("in-maint", false).hasNext());\r
+        assertTrue("Value of pnf shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf3").has("in-maint", false).hasNext());\r
+        assertTrue("Value of pserver shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "pserver").has("pserver-id", "pserver3").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vlan shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan3").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vnfc shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc3").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vserver shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver3").has("in-maint", false).hasNext());\r
+        assertTrue("Value of nos-server shouldn't be updated since in-maint already exists",\r
+                g.V().has("aai-node-type", "nos-server").has("nos-server-id", "nos-server3").has("in-maint", false).hasNext());\r
+    }\r
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v15/MigrateRadcomChangesTest.java b/src/test/java/org/onap/aai/migration/v15/MigrateRadcomChangesTest.java
new file mode 100644 (file)
index 0000000..ad101ed
--- /dev/null
@@ -0,0 +1,509 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v15;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+public class MigrateRadcomChangesTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private MigrateRadcomChanges migration;
+       private GraphTraversalSource g;
+
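+       /**
+        * Seeds generic-vnfs, service-instances, models, vf-modules, vservers, images, tenants,
+        * and cloud-regions with the edge structure the migration walks, then runs
+        * MigrateRadcomChanges against a spied TransactionalGraphEngine.
+        */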
+       @Before
+       public void setUp() throws Exception {
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+               
+               System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+               
+               Vertex genericVnf1 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-1")
+                               .property("vnf-name",  "name-1").property("vnf-type", "test")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").property("model-customization-id", "test").next();
+               Vertex genericVnf2 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-2")
+                               .property("vnf-name",  "name-2").property("vnf-type", "test")
+                               .property("model-invariant-id-local", "change").property("model-version-id-local", "change").property("model-customization-id", "change").next();
+               Vertex genericVnf3 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-3")
+                               .property("vnf-name",  "no-service").property("vnf-type", "test")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").property("model-customization-id", "test").next();
+               Vertex genericVnf4 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-4")
+                               .property("vnf-name",  "no-invariant").property("vnf-type", "test")
+                               .property("model-version-id-local", "test").property("model-customization-id", "test").next();
+               Vertex genericVnf5 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-5")
+                               .property("vnf-name",  "no-version").property("vnf-type", "test")
+                               .property("model-invariant-id-local", "test").property("model-customization-id", "test").next();
+               Vertex genericVnf6 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-6")
+                               .property("vnf-name",  "no-customization").property("vnf-type", "test")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").next();
+               Vertex genericVnf7 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-7")
+                               .property("vnf-name",  "no ids").property("vnf-type", "test").next();
+               Vertex genericVnf8 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-8")
+                               .property("vnf-name",  "many-service-1").property("vnf-type", "test")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").property("model-customization-id", "test").next();
+               Vertex genericVnf9 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-9")
+                               .property("vnf-name",  "many-service-2").property("vnf-type", "test")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").property("model-customization-id", "test").next();
+               Vertex genericVnf10 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-10")
+                               .property("vnf-name",  "multi-name").property("vnf-type", "test").next();
+               Vertex genericVnf11 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-11")
+                               .property("vnf-name",  "multi-name").property("vnf-type", "test")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").property("model-customization-id", "test").next();
+               Vertex genericVnf12 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-12")
+                               .property("vnf-name",  "wrong-type").property("vnf-type", "none").next();
+               Vertex genericVnf13 = g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "test-13")
+                               .property("vnf-name",  "wrong-name").property("vnf-type", "test")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").property("model-customization-id", "test").next();
+
+               Vertex serviceInstance1 = g.addV().property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "service-1")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").next();
+               Vertex serviceInstance2 = g.addV().property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "service-2")
+                               .property("model-invariant-id-local", "diff").property("model-version-id-local", "diff").next();
+               Vertex serviceInstance3 = g.addV().property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "no-invariant")
+                               .property("model-version-id-local", "test").next();
+               Vertex serviceInstance4 = g.addV().property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "no-version")
+                               .property("model-invariant-id-local", "test").next();
+               Vertex serviceInstance5 = g.addV().property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "no ids").next();
+               Vertex serviceInstance6 = g.addV().property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "service-many")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").next();
+               Vertex serviceInstance7 = g.addV().property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "wrong").next();
+               Vertex serviceInstance8 = g.addV().property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "connected-wrong")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").next();
+               
+               Vertex serviceModel = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "new-service-inv").property("model-type", "Service").next();
+               Vertex serviceModelVer = g.addV().property("aai-node-type", "model-ver")
+                               .property("model-version-id", "new-service-ver").property("model-name", "test-service")
+                               .property("version", "test").next();
+               Vertex resourceModel = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "new-resource-inv").property("model-type", "VNF-resource").next();
+               Vertex resourceModelVer = g.addV().property("aai-node-type", "model-ver")
+                               .property("model-version-id", "new-resource-ver").property("model-name", "test-resource")
+                               .property("version", "test").next();
+               Vertex resourceModelElement1 = g.addV().property("aai-node-type", "model-element")
+                               .property("model-element-uuid", "resource-element-start").property("new-data-del-flag", "T")
+                               .property("cardinality", "unbounded").next();
+               Vertex newVfModuleModelVer2 = g.addV().property("aai-node-type", "model-ver")
+                               .property("model-version-id", "new-vf-module-ver-2").property("model-name", "model-2")
+                               .property("version", "test").next();
+               Vertex newVfModuleModel2 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "new-vf-module-inv-2").property("model-type", "2").next();
+               Vertex resourceModelElement2 = g.addV().property("aai-node-type", "model-element")
+                               .property("model-element-uuid", "resource-element-depth-1").property("new-data-del-flag", "T")
+                               .property("cardinality", "unbounded").next();
+               Vertex newVfModuleModelVer3 = g.addV().property("aai-node-type", "model-ver")
+                               .property("model-version-id", "new-vf-module-ver-3").property("model-name", "model-3")
+                               .property("version", "test").next();
+               Vertex newVfModuleModel3 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "new-vf-module-inv-3").property("model-type", "3").next();
+               Vertex resourceModelElement3 = g.addV().property("aai-node-type", "model-element")
+                               .property("model-element-uuid", "resource-element-depth-2-1").property("new-data-del-flag", "T")
+                               .property("cardinality", "unbounded").next();
+               Vertex newVfModuleModelVer4 = g.addV().property("aai-node-type", "model-ver")
+                               .property("model-version-id", "new-vf-module-ver-4").property("model-name", "model-4")
+                               .property("version", "test").next();
+               Vertex newVfModuleModel4 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "new-vf-module-inv-4").property("model-type", "4").next();
+               Vertex resourceModelElement4 = g.addV().property("aai-node-type", "model-element")
+                               .property("model-element-uuid", "resource-element-depth-2-2").property("new-data-del-flag", "T")
+                               .property("cardinality", "unbounded").next();
+               Vertex newVfModuleModelVer5 = g.addV().property("aai-node-type", "model-ver")
+                               .property("model-version-id", "new-vf-module-ver-5").property("model-name", "model-5")
+                               .property("version", "test").next();
+               Vertex newVfModuleModel5 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "new-vf-module-inv-5").property("model-type", "5").next();
+               Vertex resourceModelElement5 = g.addV().property("aai-node-type", "model-element")
+                               .property("model-element-uuid", "resource-element-depth-2-3").property("new-data-del-flag", "T")
+                               .property("cardinality", "unbounded").next();
+               Vertex newVfModuleModelVer1 = g.addV().property("aai-node-type", "model-ver")
+                               .property("model-version-id", "new-vf-module-ver-1").property("model-name", "model-1")
+                               .property("version", "test").next();
+               Vertex newVfModuleModel1 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "new-vf-module-inv-1").property("model-type", "1").next();
+               
+               Vertex vfModule1 = g.addV().property("aai-node-type", "vf-module")
+                               .property("vf-module-id", "vf-module-1")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").next();
+               Vertex vfModule2 = g.addV().property("aai-node-type", "vf-module")
+                               .property("vf-module-id", "vf-module-2")
+                               .property("model-invariant-id-local", "test").property("model-version-id-local", "test").next();
+               Vertex vfModule3 = g.addV().property("aai-node-type", "vf-module")
+                               .property("vf-module-id", "no-invariant")
+                               .property("model-version-id-local", "test").next();
+               Vertex vfModule4 = g.addV().property("aai-node-type", "vf-module")
+                               .property("vf-module-id", "no-ver")
+                               .property("model-invariant-id-local", "test").next();
+               Vertex vfModule5 = g.addV().property("aai-node-type", "vf-module")
+                               .property("vf-module-id", "no-ids").next();
+               Vertex badVfModule = g.addV().property("aai-node-type", "vf-module")
+                               .property("vf-module-id", "bad").next();
+               
+               Vertex vserver1 = g.addV().property("aai-node-type", "vserver")
+                               .property("vserver-id", "vserver-1").property("vserver-name", "vname-1").next();
+               Vertex vserver2 = g.addV().property("aai-node-type", "vserver")
+                               .property("vserver-id", "vserver-2").property("vserver-name", "vname-2").next();
+               Vertex unchangedVserver = g.addV().property("aai-node-type", "vserver")
+                               .property("vserver-id", "unchanged").property("vserver-name", "unchanged").next();
+               
+               Vertex image1 = g.addV().property("aai-node-type", "image")
+                               .property("image-id", "image-id-1").property("image-name", "image-1").next();
+               Vertex image2 = g.addV().property("aai-node-type", "image")
+                               .property("image-id", "image-id-2").property("image-name", "image-2").next();
+               Vertex oldImage = g.addV().property("aai-node-type", "image")
+                               .property("image-id", "image-old").property("image-name", "image-old-name").next();
+               Vertex badImage = g.addV().property("aai-node-type", "image")
+                               .property("image-id", "image-bad").property("image-name", "image-bad").next();
+               
+               Vertex tenant1 = g.addV().property("aai-node-type", "tenant")
+                               .property("tenant-id", "tenant-id-1").property("tenant-name", "tenant-1").next();
+               Vertex tenant2 = g.addV().property("aai-node-type", "tenant")
+                               .property("tenant-id", "tenant-id-2").property("tenant-name", "tenant-2").next();
+               Vertex cloudRegion1 = g.addV().property("aai-node-type", "cloud-region")
+                               .property("cloud-region-id", "region-1").property("cloud-owner", "owner-1").next();
+               Vertex cloudRegion2 = g.addV().property("aai-node-type", "cloud-region")
+                               .property("cloud-region-id", "region-2").property("cloud-owner", "owner-2").next();
+       
+               
+               edgeSerializer.addEdge(g, genericVnf1, serviceInstance1);
+               edgeSerializer.addEdge(g, genericVnf2, serviceInstance2);
+               edgeSerializer.addEdge(g, genericVnf4, serviceInstance3);
+               edgeSerializer.addEdge(g, genericVnf5, serviceInstance4);
+               edgeSerializer.addEdge(g, genericVnf6, serviceInstance5);
+               edgeSerializer.addEdge(g, genericVnf8, serviceInstance6);
+               edgeSerializer.addEdge(g, genericVnf9, serviceInstance6);
+               edgeSerializer.addEdge(g, genericVnf12, serviceInstance8);
+               
+               edgeSerializer.addTreeEdge(g, genericVnf2, vfModule1);
+               edgeSerializer.addTreeEdge(g, genericVnf4, vfModule2);
+               edgeSerializer.addTreeEdge(g, genericVnf5, vfModule3);
+               edgeSerializer.addTreeEdge(g, genericVnf6, vfModule4);
+               edgeSerializer.addTreeEdge(g, genericVnf7, vfModule5);
+               edgeSerializer.addTreeEdge(g, genericVnf12, badVfModule);
+               
+               edgeSerializer.addTreeEdge(g, serviceModel, serviceModelVer);
+               edgeSerializer.addTreeEdge(g, resourceModel, resourceModelVer);
+               edgeSerializer.addTreeEdge(g, resourceModelVer, resourceModelElement1);
+               edgeSerializer.addEdge(g, resourceModelElement1, newVfModuleModelVer2);
+               edgeSerializer.addTreeEdge(g, newVfModuleModelVer2, newVfModuleModel2);
+               edgeSerializer.addTreeEdge(g, resourceModelElement1, resourceModelElement2);
+               edgeSerializer.addEdge(g, resourceModelElement2, newVfModuleModelVer3);
+               edgeSerializer.addTreeEdge(g, newVfModuleModelVer3, newVfModuleModel3);
+               edgeSerializer.addTreeEdge(g, resourceModelElement2, resourceModelElement3);
+               edgeSerializer.addTreeEdge(g, resourceModelElement2, resourceModelElement4);
+               edgeSerializer.addTreeEdge(g, resourceModelElement2, resourceModelElement5);
+               edgeSerializer.addEdge(g, resourceModelElement3, newVfModuleModelVer4);
+               edgeSerializer.addTreeEdge(g, newVfModuleModelVer4, newVfModuleModel4);
+               edgeSerializer.addEdge(g, resourceModelElement4, newVfModuleModelVer5);
+               edgeSerializer.addTreeEdge(g, newVfModuleModelVer5, newVfModuleModel5);
+               edgeSerializer.addEdge(g, resourceModelElement5, newVfModuleModelVer1);
+               edgeSerializer.addTreeEdge(g, newVfModuleModelVer1, newVfModuleModel1); 
+               
+               edgeSerializer.addEdge(g, vfModule1, vserver1);
+               edgeSerializer.addEdge(g, vfModule2, vserver2);
+               edgeSerializer.addEdge(g, vfModule4, unchangedVserver);
+               edgeSerializer.addEdge(g, vserver2, oldImage);
+               edgeSerializer.addEdge(g, unchangedVserver, badImage);
+               edgeSerializer.addTreeEdge(g, image1, cloudRegion1);
+               edgeSerializer.addTreeEdge(g, tenant1, cloudRegion1);
+               edgeSerializer.addTreeEdge(g, tenant1, vserver1);
+               edgeSerializer.addTreeEdge(g, image2, cloudRegion2);
+               edgeSerializer.addTreeEdge(g, tenant2, cloudRegion2);
+               edgeSerializer.addTreeEdge(g, tenant2, vserver2);
+               
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when(spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+               
+               migration = new MigrateRadcomChanges(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+       
+       @Test
+       public void testGenericVnfsUpdated() throws Exception {
+               // check if generic-vnf nodes are updated
+               
+               assertEquals("First generic-vnf updated invariant", "new-resource-inv", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "name-1").next().value("model-invariant-id-local"));
+               assertEquals("First generic-vnf updated version", "new-resource-ver", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "name-1").next().value("model-version-id-local"));
+               assertEquals("First generic-vnf updated customization", "new-resource-cust", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "name-1").next().value("model-customization-id"));
+               
+               assertEquals("Second generic-vnf updated invariant", "new-resource-inv", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "name-2").next().value("model-invariant-id-local"));
+               assertEquals("Second generic-vnf updated version", "new-resource-ver", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "name-2").next().value("model-version-id-local"));
+               assertEquals("Second generic-vnf updated customization", "new-resource-cust", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "name-2").next().value("model-customization-id"));
+               
+               assertEquals("Generic-vnf with no service updated invariant", "new-resource-inv", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-service").next().value("model-invariant-id-local"));
+               assertEquals("Generic-vnf with no service updated version", "new-resource-ver", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-service").next().value("model-version-id-local"));
+               assertEquals("Generic-vnf with no service updated customization", "new-resource-cust", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-service").next().value("model-customization-id"));
+               
+               assertEquals("Generic-vnf with no invariant updated invariant", "new-resource-inv", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-invariant").next().value("model-invariant-id-local"));
+               assertEquals("Generic-vnf with no invariant updated version", "new-resource-ver", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-invariant").next().value("model-version-id-local"));
+               assertEquals("Generic-vnf with no invariant updated customization", "new-resource-cust", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-invariant").next().value("model-customization-id"));
+               
+               assertEquals("Generic-vnf with no version updated invariant", "new-resource-inv", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-version").next().value("model-invariant-id-local"));
+               assertEquals("Generic-vnf with no version updated version", "new-resource-ver", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-version").next().value("model-version-id-local"));
+               assertEquals("Generic-vnf with no version updated customization", "new-resource-cust", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-version").next().value("model-customization-id"));
+               
+               assertEquals("Generic-vnf with no customization updated invariant", "new-resource-inv", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-customization").next().value("model-invariant-id-local"));
+               assertEquals("Generic-vnf with no customization updated version", "new-resource-ver", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-customization").next().value("model-version-id-local"));
+               assertEquals("Generic-vnf with no customization updated customization", "new-resource-cust", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-customization").next().value("model-customization-id"));
+               
+               assertEquals("Generic-vnf with no ids updated invariant", "new-resource-inv", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no ids").next().value("model-invariant-id-local"));
+               assertEquals("Generic-vnf with no ids updated version", "new-resource-ver", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no ids").next().value("model-version-id-local"));
+               assertEquals("Generic-vnf with no version updated customization", "new-resource-cust", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "no-version").next().value("model-customization-id"));
+               
+               assertEquals("First generic-vnf for many-to-service test updated invariant", "new-resource-inv", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "many-service-1").next().value("model-invariant-id-local"));
+               assertEquals("First generic-vnf for many-to-service test updated version", "new-resource-ver", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "many-service-1").next().value("model-version-id-local"));
+               assertEquals("First generic-vnf for many-to-service test updated customization", "new-resource-cust", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "many-service-1").next().value("model-customization-id"));
+               
+               assertEquals("Second generic-vnf for many-to-service test updated invariant", "new-resource-inv", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "many-service-2").next().value("model-invariant-id-local"));
+               assertEquals("Second generic-vnf for many-to-service test updated version", "new-resource-ver", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "many-service-2").next().value("model-version-id-local"));
+               assertEquals("Second generic-vnf for many-to-service test updated customization", "new-resource-cust", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "many-service-2").next().value("model-customization-id"));
+               
+               
+               assertEquals("First generic-vnf for multi-name test updated invariant", "new-resource-inv", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "test-10").next().value("model-invariant-id-local"));
+               assertEquals("First generic-vnf for multi-name test updated version", "new-resource-ver", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "test-10").next().value("model-version-id-local"));
+               assertEquals("First generic-vnf for multi-name test updated customization", "new-resource-cust", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "test-10").next().value("model-customization-id"));
+               
+               assertEquals("Second generic-vnf for multi-name test updated invariant", "new-resource-inv", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "test-11").next().value("model-invariant-id-local"));
+               assertEquals("Second generic-vnf for multi-name test updated version", "new-resource-ver", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "test-11").next().value("model-version-id-local"));
+               assertEquals("Second generic-vnf for multi-name test customization", "new-resource-cust", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "test-11").next().value("model-customization-id"));
+       }
+       
+
+       @Test
+       public void testServiceInstancesUpdated() throws Exception {
+               // check if service-instance nodes are updated  
+               
+               assertEquals("First service-instance updated invariant", "new-service-inv", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "service-1").next().value("model-invariant-id-local"));
+               assertEquals("First service-instance-updated version", "new-service-ver", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "service-1").next().value("model-version-id-local"));
+               
+               assertEquals("Second service-instance updated invariant", "new-service-inv", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "service-2").next().value("model-invariant-id-local"));
+               assertEquals("Second service-instance-updated version", "new-service-ver", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "service-2").next().value("model-version-id-local"));
+               
+               assertEquals("Service-instance with no invariant updated invariant", "new-service-inv", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "no-invariant").next().value("model-invariant-id-local"));
+               assertEquals("Service-instance with no invariant updated version", "new-service-ver", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "no-invariant").next().value("model-version-id-local"));
+               
+               assertEquals("Service-instance with no version updated invariant", "new-service-inv", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "no-version").next().value("model-invariant-id-local"));
+               assertEquals("Service-instance with no version updated version", "new-service-ver", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "no-version").next().value("model-version-id-local"));
+               
+               assertEquals("Service-instance with no ids updated invariant", "new-service-inv", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "no ids").next().value("model-invariant-id-local"));
+               assertEquals("Service-instance with no ids updated version", "new-service-ver", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "no ids").next().value("model-version-id-local"));
+               
+               assertEquals("Service-instance for many-to-service test updated invariant", "new-service-inv", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "service-many").next().value("model-invariant-id-local"));
+               assertEquals("Service-instance for many-to-service test updated version", "new-service-ver", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "service-many").next().value("model-version-id-local"));              
+       }
+       
+       @Test
+       public void testVfModulesUpdated() throws Exception {
+               // test if vf-module nodes are updated
+               
+               assertEquals("First vf-module updated invariant", "new-vf-module-inv-1", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module-1").next().value("model-invariant-id-local"));
+               assertEquals("First vf-module updated version", "new-vf-module-ver-1", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module-1").next().value("model-version-id-local"));
+               
+               assertEquals("Second vf-module updated invariant", "new-vf-module-inv-2", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module-2").next().value("model-invariant-id-local"));
+               assertEquals("Second vf-module updated version", "new-vf-module-ver-2", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module-2").next().value("model-version-id-local"));
+               
+               assertEquals("Vf-module with no invariant updated invariant", "new-vf-module-inv-3", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "no-invariant").next().value("model-invariant-id-local"));
+               assertEquals("Vf-module with no invariant updated version", "new-vf-module-ver-3", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "no-invariant").next().value("model-version-id-local"));
+               
+               assertEquals("Vf-module with no version updated invariant", "new-vf-module-inv-4", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "no-ver").next().value("model-invariant-id-local"));
+               assertEquals("Vf-module with no version updated version", "new-vf-module-ver-4", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "no-ver").next().value("model-version-id-local"));
+               
+               assertEquals("Vf-module with no ids updated invariant", "new-vf-module-inv-5", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "no-ids").next().value("model-invariant-id-local"));
+               assertEquals("Vf-module with no ids updated version", "new-vf-module-ver-5", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "no-ids").next().value("model-version-id-local"));
+       }
+       
+       @Test
+       public void testVserverAndImageUpdated() throws Exception {
+               // test if vserver-image relationships are updated
+               assertTrue("Vserver not connected to image is connected to new image",
+                               g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver-1").out("org.onap.relationships.inventory.Uses")
+                               .has("aai-node-type", "image").has("image-id", "image-id-1").hasNext());
+               assertTrue("Vserver connected to existing image is connected to new image",
+                               g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver-2").out("org.onap.relationships.inventory.Uses")
+                               .has("aai-node-type", "image").has("image-id", "image-id-2").hasNext());
+               assertFalse("Vserver connected to existing image is not connected to that image",
+                               g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver-2").out("org.onap.relationships.inventory.Uses")
+                               .has("aai-node-type", "image").has("image-id", "image-old").hasNext()); 
+               assertTrue("Existing image still exists",
+                               g.V().has("aai-node-type", "image").has("image-id", "image-old").hasNext());
+       }
+       
+       @Test
+       public void testNodesNotUpdated() throws Exception {
+               // negative tests
+       
+               assertFalse("Generic-vnf with wrong type has unchanged invariant", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "wrong-type").next()
+                               .property("model-invariant-id-local").isPresent());
+               assertFalse("Generic-vnf with wrong type has unchanged version", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "wrong-type").next()
+                               .property("model-version-id-local").isPresent());
+               assertFalse("Generic-vnf with wrong type has unchanged customization", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "wrong-type").next()
+                               .property("model-customizaiton-id").isPresent());
+               
+               assertEquals("Generic-vnf with wrong name has unchanged invariant", "test", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "wrong-name").next().value("model-invariant-id-local"));
+               assertEquals("Generic-vnf with wrong name has unchanged version", "test", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "wrong-name").next().value("model-version-id-local"));
+               assertEquals("Generic-vnf with wrong name has unchanged customization", "test", 
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-name", "wrong-name").next().value("model-customization-id"));
+               
+               assertFalse("Unconnected service-instance has unchanged invariant", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "wrong").next()
+                               .property("model-invariant-id-local").isPresent());
+               assertFalse("Unconnected service-instance has unchanged version", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "wrong").next()
+                               .property("model-version-id-local").isPresent());
+               
+               assertEquals("Service-instance connected to unctouched generic-vnf has unchanged invariant", "test", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "connected-wrong")
+                               .next().value("model-invariant-id-local"));
+               assertEquals("Service-instance connected to untouched generic-vnf has unchanged version", "test", 
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "connected-wrong")
+                               .next().value("model-version-id-local"));       
+               
+               assertFalse("Vf-module connected to untouched generic-vnf has unchanged invariant", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "bad")
+                               .next().property("model-invariant-id-local").isPresent());
+               assertFalse("Vf-module connected to untouched generic-vnf has unchanged version", 
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "bad")
+                               .next().property("model-version-id-local").isPresent());
+
+               assertTrue("Untouched vserver still connected to image",
+                               g.V().has("aai-node-type", "vserver").has("vserver-id", "unchanged").out("org.onap.relationships.inventory.Uses")
+                               .has("aai-node-type", "image").has("image-id", "image-bad").hasNext());
+       }
+       
+       @Test
+       public void testGetAffectedNodeTypes() {
+               Optional<String[]> types = migration.getAffectedNodeTypes();
+               Optional<String[]> expected = Optional.of(new String[]{"generic-vnf", "service-instance", "vf-module", "vserver", "image"});
+               
+               assertTrue(types.isPresent());
+               assertArrayEquals(expected.get(), types.get());
+       }
+
+       @Test
+       public void testGetMigrationName() {
+               String migrationName = migration.getMigrationName();
+
+               assertNotNull(migrationName);
+               assertEquals("MigrateRadcomChanges", migrationName);
+       }
+}
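
The setUp above hands the migration a Mockito spy of TransactionalGraphEngine whose traversal sources point at the test graph, with the read-only source built using TinkerPop's ReadOnlyStrategy. A minimal, self-contained sketch of that read-only pattern, assuming an in-memory TinkerGraph rather than the JanusGraph transaction used by AAISetup (the class name ReadOnlyTraversalSketch and the sample vertex are illustrative only):

    import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
    import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
    import org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph;

    public class ReadOnlyTraversalSketch {
        public static void main(String[] args) {
            // One in-memory graph, two traversal sources over it.
            TinkerGraph graph = TinkerGraph.open();
            GraphTraversalSource writable = graph.traversal();
            GraphTraversalSource readOnly = graph.traversal().withStrategies(ReadOnlyStrategy.instance());

            // Writes go through the writable source only.
            writable.addV("generic-vnf")
                    .property("aai-node-type", "generic-vnf")
                    .property("vnf-name", "name-1")
                    .iterate();

            // Queries through the read-only source work as usual; any mutating step
            // on it is rejected with a VerificationException when the traversal runs.
            long count = readOnly.V().has("aai-node-type", "generic-vnf").count().next();
            System.out.println(count);
        }
    }

Because ReadOnlyStrategy refuses traversals that contain mutating steps, a migration that only queries through getReadOnlyTraversalSource() cannot change the graph by mistake, which is why the test wires both sources into the spied engine.
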
index 6719d0f..c7ca3d3 100644 (file)
@@ -71,30 +71,35 @@ public class SendMigrationNotificationsTest extends AAISetup {
                        Vertex pnf1 = g.addV()
                                        .property("aai-node-type", "pnf")
                                        .property("pnf-name", SendMigrationNotifications.class.getSimpleName()+"-pnf-1")
+                                       .property("aai-uri", "/network/pnfs/pnf/" + SendMigrationNotifications.class.getSimpleName()+"-pnf-1")
                                        .property(AAIProperties.RESOURCE_VERSION, "123")
                                        .next();
 
                        Vertex pnf2 = g.addV()
                                        .property("aai-node-type", "pnf")
                                        .property("pnf-name", SendMigrationNotifications.class.getSimpleName()+"-pnf-2")
+                                       .property("aai-uri", "/network/pnfs/pnf/" + SendMigrationNotifications.class.getSimpleName()+"-pnf-2")
                                        .property(AAIProperties.RESOURCE_VERSION, "456")
                                        .next();
 
                        Vertex pnf3 = g.addV()
                                        .property("aai-node-type", "pnf")
                                        .property("pnf-name", SendMigrationNotifications.class.getSimpleName()+"-pnf-3")
+                                       .property("aai-uri", "/network/pnfs/pnf/" + SendMigrationNotifications.class.getSimpleName()+"-pnf-3")
                                        .property(AAIProperties.RESOURCE_VERSION, "111")
                                        .next();
 
                        Vertex pinterface1 = g.addV()
                                        .property("aai-node-type", "p-interface")
                                        .property("interface-name", SendMigrationNotifications.class.getSimpleName()+"-pinterface-1")
+                                       .property("aai-uri", "/network/pnfs/pnf/" + SendMigrationNotifications.class.getSimpleName()+"-pnf-1" + "/p-interfaces/p-interface/" + SendMigrationNotifications.class.getSimpleName()+"-pinterface-1")
                                        .property(AAIProperties.RESOURCE_VERSION, "789")
                                        .next();
 
                        Vertex pserver1 = g.addV()
                                        .property("aai-node-type", "pserver")
                                        .property("hostname", SendMigrationNotifications.class.getSimpleName()+"-pserver-1")
+                                       .property("aai-uri", "/cloud-infrastructure/pservers/pserver/" + SendMigrationNotifications.class.getSimpleName()+"-pserver-1")
                                        .property(AAIProperties.RESOURCE_VERSION, "333")
                                        .next();
 
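
The aai-uri values added above follow each node's REST resource path: a top-level pnf gets /network/pnfs/pnf/<pnf-name>, a pserver gets /cloud-infrastructure/pservers/pserver/<hostname>, and a child p-interface appends its own segment to the parent pnf's uri. A small sketch of that composition, with helper names that are illustrative rather than part of the A&AI API:

    public final class AaiUriSketch {

        // Top-level pnf uri, e.g. /network/pnfs/pnf/example-pnf-1
        static String pnfUri(String pnfName) {
            return "/network/pnfs/pnf/" + pnfName;
        }

        // A child p-interface uri is the parent uri plus its own path segment,
        // mirroring the properties set in the fixture above.
        static String pInterfaceUri(String parentPnfUri, String interfaceName) {
            return parentPnfUri + "/p-interfaces/p-interface/" + interfaceName;
        }

        public static void main(String[] args) {
            String pnf = pnfUri("example-pnf-1");
            System.out.println(pnf);
            System.out.println(pInterfaceUri(pnf, "example-pinterface-1"));
        }
    }
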
diff --git a/src/test/resources/groomingInput b/src/test/resources/groomingInput
new file mode 100644 (file)
index 0000000..4189b0f
--- /dev/null
@@ -0,0 +1,51 @@
+ ============ Summary ==============
+Ran PARTIAL data grooming just looking at data added/updated in the last 100 minutes.
+
+Ran these nodeTypes:
+
+There were this many delete candidates from previous run =  0
+Deleted this many delete candidates =  0
+Dummy-index-update to delete candidates =  0
+Total number of nodes looked at =  0
+Ghost Nodes identified = 0
+Orphan Nodes identified =  0
+Missing aai-node-type Nodes identified =  0
+Bad Edges identified =  3
+Duplicate Groups count =  0
+MisMatching Label/aai-node-type count =  0
+
+ ------------- Delete Candidates ---------
+DeleteCandidate: Bad EDGE Edge-id = [c9c-j28-74l-izc]
+DeleteCandidate: Bad EDGE Edge-id = [8oz-cns-36d-cso]
+DeleteCandidate: Bad EDGE Edge-id = [9w5-9m0-f8d1-9i0]
+
+-- NOTE - To see DeleteCandidates for Duplicates, you need to look in the Duplicates Detail section below.
+
+ ------------- GHOST NODES - detail
+ ------------- Missing aai-node-type NODES - detail:
+ ------------- Missing Dependent Edge ORPHAN NODES - detail:
+ ------------- EDGES pointing to empty/bad vertices:
+>  Edge pointing to bad vertex (Vid = 24600) EdgeId = c9c-j28-74l-izc
+Label: [org.onap.relationships.inventory.MemberOf]
+Prop: [p[private->false]], val = [false]
+Prop: [p[aai-uuid->9eb4b690-0572-4504-9]], val = [9eb4b690-0572-4504-97d1-c4870108c7ea]
+Prop: [p[delete-other-v->NONE]], val = [NONE]
+Prop: [p[contains-other-v->NONE]], val = [NONE]
+Prop: [p[prevent-delete->IN]], val = [IN]
+
+>  Edge pointing to bad vertex (Vid = 16408) EdgeId = 8oz-cns-36d-cso
+Label: [org.onap.relationships.inventory.BelongsTo]
+Prop: [p[private->false]], val = [false]
+Prop: [p[prevent-delete->IN]], val = [IN]
+Prop: [p[contains-other-v->IN]], val = [IN]
+Prop: [p[aai-uuid->c7a0abfd-85eb-4893-8]], val = [c7a0abfd-85eb-4893-8440-a50d21eb8460]
+Prop: [p[delete-other-v->NONE]], val = [NONE]
+
+>  Edge pointing to bad vertex (Vid = 12312) EdgeId = 9w5-9m0-f8d1-9i0
+Label: [BadEdge]
+
+ ------------- Duplicates:
+ ------------- Mis-matched Label/aai-node-type Nodes:
+
+ ------------- Got these errors while processing:
+
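
The groomingInput fixture above mimics the summary report that DataGrooming writes: a counts section, the delete-candidate lines, and per-edge detail for the bad edges. The candidate edge ids are the bracketed values on the DeleteCandidate lines. A minimal sketch of pulling those ids out of such a report, assuming this exact line format (the class name and hard-coded path are illustrative, not the tool's own parser):

    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.List;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;
    import java.util.stream.Collectors;

    public class GroomingCandidateReader {
        // Matches lines like: DeleteCandidate: Bad EDGE Edge-id = [c9c-j28-74l-izc]
        private static final Pattern EDGE_CANDIDATE =
                Pattern.compile("DeleteCandidate: Bad EDGE Edge-id = \\[([^\\]]+)]");

        public static void main(String[] args) throws Exception {
            List<String> edgeIds = Files.readAllLines(Paths.get("src/test/resources/groomingInput")).stream()
                    .map(EDGE_CANDIDATE::matcher)
                    .filter(Matcher::find)
                    .map(m -> m.group(1))
                    .collect(Collectors.toList());
            // For the fixture above this prints [c9c-j28-74l-izc, 8oz-cns-36d-cso, 9w5-9m0-f8d1-9i0]
            System.out.println(edgeIds);
        }
    }
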
diff --git a/src/test/resources/migration-input-files/ALTS-migration-data/ALTS-migration-input.csv b/src/test/resources/migration-input-files/ALTS-migration-data/ALTS-migration-input.csv
new file mode 100644 (file)
index 0000000..1353a6e
--- /dev/null
@@ -0,0 +1,5 @@
+SOFTWARE_ASSET_TAG,SOFTWARE_PACKAGE_NAME,SOFTWARE_PACKAGE_VERSION,HOST_NAME,OS_AND_VERSION,TRACKING_LICENSE,LICENSE_KEY,NUM_CPUS,NUM_CORES,VPMO_NUMBER,SWM_USER_ID,USED,UPDATE_DATE,REP_UPDATE,PHONE_HOME_DATE,RESOURCE_UUID,PURCHASING_SYSTEM_SOURCE,ASSIGNMENT_STATUS,ASSIGNMENT_DATE,VNF_ID,LICENSE_NUMBER,INVARIANT_UUID
+new-ruuid-entitlement,guuid-entitlement,Unallocated,NEWHOST-123456,Unallocated,N,,,,,,Y,1-Jul-16,1-Jul-16,,GPS-45908182,GPS,in use,1-Jul-16,123456789,,
+new-ruuid-license,guuid-license,Unallocated,NEWHOST-123456,Unallocated,N,,,,,,Y,1-Jul-16,1-Jul-16,,GPS-45908182,GPS,in use,1-Jul-16,123456789,,
+new-ruuid-entitlement2,guuid,Unallocated,NEWHOST-123456,Unallocated,N,,,,,,Y,1-Jul-16,1-Jul-16,,GPS-45908182,GPS,in use,1-Jul-16,23456789,,
+new-ruuid-license2,guuid,Unallocated,NEWHOST-123456,Unallocated,N,,,,,,Y,1-Jul-16,1-Jul-16,,GPS-45908182,GPS,in use,1-Jul-16,23456789,,
diff --git a/src/test/resources/migration-input-files/CloudRegion-ART-migration-data/CloudRegion-ART-migration-data.csv b/src/test/resources/migration-input-files/CloudRegion-ART-migration-data/CloudRegion-ART-migration-data.csv
new file mode 100644 (file)
index 0000000..e087cff
--- /dev/null
@@ -0,0 +1,8 @@
+Region,Alias,Production,Upgrade Cycle
+akr1,,ORT,E
+akr1b,,ST,C
+all1,,TEST,D
+all1b,,ORT,G
+alp1,,E2E,G
+alp1b,,TEST,E
+ams1b,amsnl1b,ST,B
diff --git a/src/test/resources/migration-input-files/VNT-migration-data/VNT-migration-input.csv b/src/test/resources/migration-input-files/VNT-migration-data/VNT-migration-input.csv
new file mode 100644 (file)
index 0000000..bade411
--- /dev/null
@@ -0,0 +1,24 @@
+"entitlement-pool-uuid  vendor-allowed-max-bandwidth (VNT)"
+"599a2d74-cfbd-413d-aedb-ec4875817313  1000"
+"ea9a547e-137b-48e9-a788-c3fb4e631a2a  3000"
+"8ce33a8c-b83f-4103-9967-caf95dc47009  10000"
+"d3d93cc4-8961-43ef-a916-161107fe2721  400"
+"63a5bd41-4e43-4c14-8f68-1d518970cc2e  100"
+"4b3cc5a8-39bb-46a3-b112-f7041f33c8b5  10000"
+"1b33b22f-559e-441f-ab5d-01be6d0e6fd9  100"
+"e40a3806-ea7f-43cd-99d0-7694d7daff24  400"
+"f8a20afe-078b-47eb-b039-8ad747799274  200"
+"c00d30be-8db2-4c2c-99af-7e6fc89fba74  3000"
+"360d847c-76e9-403c-9c3c-364dc07eed16  3000"
+"d4248a5a-95d1-42a6-8bec-59dd76dc757f  200"
+"e60183ad-5664-4911-a53b-3cd8639569b3  400"
+"867cc331-59b6-4432-a7e4-2734798a9d1d  1000"
+"6813c65c-7c1c-4bcf-8330-70828fa776c3  100"
+"82530f84-05d6-4548-a3de-8e968d099522  200"
+"8b176fde-7e7d-4d82-a02a-1bf2dc9a1cbf  1000"
+"ed88c374-aa3e-4027-b05e-f58cb95e99c7  3000"
+"987750b0-e3ac-42d0-b491-76de11cac222  1000"
+"3f9000aa-4fca-405e-ac5a-a67080ed91e5  100"
+"275829e8-6132-44d7-b001-d10043f5ad2e  400"
+"09b46095-9424-4f06-bbcd-1283f70d3d3d  10000"
+"16a0d09c-4d56-4cfd-a3e3-fa637aa20b28  200"
diff --git a/src/test/resources/migration-input-files/radcom-changes/INPUT-MODEL.csv b/src/test/resources/migration-input-files/radcom-changes/INPUT-MODEL.csv
new file mode 100644 (file)
index 0000000..17c9a11
--- /dev/null
@@ -0,0 +1,9 @@
+Model-type,vnf-type,model-invariant-id,model-version-id,model-customization-id,model-name,image-name
+Service,test,new-service-inv,new-service-ver,,,
+VNF-resource,test,new-resource-inv,new-resource-ver,new-resource-cust,,
+vf-module-1,,,,,model-1,image-1
+vf-module-2,,,,,model-2,image-2
+no-invariant,,,,,model-3,
+no-ver,,,,,model-4,keep-image
+no-ids,,,,,model-5,
+
diff --git a/src/test/resources/migration-input-files/radcom-changes/INPUT-VNF.csv b/src/test/resources/migration-input-files/radcom-changes/INPUT-VNF.csv
new file mode 100644 (file)
index 0000000..2cbe329
--- /dev/null
@@ -0,0 +1,12 @@
+vnf-name,vnf-type
+name-1,test
+name-2,test
+no-service,test
+no-invariant,test
+no-version,test
+no-customization,test
+no ids,test
+many-service-1,test
+many-service-2,test
+multi-name,test
+wrong-type,test
diff --git a/src/test/resources/migration-input-files/sarea-inventory/circuitIds.csv b/src/test/resources/migration-input-files/sarea-inventory/circuitIds.csv
new file mode 100644 (file)
index 0000000..f1a8caf
--- /dev/null
@@ -0,0 +1,6 @@
+pnf,port-aid,existing-reservation-circuit-id,existing-alias-collector-circuit-id ,new-reservation-circuit-id,new-alias-collector-circuit-id 
+pnf-1,p-interface-1,1,,10,
+pnf-2,p-interface-2,2,,20,
+pnf-3,p-interface-3,,,30,
+pnf-4,p-interface-4,3,,,
+pnf-5,p-interface-5,,,,
diff --git a/src/test/resources/migration-input-files/sarea-inventory/hub.csv b/src/test/resources/migration-input-files/sarea-inventory/hub.csv
new file mode 100644 (file)
index 0000000..45d1e1c
--- /dev/null
@@ -0,0 +1,8 @@
+ptnii-name,internal-vlan,uni-svlan,nni-svlan,evc-name
+SGSNGP2002MG2,4054,740,6,evc-name-1
+PHMKTI1002MG2,4084,486,16,evc-name-2
+N/A,,,,
+SGSNGP2002MG2,4054,740,12,evc-name-2
+SGSNGP2002MG2,4054,740,12,evc-name-2
+SGSNGP2002MG2,4054,740,12,evc-name-3
+SGSNGP2002MG2,4054,740,12,evc-name-4
diff --git a/src/test/resources/migration-input-files/sarea-inventory/inv.csv b/src/test/resources/migration-input-files/sarea-inventory/inv.csv
new file mode 100644 (file)
index 0000000..33463ec
--- /dev/null
@@ -0,0 +1,5 @@
+ptnii-name,fic,equipment-model,equipment-role,equipment-role-additional,ip-addr,subnet-mask,slot-name,card-type,card-port-lock,card-vlan-lock,port-aid,port-type,port-role,port-lock,vlan-lock,reservation-name,collector-interconnect-type,tag-mode,media-type,media-speed-value+media-speed-units,uni-cir-value+uni-cir-units,evc-name
+pnf-name-collector-1,06000D.121,5150,AED,,2001:1890:fcfe:7000:7021:0:1:2,64,,,,,"1.7   ",SFP_1GE/Ethernet_10/100/1000M,ACCESS,N,N,M0651881_ST,SHARED,DOUBLE,SFP-1GE-LX,1000Mbps,,evc-name-1
+pnf-name-collector-1,06000D.121,5150,AED,,2001:1890:fcfe:7000:7021:0:1:2,64,,,,,"1.8   ",SFP_1GE/Ethernet_10/100/1000M,ACCESS,N,N,M0651882_ST,SHARED,DOUBLE,SFP-1GE-LX,1000Mbps,,evc-name-3
+pnf-name-1,06000D.121,5150,AED,,2001:1890:fcfe:7000:7021:0:1:2,64,,,,,1.1,SFP_1GE/Ethernet_10/100/1000M,ACCESS,N,N,M0651882_ST,SHARED,DOUBLE,SFP-1GE-LX,1000Mbps,,evc-name-3
+pnf-name-1,06000D.121,5150,AED,,2001:1890:fcfe:7000:7021:0:1:2,64,,,,,1.2,SFP_1GE/Ethernet_10/100/1000M,ACCESS,N,N,M0651882_ST,SHARED,DOUBLE,SFP-1GE-LX,1000Mbps,,evc-name-3
diff --git a/src/test/resources/migration-input-files/sarea-inventory/ivlanData.csv b/src/test/resources/migration-input-files/sarea-inventory/ivlanData.csv
new file mode 100644 (file)
index 0000000..2085a5b
--- /dev/null
@@ -0,0 +1,9 @@
+EVC Name, PNF (PTNII), Interface AID, ivlan,UNI_NNI
+test/evc/one, pnf1,11111.1,111,UNI
+test/evc/one, pnf1, lag-interface1,222,UNI
+test/evc/one, pnf1,11111.2,333,UNI
+test/evc/one, pnf1, lag-interface2,444,UNI
+test/evc/one_2, pnf1,11111.3,555,UNI
+test/evc/one_2, pnf1, lag-interface3,666,UNI
+test/evc/two, pnf2,22222.2,777,UNI
+test/evc/two, pnf2, lag-interface2,888,UNI
diff --git a/src/test/resources/migration-input-files/sarea-inventory/path.csv b/src/test/resources/migration-input-files/sarea-inventory/path.csv
new file mode 100644 (file)
index 0000000..e39b5cf
--- /dev/null
@@ -0,0 +1,11 @@
+pe/vpe-name,evc-name,subscriber-name,esp-name,network-side-circuit-id,network-side-tag-mode,cvlan,svlan,ptnii-name,ipv4 MGMT Address/Loopback 0,ipv6 MGMT Address,slot-name,port-aid,port-type,esp-side-circuit-id,esp-side-tag-mode,cvlan,svlan,ptnii-name,ipv4 MGMT Address/Loopback 0,ipv6 MGMT Address,slot-name,port-aid,port-type
+SGSNGP3001ME2,evc-name-4,AT&T  Global Access Management_000000000,ST,IZEZ.508988..ATI,DOUBLE,33,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.1,SFP_1GE/Ethernet_10/100/1000M,M0630051,DOUBLE,,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.41,SFP_1GE/Ethernet_10/100/1000M
+SGSNGP3001ME2,evc-name-4,AT&T  Global Access Management_000000000,ST,IZEZ.508988..ATI,DOUBLE,33,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.1,SFP_1GE/Ethernet_10/100/1000M,M0630051,DOUBLE,,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.41,SFP_1GE/Ethernet_10/100/1000M
+SGSNGP3001ME2,evc-name-1,AT&T  Global Access Management_000000000,ST,IZEZ.508988..ATI,DOUBLE,33,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.1,SFP_1GE/Ethernet_10/100/1000M,M0630051,DOUBLE,,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.41,SFP_1GE/Ethernet_10/100/1000M
+PLWRSW1002ME2,evc-name-2,AT&T  Global Access Management_000000000,BPT,BFEZ.552785..ATI,DOUBLE,,14,pnf-name-2,32.109.138.34,2001:1890:fcfe:4000:6161:0:1:1,"MODULE2     ",ae1_2.1,XFP_10GE,66595,DOUBLE,,401,pnf-name-2,32.109.138.34,2001:1890:fcfe:4000:6161:0:1:1,,1.25,SFP_1GE/Ethernet_10/100/1000M
+,evc-name-2,,,IZEZ.517090..ATI,DOUBLE,,4,pnf-name-3,32.109.138.50,2001:1890:fcfe:4000:6161:0:1:2,,1.32,SFP_1GE/Ethernet_10/100/1000M,BFEZ.552785..ATI,DOUBLE,,14,pnf-name-3,32.109.138.50,2001:1890:fcfe:4000:6161:0:1:2,MODULE2,ae1_2.1,XFP_10GE
+SGSNGP2003ME2,evc-name-3,AT&T  Global Access Management_000000000,ST,BFEZ.536900..ATI,DOUBLE,34,740,pnf-name-4,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,"MODULE3    ",ae101_3.1,XFP_10GE,M0651881,DOUBLE,34,8,pnf-name-4,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,,1.7,SFP_1GE/Ethernet_10/100/1000M
+,evc-name-3,,,BFEZ.542287..ATI,DOUBLE,37,740,pnf-name-5,32.109.201.14,,,104_4,SFP_1GE/SFP+_10GE,BFEZ.536900..ATI,DOUBLE,35,740,pnf-name-5,32.109.201.14,,,101_1,SFP_1GE/SFP+_10GE
+,evc-name-3,,,IZEZ.597112..ATI,DOUBLE,36,3,pnf-name-6,32.120.75.66,2001:1890:fcfe:7000:7021:0:1:5,,1.39,SFP_1GE/Ethernet_10/100/1000M,BFEZ.542287..ATI,DOUBLE,36,740,pnf-name-6,32.120.75.66,2001:1890:fcfe:7000:7021:0:1:5,MODULE3,ae104_3.1,XFP_10GE
+SGSNGP2003ME2,evc-name-3,AT&T  Global Access Management_000000000,ST,BFEZ.536900..ATI,DOUBLE,34,740,pnf-name-1,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,"MODULE3    ",1.41,XFP_10GE,M0651881,DOUBLE,34,8,pnf-name-1,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,,1.1,SFP_1GE/Ethernet_10/100/1000M
+SGSNGP2003ME2,evc-name-3,AT&T  Global Access Management_000000000,ST,BFEZ.536900..ATI,DOUBLE,34,740,pnf-name-1,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,"MODULE3    ",1.A1,XFP_10GE,M0651881,DOUBLE,34,8,pnf-name-1,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,,1.1,SFP_1GE/Ethernet_10/100/1000M
diff --git a/src/test/resources/migration-input-files/sarea-inventory/sar.csv b/src/test/resources/migration-input-files/sarea-inventory/sar.csv
new file mode 100644 (file)
index 0000000..e565bbd
--- /dev/null
@@ -0,0 +1,5 @@
+evc-name,subscriber-name,esp-name,bearer-circuit-id,bearer-tag-mode,cvlan,svlan,ptnii-name,slot-name,port-aid,port-type,collector-circuit-id,collector-tag-mode,cvlan,svlan,ptnii-name,slot-name,port-aid,port-type,espEVCCircuit ID,EVC Access CIR
+evc-name-1,AT&T  Global Access Management_000000000,PLDT,IZEZ.573163..ATI,DOUBLE,,2,pnf-name-bearer-1,,p-int-bearer-1,SFP_1GE/Ethernet_10/100/1000M,A&TAP01-052013-14812_PLDT,DOUBLE,,41,pnf-name-collector-1,,p-int-collector-1,SFP_1GE/Ethernet_10/100/1000M,,40Mbps
+evc-name-2,AT&T  Global Access Management_000000000,PLDT,IZEZ.573163..ATI,DOUBLE,,2,pnf-name-bearer-2,,p-int-bearer-2,SFP_1GE/Ethernet_10/100/1000M,A&TAP01-052013-14812_PLDT,DOUBLE,,41,pnf-name-collector-1,,p-int-collector-2,SFP_1GE/Ethernet_10/100/1000M,,4Mbps
+evc-name-3,AT&T  Global Access Management_000000000,PLDT,IZEZ.573163..ATI,DOUBLE,,2,pnf-name-bearer-1,,p-int-bearer-2,SFP_1GE/Ethernet_10/100/1000M,A&TAP01-052013-14812_PLDT,DOUBLE,,41,pnf-name-collector-1,,p-int-collector-2,SFP_1GE/Ethernet_10/100/1000M,,4Mbps
+evc-name-4,AT&T  Global Access Management_000000000,PLDT,IZEZ.573163..ATI,DOUBLE,,2,pnf-name-bearer-1,,p-int-bearer-1,SFP_1GE/Ethernet_10/100/1000M,A&TAP01-052013-14812_PLDT,DOUBLE,,41,pnf-name-collector-1,,p-int-collector-1,SFP_1GE/Ethernet_10/100/1000M,,40Mbps
diff --git a/src/test/resources/migration-input-files/sarea-inventory/secondary-hub-path.csv b/src/test/resources/migration-input-files/sarea-inventory/secondary-hub-path.csv
new file mode 100644 (file)
index 0000000..fc833c4
--- /dev/null
@@ -0,0 +1,11 @@
+pe/vpe-name,evc-name,subscriber-name,esp-name,network-side-circuit-id,network-side-tag-mode,cvlan,svlan,ptnii-name,ipv4 MGMT Address/Loopback 0,ipv6 MGMT Address,slot-name,port-aid,port-type,esp-side-circuit-id,esp-side-tag-mode,cvlan,svlan,ptnii-name,ipv4 MGMT Address/Loopback 0,ipv6 MGMT Address,slot-name,port-aid,port-type
+SGSNGP3001ME2,evc-name-4,AT&T  Global Access Management_000000000,ST,IZEZ.508988..ATI,DOUBLE,33,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.1,SFP_1GE/Ethernet_10/100/1000M,M0630051,DOUBLE,,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.41,SFP_1GE/Ethernet_10/100/1000M
+SGSNGP3001ME2,evc-name-4,AT&T  Global Access Management_000000000,ST,IZEZ.508988..ATI,DOUBLE,33,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.1,SFP_1GE/Ethernet_10/100/1000M,M0630051,DOUBLE,,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.41,SFP_1GE/Ethernet_10/100/1000M
+SGSNGP3001ME2,evc-name-1,AT&T  Global Access Management_000000000,ST,IZEZ.508988..ATI,DOUBLE,33,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.1,SFP_1GE/Ethernet_10/100/1000M,M0630051,DOUBLE,,15,pnf-name-1,32.120.75.3,2001:1890:fcfe:7000:7021:0:1:1,,1.41,SFP_1GE/Ethernet_10/100/1000M
+PLWRSW1002ME2,evc-name-2,AT&T  Global Access Management_000000000,BPT,BFEZ.552785..ATI,DOUBLE,,14,pnf-name-2,32.109.138.34,2001:1890:fcfe:4000:6161:0:1:1,"MODULE2     ",ae1_2.1,XFP_10GE,66595,DOUBLE,,401,pnf-name-2,32.109.138.34,2001:1890:fcfe:4000:6161:0:1:1,,1.25,SFP_1GE/Ethernet_10/100/1000M
+,evc-name-2,,,IZEZ.517090..ATI,DOUBLE,,4,pnf-name-3,32.109.138.50,2001:1890:fcfe:4000:6161:0:1:2,,1.32,SFP_1GE/Ethernet_10/100/1000M,BFEZ.552785..ATI,DOUBLE,,14,pnf-name-3,32.109.138.50,2001:1890:fcfe:4000:6161:0:1:2,MODULE2,ae2_2.1,XFP_10GE
+SGSNGP2003ME2,evc-name-3,AT&T  Global Access Management_000000000,ST,BFEZ.536900..ATI,DOUBLE,34,740,pnf-name-4,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,"MODULE3    ",ae101_3.1,XFP_10GE,M0651881,DOUBLE,34,8,pnf-name-4,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,,1.7,SFP_1GE/Ethernet_10/100/1000M
+,evc-name-3,,,BFEZ.542287..ATI,DOUBLE,37,740,pnf-name-5,32.109.201.14,,,104_4,SFP_1GE/SFP+_10GE,BFEZ.536900..ATI,DOUBLE,35,740,pnf-name-5,32.109.201.14,,,101_1,SFP_1GE/SFP+_10GE
+,evc-name-3,,,IZEZ.597112..ATI,DOUBLE,36,3,pnf-name-6,32.120.75.66,2001:1890:fcfe:7000:7021:0:1:5,,1.39,SFP_1GE/Ethernet_10/100/1000M,BFEZ.542287..ATI,DOUBLE,36,740,pnf-name-6,32.120.75.66,2001:1890:fcfe:7000:7021:0:1:5,MODULE3,ae104_3.1,XFP_10GE
+SGSNGP2003ME2,evc-name-3,AT&T  Global Access Management_000000000,ST,BFEZ.536900..ATI,DOUBLE,34,740,pnf-name-1,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,"MODULE3    ",1.41,XFP_10GE,M0651881,DOUBLE,34,8,pnf-name-1,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,,1.1,SFP_1GE/Ethernet_10/100/1000M
+SGSNGP2003ME2,evc-name-3,AT&T  Global Access Management_000000000,ST,BFEZ.536900..ATI,DOUBLE,34,740,pnf-name-1,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,"MODULE3    ",1.A1,XFP_10GE,M0651881,DOUBLE,34,8,pnf-name-1,32.120.75.18,2001:1890:fcfe:7000:7021:0:1:2,,1.1,SFP_1GE/Ethernet_10/100/1000M
diff --git a/src/test/resources/migration-input-files/widget-model-migration-data/widget-model-migration-input.csv b/src/test/resources/migration-input-files/widget-model-migration-data/widget-model-migration-input.csv
new file mode 100644 (file)
index 0000000..0d723b5
--- /dev/null
@@ -0,0 +1,92 @@
+action,fd7fb09e-d930-41b9-b83f-cfde9df48640,af593b4b-490e-4665-ad74-2f6351c0a7ce
+action-data,2f80c596-27e5-4ca9-b5bb-e03a7fd4c0fd,9551346c-7d8b-4daf-9926-b93e96e2344a
+allotted-resource,7ad0915f-25c0-4a70-b9bc-185a75f87564,f6d6a23d-a1a9-48ff-8419-b6530da2d381
+availability-zone,6c092fb1-21b2-456b-9e01-67fb4de1896e,61b88c01-d819-41c0-8e21-7fd7ba47148e
+az-and-dvs-switches,b2dea88d-78a0-49bf-95c9-5819df08e966,53dc00d4-e6d9-48ec-b6cc-3d3797e9b896
+class-of-service,d2fb27cc-15eb-4c4e-828e-71d41aaecc5b,18094b19-d16d-4822-8acf-e92c6aefa178
+cloud-region,2a160989-b202-47dd-874b-4a0f275998f7,425b2158-e51d-4509-9945-dad4556474a3
+complex,3a8ab1ee-9220-4fe8-b89c-9251d160ddc2,af91c2f7-35fc-43cf-a13d-443f385b2353
+configuration,5a175add-57e4-4a5d-8b02-c36f1d69c52b,166c050d-f69d-4305-943e-0bc58c3a26cf
+connector,22104c9f-29fd-462f-be07-96cd6b46dd33,4c01c948-7607-4d66-8a6c-99c2c2717936
+constrained-element-set,01102126-9c04-4a89-945b-b131e61e95d7,c0292b4f-ee97-40cc-8c2e-f967c48f5701
+ctag-assignment,44e5cb1f-0938-41aa-b766-d4595109fe89,fcb8d46b-b656-4ad6-8fa4-22cef74b443f
+ctag-pool,2056c41f-23b9-4de7-9f50-819adad37d76,46c51d4e-d67e-4a9c-b1f5-49b1e9c6fcaa
+customer,d4df5c27-98a1-4812-a8aa-c17f055b7a3f,c1d4305f-cdbd-4bbe-9069-a2f4978fd89e
+cvlan-tag-entry,c3878ffb-8d85-4114-bee6-e4074a9db10b,245cf4b0-7cc5-4eea-bbd9-753e939adcab
+dvs-switch,4cb44ae8-e3ab-452a-9f95-bcc8a44c55ea,98fbb471-1f86-428e-bd8a-c8a25de6fa23
+edge-prop-names,f0442326-8201-4d0e-857c-74b4ddcbfc9f,7a08cad4-8759-46a5-8245-095d1ba57ac6
+element-choice-set,af27fbfd-598d-44da-aeae-0f9d3a5fcd6a,9a011958-7165-47a3-b872-00951d1f09ae
+entitlement,7e27ba2e-b7db-4e13-9fae-d142152ef98a,ae75b5a0-d5e1-4f3a-b8fb-37626a753da3
+esr-system-info,5ded840b-42aa-4692-a7ac-9a6ed5f3146b,0562518b-e5b3-49f4-8ad8-19b43766c79d
+flavor,36200fb5-f251-4f5d-a520-7c5ad5c2cd4b,bace8d1c-a261-4041-9e37-823117415d0f
+generic-vnf,93a6166f-b3d5-4f06-b4ba-aed48d009ad9,acc6edd8-a8d4-4b93-afaa-0994068be14c
+group-assignment,fe578080-ce19-4604-8760-fc264fbb2565,7cc05f25-7ba2-42b7-a237-c5662a1689e1
+image,f6a038c2-820c-42ba-8c2b-375e24e8f932,3f4c7204-739b-4bbb-87a7-8a6856439c90
+include-node-filter,f05f804d-7057-4ffe-bdc5-39f2f0c9c9fd,2a2d8ad2-af0a-4e1f-9982-0c899e7dc827
+instance-group,8e6ee9dc-9017-444a-83b3-219edb018128,3bf1e610-45f7-4ad6-b833-ca4c5ee6a3fd
+inventory-item,69957f4a-2155-4b95-8d72-d6dd9b88b27b,cd57d844-9017-4078-aa19-926935a3d77c
+inventory-item-data,0e54bb87-bd6e-4a2b-ad1c-6d935b87ae51,87a383ae-cf03-432e-a9de-04e6a622d0fd
+ipsec-configuration,d949fd10-36bf-408a-ac7a-cad5004d2e0d,aca4c310-cb45-42bd-9f88-73e40ba7b962
+key-data,c23ea04d-1a3b-453d-bc49-a6c783a5e92b,f5faa464-c2f2-4cc3-89d2-a90452dc3a07
+l-interface,a32613fd-18b9-459e-aab8-fffb3912966a,cea0a982-8d55-4093-921e-418fbccf7060
+l3-interface-ipv4-address-list,41e76b6f-1e06-4fd4-82cd-81c50fc4574b,aad85df2-09be-40fa-b867-16415e4e10e2
+l3-interface-ipv6-address-list,d040621d-541a-477b-bb1b-a2b61b14e295,82966045-43ee-4982-8307-7e9610866140
+l3-network,9111f20f-e680-4001-b83f-19a2fc23bfc1,3d560d81-57d0-438b-a2a1-5334dba0651a
+lag-interface,ce95f7c3-b61b-4758-ae9e-7e943b1c103d,e0ee9bde-c1fc-4651-a95d-8e0597bf7d70
+lag-link,d29a087a-af59-4053-a3f8-0f95a92faa75,86ffe6e5-4d0e-4cec-80b5-5c38aa3eff98
+license,6889274b-a1dc-40ab-9090-93677e13e2e6,b9a9b337-1f86-42d3-b9f9-f987a089507c
+license-key-resource,24b25f8c-b8bd-4c62-9421-87c12667aac9,9022ebfe-b54f-4911-a6b2-8c3f5ec189b7
+logical-link,a1481a38-f8ba-4ae4-bdf1-06c2c6af4c54,fe012535-2c31-4a39-a739-612374c638a0
+metadatum,6bae950e-8939-41d3-a6a7-251b03e4c1fc,86dbb63a-265e-4614-993f-6771c30b56a5
+model,1f51c05c-b164-4c27-9c03-5cbb239fd6be,06d1418a-5faa-452d-a94b-a2829df5f67b
+model-constraint,ad70dd19-f156-4fb5-a865-97b5563b0d37,c28966f3-e758-4483-b37b-a90b05d3dd33
+model-element,753e813a-ba9e-4a1d-ab34-b2f6dc6eec0c,2076e726-3577-477a-a300-7fa65cd4df11
+model-ver,93f2f8bc-cb12-4a01-96c8-3d2649e4ab8f,b5cd462f-e426-4146-b1fe-5475ae272c3d
+multicast-configuration,666a06ee-4b57-46df-bacf-908da8f10c3f,ea78c9e3-514d-4a0a-9162-13837fa54c35
+named-query,5c3b7c33-afa3-4be5-8da7-1a5ac6f99896,80b712fd-0ad3-4180-a99c-8c995cf1cc32
+named-query-element,204c641a-3494-48c8-979a-86856f5fd32a,3c504d40-b847-424c-9d25-4fb7e0a3e994
+network-policy,a0ccd9dc-7062-4940-9bcc-e91dd28af510,6aa05779-94d7-4d8b-9bee-59ef2ab0c246
+network-profile,01f45471-4240-498c-a9e1-235dc0b8b4a6,2734b44a-b8a2-40f6-957d-6256589e5d00
+newvce,7c79e11f-a408-4593-aa86-ba948a1236af,4b05ec9c-c55d-4987-83ff-e08d6ddb694f
+oam-network,f4fb34f3-fd6e-4a8f-a3fb-4ab61a343b79,2851cf01-9c40-4064-87d4-6184a6fcff35
+p-interface,d2cdb2d0-fc1f-4a57-a89e-591b1c4e3754,94043c37-4e73-439c-a790-0fdd697924cd
+physical-link,9c523936-95b4-4d7f-9f53-6bdfe0cf2c05,c822d81f-822f-4304-9623-1025b53da568
+pnf,e9f1fa7d-c839-418a-9601-03dc0d2ad687,862b25a1-262a-4961-bdaa-cdc55d69785a
+port-group,03e8bb6b-b48a-46ae-b5d4-e5af577e6844,8ce940fb-55d7-4230-9e7f-a56cc2741f77
+property-constraint,81706bbd-981e-4362-ae20-995cbcb2d995,f4a863c3-6886-470a-a6ae-05723837ea45
+pserver,72f0d495-bc27-4653-9e1a-eef76bd34bc9,6d932c8f-463b-4e76-83fb-87acfbaa2e2d
+related-lookup,0988bab5-bf4f-4938-a419-ab249867d12a,468f6f5b-2996-41bb-b2a3-7cf9613ebb9b
+reserved-prop-names,ac49d26d-9163-430e-934a-13b738a04f5c,0c3e0ba3-618c-498d-9127-c8d42b00170f
+result-data,4e9b50aa-5227-4f6f-b489-62e6bbc03c79,ff656f23-6185-406f-9006-4b26834f3e1c
+route-table-reference,fed7e326-03a7-45ff-a3f2-471470d268c4,a8614b63-2636-4c4f-98df-fd448c4241db
+routing-instance,3ccbcbc7-d19e-44d5-a52f-7e18aa8d69fa,1c2ded4f-8b01-4193-829c-966847dfec3e
+secondary-filter,1380619d-dd1a-4cec-b755-c6407833e065,738ff299-6290-4c00-8998-bd0e96a07b93
+segmentation-assignment,c5171ae0-44fb-4c04-b482-d56702241a44,6e814aee-46e1-4583-a9d4-0049bfd2b59b
+service,ecce2c42-3957-4ae0-9442-54bc6afe27b6,07a3a60b-1b6c-4367-8173-8014386f89e3
+service-capability,f9cfec1b-18da-4bba-bd83-4b26cca115cd,b1a7cc05-d19d-443b-a5d1-733e325c4232
+service-instance,46b92144-923a-4d20-b85a-3cbd847668a9,82194af1-3c2c-485a-8f44-420e22a9eaa4
+service-subscription,5e68299a-79f2-4bfb-8fbc-2bae877a2459,2e1a602a-acd8-4f78-94ff-618b802a303b
+site-pair,7106bc02-6552-4fc3-8a56-4f3df9034531,db63f3e6-f8d1-484e-8d5e-191600b7914b
+site-pair-set,a5c6c1bc-dc38-468e-9459-bb08f87247df,5d4dae3e-b402-4bfd-909e-ece12ff75d26
+snapshot,962a7c8b-687f-4d32-a775-fe098e214bcd,24de00ef-aead-4b52-995b-0adf8d4bd90d
+sriov-vf,1e8b331f-3d4a-4160-b7aa-f4d5a8916625,04b2935f-33c4-40a9-8af0-8b52690042dc
+start-node-filter,aad96fd3-e75f-42fc-9777-3450c36f1168,083093a3-e407-447a-ba5d-7583e4d23e1d
+subnet,f902a6bc-6be4-4fe5-8458-a6ec0056b374,1b2c9ba7-e449-4831-ba15-3073672f5ef2
+tagged-inventory-item-list,c246f6e2-e3a1-4697-94c0-5672a7fbbf04,e78a7eaa-f65d-4919-9c2b-5b258c8c4d7e
+tenant,abcc54bc-bb74-49dc-9043-7f7171707545,97c26c99-6870-44c1-8a07-1d900d3f4ce6
+tunnel-xconnect,e7cb4ca8-e1a5-4487-a716-4ae0bcd8aef5,50b9e2fa-005c-4bbe-b651-3251dece4cd8
+update-node-key,6004cfa6-eb6d-4062-971f-b1fde6b74aa0,fe81c801-f65d-408a-b2b7-a729a18f8154
+vce,b6cf54b5-ec45-43e1-be64-97b4e1513333,bab6dceb-e7e6-4301-a5e0-a7399b48d792
+vf-module,c00563ae-812b-4e62-8330-7c4d0f47088a,ef86f9c5-2165-44f3-8fc3-96018b609ea5
+vig-server,8e8c22f1-fbdf-48ea-844c-8bdeb44e7b16,bed7c3b7-35d0-4cd9-abde-41b20e68b28e
+virtual-data-center,6dd43ced-d789-47af-a759-d3abc14e3ac1,5150abcf-0c5f-4593-9afe-a19c48fc4824
+vlan,257d88a5-a269-4c35-944f-aca04fbdb791,d2b1eaf1-ae59-4116-9ee4-aa0179faa4f8
+vnf-image,c4d3e747-ba4a-4b17-9896-94c6f18c19d3,f9a628ff-7aa0-40e2-a93d-02d91c950982
+vnfc,5761e0a7-c6df-4d8a-9ebd-b8f445054dec,96129eb9-f0de-4e05-8af2-73146473f766
+volume,0fbe2e8f-4d91-4415-a772-88387049b38d,ddd739b4-2b25-46c4-affc-41a32af5cc42
+volume-group,99d44c90-1f61-4418-b9a6-56586bf38c79,fcec1b02-b2d0-4834-aef8-d71be04717dd
+vpe,203817d3-829c-42d4-942d-2a935478e993,053ec3a7-5b72-492d-b54d-123805a9b967
+vpls-pe,b1566228-6785-4ce1-aea2-053736f80341,457ba89b-334c-4fbd-acc4-160ac0e0cdc0
+vpn-binding,21a146e5-9901-448c-9197-723076770119,9e23b675-db2b-488b-b459-57aa9857baa0
+vserver,8ecb2c5d-7176-4317-a255-26274edfdd53,ff69d4e0-a8e8-4108-bdb0-dd63217e63c7
+zone,f7f21a66-4714-431c-af17-52d64e21de95,16f7cb93-e807-4065-816b-9cdf391d4992
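
The widget-model input above is a headerless CSV that pairs each widget node type with two UUIDs; reading them as a model-invariant-id and a model-version-id per widget is an assumption drawn from the MigrateBadWidgetModels migration names, not something the file itself states. A minimal sketch of loading it into a lookup map (the class, field, and path names are illustrative):

    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.HashMap;
    import java.util.Map;

    public class WidgetModelCsvReader {

        // Pair of model UUIDs for one widget node type (column meaning assumed, see note above).
        static final class ModelIds {
            final String first;
            final String second;
            ModelIds(String first, String second) {
                this.first = first;
                this.second = second;
            }
        }

        public static void main(String[] args) throws Exception {
            Map<String, ModelIds> byNodeType = new HashMap<>();
            for (String line : Files.readAllLines(Paths.get(
                    "src/test/resources/migration-input-files/widget-model-migration-data/widget-model-migration-input.csv"))) {
                String[] cols = line.split(",");
                if (cols.length == 3) {
                    byNodeType.put(cols[0].trim(), new ModelIds(cols[1].trim(), cols[2].trim()));
                }
            }
            // e.g. byNodeType.get("generic-vnf").first -> 93a6166f-b3d5-4f06-b4ba-aed48d009ad9
            System.out.println(byNodeType.size() + " widget node types loaded");
        }
    }
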