Initial seed code for graphadmin 03/60303/3
author: Kajur, Harish (vk250x) <vk250x@att.com>
Mon, 13 Aug 2018 09:32:35 +0000 (05:32 -0400)
committer: Kajur, Harish (vk250x) <vk250x@att.com>
Mon, 13 Aug 2018 18:09:01 +0000 (14:09 -0400)
Issue-ID: AAI-1469
Change-Id: Ic170c326ad1fe4b43960de674797766f6f7b94bf
Signed-off-by: Kajur, Harish (vk250x) <vk250x@att.com>
205 files changed:
.gitignore [new file with mode: 0644]
LICENSE.TXT [new file with mode: 0644]
README.md [new file with mode: 0755]
pom.xml [new file with mode: 0755]
src/main/assembly/descriptor.xml [new file with mode: 0644]
src/main/docker/Dockerfile [new file with mode: 0755]
src/main/docker/aai.sh [new file with mode: 0644]
src/main/docker/docker-entrypoint.sh [new file with mode: 0644]
src/main/java/org/onap/aai/GraphAdminApp.java [new file with mode: 0644]
src/main/java/org/onap/aai/Profiles.java [new file with mode: 0644]
src/main/java/org/onap/aai/config/AuditorConfiguration.java [new file with mode: 0644]
src/main/java/org/onap/aai/config/DslConfiguration.java [new file with mode: 0644]
src/main/java/org/onap/aai/config/JettyPasswordDecoder.java [new file with mode: 0644]
src/main/java/org/onap/aai/config/PasswordDecoder.java [new file with mode: 0644]
src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java [new file with mode: 0644]
src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java [new file with mode: 0644]
src/main/java/org/onap/aai/datagrooming/DataGrooming.java [new file with mode: 0644]
src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java [new file with mode: 0644]
src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java [new file with mode: 0644]
src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java [new file with mode: 0644]
src/main/java/org/onap/aai/datasnapshot/PartialPropAndEdgeLoader.java [new file with mode: 0644]
src/main/java/org/onap/aai/datasnapshot/PartialVertexLoader.java [new file with mode: 0644]
src/main/java/org/onap/aai/datasnapshot/PrintVertexDetails.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/AuditDoc.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/AuditJanusGraph.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/AuditOXM.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/Auditor.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/AuditorFactory.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/CompareByName.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/DBIndex.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/DBProperty.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/EdgeProperty.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/Named.java [new file with mode: 0644]
src/main/java/org/onap/aai/db/schema/ScriptDriver.java [new file with mode: 0644]
src/main/java/org/onap/aai/dbgen/DupeTool.java [new file with mode: 0644]
src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java [new file with mode: 0644]
src/main/java/org/onap/aai/dbgen/GraphMLTokens.java [new file with mode: 0644]
src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java [new file with mode: 0644]
src/main/java/org/onap/aai/dbgen/schemamod/SchemaModInternal.java [new file with mode: 0644]
src/main/java/org/onap/aai/dbgen/tags/Command.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/AAIContainerFilter.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/package-info.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/post/InvalidResponseStatus.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/post/ResetLoggingContext.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/AAIRequestFilterPriority.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/HttpHeaderInterceptor.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/RequestModification.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/RetiredInterceptor.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/TwoWaySslAuthorization.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java [new file with mode: 0644]
src/main/java/org/onap/aai/interceptors/pre/VersionLatestInterceptor.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/EdgeMigrator.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/Enabled.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/EventAction.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/MigrationController.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/MigrationControllerInternal.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/MigrationDangerRating.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/MigrationPriority.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/Migrator.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/NotificationHelper.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/PropertyMigrator.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/Status.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/ValueMigrator.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/VertexMerge.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/ContainmentDeleteOtherVPropertyMigration.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/DeletePInterface.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/EdgeReportForToscaMigration.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/MigrateModelVerDistriubutionStatusProperty.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/MigrateServiceInstanceToConfiguration.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/SDWANSpeedChangeMigration.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/UpdateAaiUriIndexMigration.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v12/UriMigration.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateBooleanDefaultsToFalse.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateInMaintDefaultToFalse.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelInvariantId.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelVersionId.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupSubType.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupType.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateModelVer.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigratePserverAndPnfEquipType.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateVnfcModelInvariantId.java [new file with mode: 0644]
src/main/java/org/onap/aai/migration/v13/MigrateVnfcModelVersionId.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/ExceptionHandler.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/QueryConsumer.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/dsl/DslListener.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/dsl/DslQueryProcessor.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/search/QueryProcessorType.java [new file with mode: 0644]
src/main/java/org/onap/aai/rest/util/EchoResponse.java [new file with mode: 0644]
src/main/java/org/onap/aai/schema/GenTester.java [new file with mode: 0644]
src/main/java/org/onap/aai/service/AuthorizationService.java [new file with mode: 0644]
src/main/java/org/onap/aai/service/RetiredService.java [new file with mode: 0644]
src/main/java/org/onap/aai/util/PositiveNumValidator.java [new file with mode: 0644]
src/main/java/org/onap/aai/util/SendDeleteMigrationNotifications.java [new file with mode: 0644]
src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java [new file with mode: 0644]
src/main/java/org/onap/aai/util/SendMigrationNotifications.java [new file with mode: 0644]
src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java [new file with mode: 0644]
src/main/java/org/onap/aai/util/UniquePropertyCheck.java [new file with mode: 0644]
src/main/java/org/onap/aai/web/JerseyConfiguration.java [new file with mode: 0644]
src/main/java/org/onap/aai/web/LocalHostAccessLog.java [new file with mode: 0644]
src/main/resources/antlr4/org/onap/aai/AAIDsl.g4 [new file with mode: 0644]
src/main/resources/application.properties [new file with mode: 0644]
src/main/resources/dupeTool-logback.xml [new file with mode: 0644]
src/main/resources/dynamicPayloadGenerator-logback.xml [new file with mode: 0644]
src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties [new file with mode: 0644]
src/main/resources/etc/appprops/aaiconfig.properties [new file with mode: 0644]
src/main/resources/etc/appprops/datatoolscrons.properties [new file with mode: 0644]
src/main/resources/etc/appprops/dynamic.properties [new file with mode: 0644]
src/main/resources/etc/appprops/error.properties [new file with mode: 0644]
src/main/resources/etc/appprops/janusgraph-cached.properties [new file with mode: 0644]
src/main/resources/etc/appprops/janusgraph-realtime.properties [new file with mode: 0644]
src/main/resources/etc/appprops/logging.properties [new file with mode: 0644]
src/main/resources/etc/auth/aai_keystore [new file with mode: 0644]
src/main/resources/etc/auth/realm.properties [new file with mode: 0644]
src/main/resources/etc/scriptdata/addmanualdata/README [new file with mode: 0644]
src/main/resources/etc/scriptdata/addmanualdata/tenant_isolation/README [new file with mode: 0644]
src/main/resources/etc/scriptdata/tenant_isolation/inputFilters.json [new file with mode: 0644]
src/main/resources/etc/scriptdata/tenant_isolation/inputFiltersAllzones.json [new file with mode: 0644]
src/main/resources/etc/scriptdata/tenant_isolation/inputFilters_E2E.json [new file with mode: 0644]
src/main/resources/etc/scriptdata/tenant_isolation/inputFilters_IST.json [new file with mode: 0644]
src/main/resources/etc/scriptdata/tenant_isolation/inputFilters_PROD.json [new file with mode: 0644]
src/main/resources/etc/scriptdata/tenant_isolation/nodes.json [new file with mode: 0644]
src/main/resources/etc/scriptdata/tenant_isolation/nodesAZCloud.json [new file with mode: 0644]
src/main/resources/etc/scriptdata/tenant_isolation/nodesIncremental.json [new file with mode: 0644]
src/main/resources/etc/scriptdata/tenant_isolation/nodesNoAZ.json [new file with mode: 0644]
src/main/resources/forceDelete-logback.xml [new file with mode: 0644]
src/main/resources/localhost-access-logback.xml [new file with mode: 0644]
src/main/resources/logback.xml [new file with mode: 0644]
src/main/resources/migration-logback.xml [new file with mode: 0644]
src/main/resources/retired.properties [new file with mode: 0644]
src/main/resources/schemaMod-logback.xml [new file with mode: 0644]
src/main/resources/uniquePropertyCheck-logback.xml [new file with mode: 0644]
src/main/scripts/audit_schema.sh [new file with mode: 0644]
src/main/scripts/common_functions.sh [new file with mode: 0644]
src/main/scripts/createDBSchema.sh [new file with mode: 0644]
src/main/scripts/dataGrooming.sh [new file with mode: 0644]
src/main/scripts/dataRestoreFromSnapshot.sh [new file with mode: 0644]
src/main/scripts/dataSnapshot.sh [new file with mode: 0644]
src/main/scripts/dupeTool.sh [new file with mode: 0644]
src/main/scripts/dynamicPayloadArchive.sh [new file with mode: 0644]
src/main/scripts/dynamicPayloadGenerator.sh [new file with mode: 0644]
src/main/scripts/dynamicPayloadPartial.sh [new file with mode: 0644]
src/main/scripts/forceDeleteTool.sh [new file with mode: 0644]
src/main/scripts/migration_verification.sh [new file with mode: 0644]
src/main/scripts/run_Migrations.sh [new file with mode: 0644]
src/main/scripts/run_SendDeleteMigrationNotification.sh [new file with mode: 0644]
src/main/scripts/run_SendMigrationNotification.sh [new file with mode: 0644]
src/main/scripts/schemaMod.sh [new file with mode: 0644]
src/main/scripts/uniquePropertyCheck.sh [new file with mode: 0644]
src/main/scripts/updatePem.sh [new file with mode: 0644]
src/test/java/org/onap/aai/AAIGremlinQueryTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/AAISetup.java [new file with mode: 0644]
src/test/java/org/onap/aai/GraphAdminTestConfiguration.java [new file with mode: 0644]
src/test/java/org/onap/aai/PayloadUtil.java [new file with mode: 0644]
src/test/java/org/onap/aai/dbgen/DupeToolTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/dbgen/ForceDeleteToolTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/EdgeSwingMigratorTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/MigrationControllerInternalTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/PropertyMigratorTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/ValueMigratorTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/VertexMergeTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/ContainmentDeleteOtherVPropertyMigrationTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/DeletePInterfaceTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/MigrateModelVerDistributionStatusPropertyTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/MigrateServiceInstanceToConfigurationTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/MigrateServiceInstanceToConfigurationTestPreMigrationMock.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/SDWANSpeedChangeMigrationTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v12/UriMigrationTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateBooleanDefaultsToFalseTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateInMaintDefaultToFalseTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelInvariantIdTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelVersionIdTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupSubTypeTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupTypeTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateModelVerTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigratePServerAndPnfEquipTypeTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateVnfcModelInvariantIdTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/migration/v13/MigrateVnfcModelVersionIdTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/schema/db/ManageSchemaTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/util/SendDeleteMigrationNotificationsTest.java [new file with mode: 0644]
src/test/java/org/onap/aai/util/SendMigrationNotificationsTest.java [new file with mode: 0644]
src/test/resources/logback.xml [new file with mode: 0644]
src/test/resources/payloads/templates/cloud-region-with-linterface.json [new file with mode: 0644]
src/test/resources/payloads/templates/cloud-region-with-vserver.json [new file with mode: 0644]
src/test/resources/payloads/templates/custom-query.json [new file with mode: 0644]
src/test/resources/payloads/templates/dsl-query.json [new file with mode: 0644]
src/test/resources/payloads/templates/generic-vnf.json [new file with mode: 0644]
src/test/resources/payloads/templates/gremlin-query.json [new file with mode: 0644]
src/test/resources/payloads/templates/model-ver.json [new file with mode: 0644]
src/test/resources/payloads/templates/model.json [new file with mode: 0644]
src/test/resources/payloads/templates/pserver.json [new file with mode: 0644]
src/test/resources/schema-ingest.properties [new file with mode: 0644]
src/test/resources/updateEdgeTestRules.json [new file with mode: 0644]

diff --git a/.gitignore b/.gitignore
new file mode 100644 (file)
index 0000000..d904233
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,14 @@
+/target/
+/oxm/
+.idea/
+.settings/
+debug-logs/
+.project
+/logs/
+**/oxm/**
+**/dbedgerules/**
+*.iml
+.settings/
+.project
+.classpath
+src/main/resources/etc/scriptdata/addmanualdata/tenant_isolation/payload/
diff --git a/LICENSE.TXT b/LICENSE.TXT
new file mode 100644 (file)
index 0000000..3455862
--- /dev/null
+++ b/LICENSE.TXT
@@ -0,0 +1,17 @@
+============LICENSE_START=======================================================
+org.onap.aai
+================================================================================
+Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+================================================================================
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+============LICENSE_END=========================================================
diff --git a/README.md b/README.md
new file mode 100755 (executable)
index 0000000..e69de29
diff --git a/pom.xml b/pom.xml
new file mode 100755 (executable)
index 0000000..46d6d0b
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,1175 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+    ============LICENSE_START=======================================================
+    org.onap.aai
+    ================================================================================
+    Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+    ================================================================================
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+    ============LICENSE_END=========================================================
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.onap.oparent</groupId>
+        <artifactId>oparent</artifactId>
+        <version>1.1.0</version>
+    </parent>
+    <groupId>org.onap.aai.graphadmin</groupId>
+    <artifactId>aai-graphadmin</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+
+    <properties>
+
+        <!-- Start of Compiler Related Properties -->
+        <java.version>1.8</java.version>
+        <maven.compiler.source>1.8</maven.compiler.source>
+        <maven.compiler.target>1.8</maven.compiler.target>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <!-- End of Compiler Related Properties -->
+
+        <!-- Start of Test Related Properties -->
+        <skip.unit.tests>false</skip.unit.tests>
+        <skip.integration.tests>true</skip.integration.tests>
+        <!-- End of Test Related Properties -->
+
+        <!-- Start of Sonar Related Properties -->
+        <sonar.language>java</sonar.language>
+        <sonar.java.coveragePlugin>jacoco</sonar.java.coveragePlugin>
+        <sonar.surefire.reportsPath>${project.build.directory}/surefire-reports</sonar.surefire.reportsPath>
+        <sonar.jacoco.reportPath>${project.build.directory}/coverage-reports/jacoco.exec</sonar.jacoco.reportPath>
+        <sonar.jacoco.reportMissing.force.zero>false</sonar.jacoco.reportMissing.force.zero>
+        <sonar.projectVersion>${project.version}</sonar.projectVersion>
+        <!-- End of Sonar Related Properties -->
+
+        <!-- Start of Docker Related Properties -->
+        <docker.fabric.version>0.23.0</docker.fabric.version>
+        <!-- Default docker registry that maven fabric plugin will try to pull from -->
+        <docker.registry>docker.io</docker.registry>
+        <!-- Specifying the docker push registry where the image should be pushed -->
+        <!-- This value should be overwritten at runtime to wherever need to be pushed to -->
+        <docker.push.registry>localhost:5000</docker.push.registry>
+        <aai.docker.version>1.0.0</aai.docker.version>
+        <aai.build.directory>${project.build.directory}/${project.artifactId}-${project.version}-build/
+        </aai.build.directory>
+        <aai.docker.namespace>onap</aai.docker.namespace>
+        <!-- End of Docker Related Properties -->
+
+        <license.goal.type>check</license.goal.type>
+
+        <!--
+            Nexus Proxy Properties and Snapshot Locations
+            Ideally this can be overwritten at runtime per internal environment specific values at runtime
+        -->
+        <nexusproxy>https://nexus.onap.org</nexusproxy>
+        <site.path>/content/sites/site/org/onap/aai/graphadmin/${project.artifactId}/${project.version}</site.path>
+        <release.path>/content/repositories/releases/</release.path>
+        <snapshot.path>/content/repositories/snapshots/</snapshot.path>
+        <!-- GMaven plugin uses this property to figure out the name of the docker tag -->
+        <aai.project.version>${project.version}</aai.project.version>
+
+        <!-- Start of Database Related Properties -->
+        <hbase.version>1.0.2</hbase.version>
+        <janusgraph.version>0.2.0</janusgraph.version>
+        <gremlin.driver.version>3.0.1-incubating</gremlin.driver.version>
+        <gremlin.version>3.2.2</gremlin.version>
+        <!-- End of Database Related Properties -->
+
+        <activemq.version>5.14.3</activemq.version>
+        <logback.version>1.2.3</logback.version>
+        <mockito.version>1.10.19</mockito.version>
+        <httpclient.version>4.5.1</httpclient.version>
+
+        <gson.version>2.7</gson.version>
+        <json.version>20090211</json.version>
+
+        <aai.core.version>1.3.0-SNAPSHOT</aai.core.version>
+        <aai.schema.version>1.3.0-SNAPSHOT</aai.schema.version>
+
+        <netty.handler.version>4.1.9.Final</netty.handler.version>
+        <netty.version>4.0.37.Final</netty.version>
+
+        <spring.test.version>4.3.6.RELEASE</spring.test.version>
+        <spring.web.version>4.3.6.RELEASE</spring.web.version>
+
+        <cxf.version>3.2.2</cxf.version>
+
+        <dme2.version>2.8.5</dme2.version>
+        <jsonassert.version>1.4.0</jsonassert.version>
+
+        <antlr4.visitor>true</antlr4.visitor>
+        <antlr4.listener>true</antlr4.listener>
+        <antlr.version>4.7</antlr.version>
+
+        <eelf.core.version>1.0.0</eelf.core.version>
+        <google.guava.version>16.0</google.guava.version>
+        <jaxb.version>2.2.11</jaxb.version>
+        <eclipse.persistence.version>2.6.2</eclipse.persistence.version>
+        <dmaap.client.version>0.2.12</dmaap.client.version>
+
+        <springframework.version>4.3.4.RELEASE</springframework.version>
+        <spring.jms.version>4.3.2.RELEASE</spring.jms.version>
+        <spring.security.version>1.0.3.RELEASE</spring.security.version>
+
+        <hamcrest.junit.version>2.0.0.0</hamcrest.junit.version>
+        <junit.version>4.12</junit.version>
+
+        <start-class>org.onap.aai.GraphAdminApp</start-class>
+
+        <snapshot.file>${project.basedir}/snapshots/int1-data.graphson</snapshot.file>
+        <jacoco.line.coverage.limit>0.27</jacoco.line.coverage.limit>
+
+        <!-- Start of Default ONAP Schema Properties -->
+        <schema.source.name>onap</schema.source.name>
+        <schema.configuration.location>N/A</schema.configuration.location>
+        <schema.nodes.location>${project.basedir}/src/main/resources/schema/${schema.source.name}/oxm</schema.nodes.location>
+        <schema.edges.location>${project.basedir}/src/main/resources/schema/${schema.source.name}/dbedgerules</schema.edges.location>
+        <schema.version.depth.start>v9</schema.version.depth.start>
+        <schema.version.related.link.start>v10</schema.version.related.link.start>
+        <schema.version.app.root.start>v11</schema.version.app.root.start>
+        <schema.version.namespace.change.start>v12</schema.version.namespace.change.start>
+        <schema.version.edge.label.start>v12</schema.version.edge.label.start>
+        <schema.version.api.default>v14</schema.version.api.default>
+        <schema.version.list>v8,v9,v10,v11,v12,v13,v14</schema.version.list>
+        <schema.uri.base.path>/aai</schema.uri.base.path>
+        <!-- End of Default ONAP Schema Properties -->
+    </properties>
+
+    <profiles>
+        <!-- Used during verify stage in Jenkins -->
+        <profile>
+            <id>all-tests</id>
+            <properties>
+                <build.profile.id>all-tests</build.profile.id>
+
+                <!-- All tests are run. -->
+                <skip.integration.tests>false</skip.integration.tests>
+                <skip.unit.tests>false</skip.unit.tests>
+            </properties>
+        </profile>
+        <profile>
+            <id>skipTests</id>
+            <activation>
+                <property>
+                    <name>skipTests</name>
+                </property>
+            </activation>
+            <properties>
+                <!-- All tests are skipped. -->
+                <skip.integration.tests>true</skip.integration.tests>
+                <skip.unit.tests>true</skip.unit.tests>
+            </properties>
+        </profile>
+        <profile>
+            <id>dev</id>
+        </profile>
+        <profile>
+            <id>integration-test</id>
+            <properties>
+                <!-- Only integration tests are run. -->
+                <build.profile.id>integration-test</build.profile.id>
+                <skip.integration.tests>false</skip.integration.tests>
+                <skip.unit.tests>true</skip.unit.tests>
+            </properties>
+        </profile>
+        <!-- Start of ONAP Profile -->
+        <profile>
+            <id>onap</id>
+            <properties>
+                <schema.source.name>onap</schema.source.name>
+                <schema.version.namespace.change.start>v12</schema.version.namespace.change.start>
+                <schema.version.list>v8,v9,v10,v11,v12,v13,v14</schema.version.list>
+            </properties>
+        </profile>
+        <!-- End of ONAP Profile -->
+        <profile>
+            <id>runAjsc</id>
+            <properties>
+                <skipTests>${skip.unit.tests}</skipTests>
+            </properties>
+            <build>
+                <defaultGoal>pre-integration-test</defaultGoal>
+                <plugins>
+                    <plugin>
+                        <groupId>org.codehaus.mojo</groupId>
+                        <artifactId>exec-maven-plugin</artifactId>
+                        <version>1.6.0</version>
+                        <executions>
+                            <execution>
+                                <id>run-spring-boot</id>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>java</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                        <configuration>
+                            <mainClass>${start-class}</mainClass>
+                            <!-- System properties passed to the forked java process:
+                                 the snapshot file to operate on, plus the schema
+                                 configuration (source name, config/nodes/edges file
+                                 locations, per-feature starting versions, the list of
+                                 supported versions, and the REST base path). -->
+                            <systemProperties>
+                                <property>
+                                    <key>snapshot.location</key>
+                                    <value>${snapshot.file}</value>
+                                </property>
+                                <property>
+                                    <key>schema.source.name</key>
+                                    <value>${schema.source.name}</value>
+                                </property>
+                                <property>
+                                    <key>schema.configuration.location</key>
+                                    <value>${schema.configuration.location}</value>
+                                </property>
+                                <property>
+                                    <key>schema.nodes.location</key>
+                                    <value>${schema.nodes.location}</value>
+                                </property>
+                                <property>
+                                    <key>schema.edges.location</key>
+                                    <value>${schema.edges.location}</value>
+                                </property>
+                                <property>
+                                    <key>schema.version.depth.start</key>
+                                    <value>${schema.version.depth.start}</value>
+                                </property>
+                                <property>
+                                    <key>schema.version.related.link.start</key>
+                                    <value>${schema.version.related.link.start}</value>
+                                </property>
+                                <property>
+                                    <key>schema.version.app.root.start</key>
+                                    <value>${schema.version.app.root.start}</value>
+                                </property>
+                                <property>
+                                    <key>schema.version.namespace.change.start</key>
+                                    <value>${schema.version.namespace.change.start}</value>
+                                </property>
+                                <property>
+                                    <key>schema.version.edge.label.start</key>
+                                    <value>${schema.version.edge.label.start}</value>
+                                </property>
+                                <property>
+                                    <key>schema.version.api.default</key>
+                                    <value>${schema.version.api.default}</value>
+                                </property>
+                                <property>
+                                    <key>schema.version.list</key>
+                                    <value>${schema.version.list}</value>
+                                </property>
+                                <property>
+                                    <key>schema.uri.base.path</key>
+                                    <value>${schema.uri.base.path}</value>
+                                </property>
+                            </systemProperties>
+                            <executable>java</executable>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+        <!-- Docker profile to be used for building docker image and pushing to nexus -->
+        <profile>
+            <id>docker</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>io.fabric8</groupId>
+                        <artifactId>docker-maven-plugin</artifactId>
+                        <version>${docker.fabric.version}</version>
+                        <configuration>
+                            <verbose>true</verbose>
+                            <apiVersion>1.23</apiVersion>
+                            <images>
+                                <image>
+                                    <!-- %l is the fabric8 tag placeholder (resolves to
+                                         "latest" for snapshots, the version otherwise) -->
+                                    <name>${docker.push.registry}/${aai.docker.namespace}/${project.artifactId}:%l</name>
+                                    <build>
+                                        <!-- '@' delimiter for Dockerfile placeholder filtering -->
+                                        <filter>@</filter>
+                                        <tags>
+                                            <tag>latest</tag>
+                                            <tag>${project.docker.latesttag.version}</tag>
+                                            <tag>${project.major.version}.${project.minor.version}-STAGING-${maven.build.timestamp}</tag>
+                                        </tags>
+                                        <cleanup>try</cleanup>
+                                        <dockerFileDir>${project.basedir}/src/main/docker</dockerFileDir>
+                                        <!-- Copy the build output into /<artifactId> inside the image context -->
+                                        <assembly>
+                                            <inline>
+                                                <fileSets>
+                                                    <fileSet>
+                                                        <directory>${aai.build.directory}</directory>
+                                                        <outputDirectory>/${project.artifactId}</outputDirectory>
+                                                    </fileSet>
+                                                </fileSets>
+                                            </inline>
+                                        </assembly>
+                                    </build>
+                                </image>
+                            </images>
+                        </configuration>
+                        <!-- Lifecycle bindings: remove images on clean, build on package, push on deploy -->
+                        <executions>
+                            <execution>
+                                <id>clean-images</id>
+                                <phase>pre-clean</phase>
+                                <goals>
+                                    <goal>remove</goal>
+                                </goals>
+                                <configuration>
+                                    <removeAll>true</removeAll>
+                                </configuration>
+                            </execution>
+                            <execution>
+                                <id>generate-images</id>
+                                <phase>package</phase>
+                                <goals>
+                                    <goal>build</goal>
+                                </goals>
+                            </execution>
+                            <execution>
+                                <id>push-images</id>
+                                <phase>deploy</phase>
+                                <goals>
+                                    <goal>push</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+
+    <dependencyManagement>
+        <dependencies>
+            <!-- Import the Spring Boot BOM so the spring-boot-starter-*
+                 dependencies declared below can omit explicit versions -->
+            <dependency>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-starter-parent</artifactId>
+                <version>1.5.12.RELEASE</version>
+                <scope>import</scope>
+                <type>pom</type>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
+    <dependencies>
+        <!-- Start of Logback Dependencies -->
+        <dependency>
+            <groupId>com.att.eelf</groupId>
+            <artifactId>eelf-core</artifactId>
+            <version>${eelf.core.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-core</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-access</artifactId>
+            <version>${logback.version}</version>
+        </dependency>
+        <!-- End of Logback Dependencies -->
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>${google.guava.version}</version>
+        </dependency>
+        <!-- Start of Janus Graph Dependencies -->
+        <!-- The slf4j-log4j12 binding is excluded from every JanusGraph artifact
+             so it cannot clash with the logback binding declared above -->
+        <dependency>
+            <groupId>org.janusgraph</groupId>
+            <artifactId>janusgraph-core</artifactId>
+            <version>${janusgraph.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.janusgraph</groupId>
+            <artifactId>janusgraph-hbase-parent</artifactId>
+            <version>${janusgraph.version}</version>
+            <type>pom</type>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.janusgraph</groupId>
+            <artifactId>janusgraph-hbase</artifactId>
+            <version>${janusgraph.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.janusgraph</groupId>
+            <artifactId>janusgraph-cassandra</artifactId>
+            <version>${janusgraph.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <!-- End of Janus Graph Dependencies -->
+        <!-- Start of Tinkerpop Dependencies -->
+        <dependency>
+            <groupId>org.apache.tinkerpop</groupId>
+            <artifactId>tinkergraph-gremlin</artifactId>
+            <version>${gremlin.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.tinkerpop</groupId>
+            <artifactId>gremlin-core</artifactId>
+            <version>${gremlin.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.tinkerpop</groupId>
+            <artifactId>gremlin-driver</artifactId>
+            <version>${gremlin.version}</version>
+        </dependency>
+        <!-- End of Tinkerpop Dependencies -->
+        <!-- Version managed elsewhere (no <version> element here) -->
+        <dependency>
+            <groupId>com.fasterxml.jackson.jaxrs</groupId>
+            <artifactId>jackson-jaxrs-json-provider</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.json</groupId>
+            <artifactId>json</artifactId>
+            <version>${json.version}</version>
+        </dependency>
+        <!-- Start of JAXB Dependencies -->
+        <dependency>
+            <groupId>javax.xml.bind</groupId>
+            <artifactId>jaxb-api</artifactId>
+            <version>${jaxb.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.sun.xml.bind</groupId>
+            <artifactId>jaxb-impl</artifactId>
+            <version>${jaxb.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.sun.xml.bind</groupId>
+            <artifactId>jaxb-core</artifactId>
+            <version>${jaxb.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.sun.xml.bind</groupId>
+            <artifactId>jaxb-xjc</artifactId>
+            <version>${jaxb.version}</version>
+        </dependency>
+        <!-- End of JAXB Dependencies -->
+        <dependency>
+            <groupId>org.eclipse.persistence</groupId>
+            <artifactId>eclipselink</artifactId>
+            <version>${eclipse.persistence.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.eclipse.persistence</groupId>
+            <artifactId>org.eclipse.persistence.moxy</artifactId>
+            <version>${eclipse.persistence.version}</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.google.code.gson</groupId>
+            <artifactId>gson</artifactId>
+            <version>${gson.version}</version>
+        </dependency>
+        <!--
+            Do not use activemq-all because they force you to use a specific logging
+            and they shade it so you can't simply exclude it and when you deploy the
+            jar, you will notice failure
+        -->
+        <dependency>
+            <groupId>org.apache.activemq</groupId>
+            <artifactId>activemq-broker</artifactId>
+            <version>${activemq.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.activemq</groupId>
+            <artifactId>activemq-client</artifactId>
+            <version>${activemq.version}</version>
+        </dependency>
+        <!-- aai-core, excluding the jersey 1 core, log4j binding and cxf core
+             it would otherwise pull in transitively -->
+        <dependency>
+            <groupId>org.onap.aai.aai-common</groupId>
+            <artifactId>aai-core</artifactId>
+            <version>${aai.core.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.sun.jersey</groupId>
+                    <artifactId>jersey-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.cxf</groupId>
+                    <artifactId>cxf-core</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <!-- NOTE(review): the jersey 1.18 client/json artifacts are declared here
+             even though the comment below says not to use the jersey 1.x client
+             alongside jersey 2; confirm whether these two are still needed -->
+        <dependency>
+            <groupId>com.sun.jersey</groupId>
+            <artifactId>jersey-client</artifactId>
+            <version>1.18</version>
+        </dependency>
+        <dependency>
+            <groupId>com.sun.jersey</groupId>
+            <artifactId>jersey-json</artifactId>
+            <version>1.18</version>
+        </dependency>
+        <!-- Do not use the jersey-client since jersey client 1.0 version clashes
+            with jersey 2 which we are using -->
+        <!-- Use this to make http requests instead of jersey 1.0 client -->
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>${httpclient.version}</version>
+        </dependency>
+        <!-- Start of Spring Framework Dependencies -->
+        <!--
+            Explicitly stating the security spring framework and
+            exclude the bouncy castle since that is somehow overwriting
+            our p12 file decryption that's built into java security
+            This will cause the password is incorrect
+            This needs to be added back if org.bouncy castle dependency
+            sneaks backs in and causing issues with the two way ssl
+        -->
+        <dependency>
+            <groupId>org.springframework.security</groupId>
+            <artifactId>spring-security-rsa</artifactId>
+            <version>${spring.security.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.bouncycastle</groupId>
+                    <artifactId>bcpkix-jdk15on</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-jms</artifactId>
+            <version>${spring.jms.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.sun.jersey</groupId>
+                    <artifactId>jersey-core</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <!-- Embedded tomcat is excluded because the app runs on jetty (next dependency) -->
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.springframework.boot</groupId>
+                    <artifactId>spring-boot-starter-tomcat</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-jetty</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-jersey</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-web</artifactId>
+            <version>${spring.web.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-test</artifactId>
+            <version>${spring.test.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <!-- End of Spring Framework Dependencies -->
+        <!-- Start of Netty Dependencies -->
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-all</artifactId>
+            <version>${netty.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-handler</artifactId>
+            <version>${netty.handler.version}</version>
+        </dependency>
+        <!-- End of Netty Dependencies -->
+        <!-- Start of Antlr dependencies for DSL -->
+        <dependency>
+            <groupId>org.antlr</groupId>
+            <artifactId>antlr4-runtime</artifactId>
+            <version>${antlr.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.antlr</groupId>
+            <artifactId>antlr4-maven-plugin</artifactId>
+            <version>${antlr.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.sonatype.sisu</groupId>
+                    <artifactId>sisu-guava</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <!-- End of Antlr dependencies for DSL -->
+        <!-- Start of Junit Test Dependencies -->
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-junit</artifactId>
+            <version>${hamcrest.junit.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>${junit.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-all</artifactId>
+            <version>${mockito.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.skyscreamer</groupId>
+            <artifactId>jsonassert</artifactId>
+            <version>${jsonassert.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <!-- End of Junit Test Dependencies -->
+    </dependencies>
+
+    <build>
+        <pluginManagement>
+            <plugins>
+                <!--This plugin's configuration is used to store Eclipse m2e settings
+                    only. It has no influence on the Maven build itself. -->
+                <plugin>
+                    <groupId>org.eclipse.m2e</groupId>
+                    <artifactId>lifecycle-mapping</artifactId>
+                    <version>1.0.0</version>
+                    <configuration>
+                        <lifecycleMappingMetadata>
+                            <pluginExecutions>
+                                <pluginExecution>
+                                    <pluginExecutionFilter>
+                                        <groupId>com.github.kongchen</groupId>
+                                        <artifactId>swagger-maven-plugin</artifactId>
+                                        <versionRange>3.1.3</versionRange>
+                                        <goals>
+                                            <goal>generate</goal>
+                                        </goals>
+                                    </pluginExecutionFilter>
+                                    <action>
+                                        <ignore/>
+                                    </action>
+                                </pluginExecution>
+                            </pluginExecutions>
+                        </lifecycleMappingMetadata>
+                    </configuration>
+                </plugin>
+                <!-- Unpacks the aai-schema OXM and db edge rule files into
+                     src/main/resources/schema during the initialize phase -->
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <version>2.8</version>
+                    <executions>
+                        <execution>
+                            <id>unpack-schema-dependency</id>
+                            <phase>initialize</phase>
+                            <goals>
+                                <goal>unpack</goal>
+                            </goals>
+                            <configuration>
+                                <artifactItems>
+                                    <artifactItem>
+                                        <groupId>org.onap.aai.aai-common</groupId>
+                                        <artifactId>aai-schema</artifactId>
+                                        <version>${aai.schema.version}</version>
+                                        <outputDirectory>${project.basedir}/src/main/resources/schema/</outputDirectory>
+                                        <includes>**/oxm/**/*.xml</includes>
+                                    </artifactItem>
+                                </artifactItems>
+                                <!-- other configurations here -->
+                            </configuration>
+                        </execution>
+                        <execution>
+                            <id>unpack-edgerules-dependency</id>
+                            <phase>initialize</phase>
+                            <goals>
+                                <goal>unpack</goal>
+                            </goals>
+                            <configuration>
+                                <artifactItems>
+                                    <artifactItem>
+                                        <groupId>org.onap.aai.aai-common</groupId>
+                                        <artifactId>aai-schema</artifactId>
+                                        <version>${aai.schema.version}</version>
+                                        <outputDirectory>${project.basedir}/src/main/resources/schema/</outputDirectory>
+                                        <includes>**/dbedgerules/**/*.json</includes>
+                                    </artifactItem>
+                                </artifactItems>
+                                <!-- other configurations here -->
+                            </configuration>
+                        </execution>
+                    </executions>
+                </plugin>
+                <!-- Derives project.major.version / project.minor.version and the
+                     docker latest tag from aai.project.version during validate -->
+                <plugin>
+                    <groupId>org.codehaus.groovy.maven</groupId>
+                    <artifactId>gmaven-plugin</artifactId>
+                    <version>1.0</version>
+                    <executions>
+                        <execution>
+                            <phase>validate</phase>
+                            <goals>
+                                <goal>execute</goal>
+                            </goals>
+                            <configuration>
+                                <source>
+                                    // Fail fast with a clear message when the property is
+                                    // missing; the previous version null-checked before
+                                    // split() but still indexed versionArray afterwards,
+                                    // producing an opaque NullPointerException.
+                                    def aaiProjectVersion = project.properties['aai.project.version'];
+                                    if (aaiProjectVersion == null) {
+                                        throw new IllegalStateException("Property 'aai.project.version' must be set to derive docker tag versions");
+                                    }
+                                    println aaiProjectVersion;
+                                    def versionArray = aaiProjectVersion.split('\\.');
+
+                                    project.properties["project.major.version"] = versionArray[0];
+                                    project.properties["project.minor.version"] = versionArray[1];
+                                    project.properties['project.docker.latesttag.version'] = versionArray[0] + '.' + versionArray[1] + '-STAGING-latest';
+                                    println 'New Tag for docker:' + project.properties['project.docker.latesttag.version'];
+                                </source>
+                            </configuration>
+                        </execution>
+                    </executions>
+                </plugin>
+                <!-- Default deploy is disabled; publishing goes through the
+                     nexus-staging plugin configured below -->
+                <plugin>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-deploy-plugin</artifactId>
+                    <executions>
+                        <execution>
+                            <id>default-deploy</id>
+                            <phase>none</phase>
+                        </execution>
+                    </executions>
+                </plugin>
+                <plugin>
+                    <groupId>org.sonatype.plugins</groupId>
+                    <artifactId>nexus-staging-maven-plugin</artifactId>
+                    <version>1.6.7</version>
+                    <extensions>true</extensions>
+                    <configuration>
+                        <nexusUrl>${nexusproxy}</nexusUrl>
+                        <stagingProfileId>176c31dfe190a</stagingProfileId>
+                        <serverId>ecomp-staging</serverId>
+                    </configuration>
+                </plugin>
+                <!-- Checks (or rewrites, depending on ${license.goal.type}) the
+                     LICENSE.TXT header on java sources and the pom -->
+                <plugin>
+                    <groupId>com.mycila</groupId>
+                    <artifactId>license-maven-plugin</artifactId>
+                    <version>3.0</version>
+                    <configuration>
+                        <header>LICENSE.TXT</header>
+                        <includes>
+                            <include>src/main/java/**</include>
+                            <include>src/test/java/**</include>
+                            <include>pom.xml</include>
+                        </includes>
+                        <skipExistingHeaders>false</skipExistingHeaders>
+                        <skip>false</skip>
+                    </configuration>
+                    <executions>
+                        <execution>
+                            <goals>
+                                <!-- Set goal to "format" to auto update license headers -->
+                                <goal>${license.goal.type}</goal>
+                            </goals>
+                            <phase>process-sources</phase>
+                        </execution>
+                    </executions>
+                </plugin>
+            </plugins>
+        </pluginManagement>
+        <plugins>
+            <!-- Site generation: project-info and javadoc reports; wagon-webdav
+                 is added so the generated site can be published over webdav -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-site-plugin</artifactId>
+                <version>3.6</version>
+                <configuration>
+                    <reportPlugins>
+                        <plugin>
+                            <groupId>org.apache.maven.plugins</groupId>
+                            <artifactId>maven-project-info-reports-plugin</artifactId>
+                            <version>2.4</version>
+                            <configuration>
+                                <dependencyDetailsEnabled>false</dependencyDetailsEnabled>
+                                <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
+                            </configuration>
+                            <reports>
+                                <report>dependencies</report>
+                            </reports>
+                        </plugin>
+                        <plugin>
+                            <groupId>org.apache.maven.plugins</groupId>
+                            <artifactId>maven-javadoc-plugin</artifactId>
+                            <version>2.8</version>
+                            <configuration>
+                                <additionalparam>-Xdoclint:none</additionalparam>
+                            </configuration>
+                        </plugin>
+                    </reportPlugins>
+                </configuration>
+                <dependencies>
+                    <dependency>
+                        <groupId>org.apache.maven.wagon</groupId>
+                        <artifactId>wagon-webdav-jackrabbit</artifactId>
+                        <version>2.10</version>
+                    </dependency>
+                </dependencies>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>sonar-maven-plugin</artifactId>
+                <version>3.3</version>
+            </plugin>
+            <!-- Activate the gmaven and license plugins with the versions and
+                 executions defined in pluginManagement above -->
+            <plugin>
+                <groupId>org.codehaus.groovy.maven</groupId>
+                <artifactId>gmaven-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>com.mycila</groupId>
+                <artifactId>license-maven-plugin</artifactId>
+            </plugin>
+            <!-- On clean, delete the oxm / dbedgerules / schema directories that
+                 the maven-dependency-plugin unpack executions regenerate -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-clean-plugin</artifactId>
+                <version>2.4.1</version>
+                <configuration>
+                    <filesets>
+                        <fileset>
+                            <directory>${project.basedir}/src/main/resources/etc/oxm</directory>
+                            <includes>
+                                <include>**/*</include>
+                            </includes>
+                            <followSymlinks>false</followSymlinks>
+                        </fileset>
+                        <fileset>
+                            <directory>${project.basedir}/src/main/resources/etc/dbedgerules</directory>
+                            <includes>
+                                <include>**/*</include>
+                            </includes>
+                            <followSymlinks>false</followSymlinks>
+                        </fileset>
+                        <fileset>
+                            <directory>${project.basedir}/src/main/resources/schema</directory>
+                            <includes>
+                                <include>**/*</include>
+                            </includes>
+                            <followSymlinks>false</followSymlinks>
+                        </fileset>
+                    </filesets>
+                </configuration>
+            </plugin>
+            <!-- Activates the unpack executions configured in pluginManagement -->
+            <plugin>
+                <artifactId>maven-dependency-plugin</artifactId>
+            </plugin>
+            <!-- Code coverage: instrument the unit tests, write a report, and
+                 enforce a minimum line-coverage ratio -->
+            <plugin>
+                <groupId>org.jacoco</groupId>
+                <artifactId>jacoco-maven-plugin</artifactId>
+                <version>0.7.9</version>
+                <configuration>
+                    <dumpOnExit>true</dumpOnExit>
+                </configuration>
+                <executions>
+                    <!-- Sets ${argLine} with the agent options; surefire picks it up -->
+                    <execution>
+                        <id>jacoco-initialize-unit-tests</id>
+                        <goals>
+                            <goal>prepare-agent</goal>
+                        </goals>
+                        <configuration>
+                            <destFile>${project.build.directory}/coverage-reports/jacoco.exec</destFile>
+                            <!-- <append>true</append> -->
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>post-unit-test</id>
+                        <phase>test</phase>
+                        <goals>
+                            <goal>report</goal>
+                        </goals>
+                        <configuration>
+                            <!-- Sets the path to the file which contains the execution data. -->
+                            <dataFile>${project.build.directory}/coverage-reports/jacoco.exec</dataFile>
+                            <!-- Sets the output directory for the code coverage report. -->
+                            <outputDirectory>${project.reporting.outputDirectory}/jacoco</outputDirectory>
+                        </configuration>
+                    </execution>
+                    <!-- Fails the build when line coverage drops below the limit -->
+                    <execution>
+                        <id>default-check</id>
+                        <goals>
+                            <goal>check</goal>
+                        </goals>
+                        <configuration>
+                            <dataFile>${project.build.directory}/coverage-reports/jacoco.exec</dataFile>
+                            <rules>
+                                <!--  implementation is needed only for Maven 2  -->
+                                <rule implementation="org.jacoco.maven.RuleConfiguration">
+                                    <element>BUNDLE</element>
+                                    <limits>
+                                        <limit implementation="org.jacoco.report.check.Limit">
+                                            <counter>LINE</counter>
+                                            <value>COVEREDRATIO</value>
+                                            <minimum>${jacoco.line.coverage.limit}</minimum>
+                                        </limit>
+                                    </limits>
+                                </rule>
+                            </rules>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <!-- Used for unit tests -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <version>2.12.4</version>
+                <configuration>
+                    <argLine>-noverify ${argLine}</argLine>
+                    <runOrder>alphabetical</runOrder>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
+                <configuration>
+                    <mainClass>${start-class}</mainClass>
+                    <layout>ZIP</layout>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>repackage</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <descriptors>
+                        <descriptor>src/main/assembly/descriptor.xml</descriptor>
+                    </descriptors>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id> <!-- this is used for inheritance merges -->
+                        <phase>package</phase> <!-- bind to the packaging phase -->
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.antlr</groupId>
+                <artifactId>antlr4-maven-plugin</artifactId>
+                <version>4.7</version>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>antlr4</goal>
+                        </goals>
+                        <configuration>
+                            <sourceDirectory>src/main/resources/antlr4</sourceDirectory>
+                            <!-- <outputDirectory>src/main/java/antlr4</outputDirectory> -->
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+        <!-- mention the logback.xml location through system property or environment
+            variable to edit logback.xml at run time -->
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+                <filtering>true</filtering>
+                <includes>
+                    <include>**/*</include>
+                </includes>
+            </resource>
+            <resource>
+                <directory>${project.basedir}/src/main/swm</directory>
+                <targetPath>${project.build.directory}/swm</targetPath>
+                <filtering>false</filtering>
+            </resource>
+            <resource>
+                <directory>${project.basedir}/src/main/resources/etc/appprops/</directory>
+                <includes>
+                    <include>janusgraph-realtime.properties</include>
+                    <include>janusgraph-cached.properties</include>
+                    <include>aaiconfig.properties</include>
+                    <include>aaiEventDMaaPPublisher.properties</include>
+                    <include>preferredRoute.txt</include>
+                    <include>datatoolscrons.properties</include>
+                </includes>
+                <targetPath>${project.build.directory}/swm/package/nix/dist_files/opt/app/${project.artifactId}/appconfig
+                </targetPath>
+                <filtering>false</filtering>
+            </resource>
+            <resource>
+                <directory>${project.basedir}/src/main/resources</directory>
+                <includes>
+                    <include>application.properties</include>
+                    <include>*logback.xml</include>
+                    <include>hbase-site.xml</include>
+                </includes>
+                <targetPath>${project.build.directory}/swm/package/nix/dist_files/opt/app/${project.artifactId}/appconfig
+                </targetPath>
+                <filtering>false</filtering>
+            </resource>
+            <resource>
+                <directory>${project.basedir}/src/main/resources/schema/</directory>
+                <includes>
+                    <include>**/oxm/**/*.xml</include>
+                    <include>**/dbedgerules/**/*.json</include>
+                </includes>
+                <targetPath>${project.build.directory}/swm/package/nix/dist_files/opt/app/${project.artifactId}/appconfig/schema</targetPath>
+                <filtering>false</filtering>
+            </resource>
+            <resource>
+                <directory>${project.basedir}/src/main/docker</directory>
+                <includes>
+                    <include>**/*</include>
+                </includes>
+                <targetPath>${aai.build.directory}</targetPath>
+                <filtering>true</filtering>
+            </resource>
+            <resource>
+                <directory>${project.basedir}/src/main/resources/etc/auth</directory>
+                <includes>
+                    <include>**/*</include>
+                </includes>
+                <targetPath>${project.build.directory}/swm/package/nix/dist_files/opt/app/${project.artifactId}/appconfig
+                </targetPath>
+                <filtering>false</filtering>
+            </resource>
+            <!--
+               Place any migration-related files into migration-input-files
+               so they can be packaged into this one location and volumed
+               into the container as a single folder, rather than doing
+               this separately for each subfolder.
+           -->
+            <resource>
+                <directory>${project.basedir}/src/main/resources/migration-input-files</directory>
+                <includes>
+                    <include>**/*</include>
+                </includes>
+                <targetPath>
+                    ${project.build.directory}/swm/package/nix/dist_files/opt/app/${project.artifactId}/appconfig/migration-input-files
+                </targetPath>
+                <filtering>false</filtering>
+            </resource>
+        </resources>
+    </build>
+
+    <distributionManagement>
+        <snapshotRepository>
+            <id>ecomp-snapshots</id>
+            <name>ECOMP Snapshot Repository</name>
+            <url>${onap.nexus.url}/content/repositories/snapshots/</url>
+        </snapshotRepository>
+        <site>
+            <id>ecomp-site</id>
+            <url>dav:${onap.nexus.url}${sitePath}</url>
+        </site>
+    </distributionManagement>
+    <reporting>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-javadoc-plugin</artifactId>
+                <version>2.10.4</version>
+                <configuration>
+                    <failOnError>false</failOnError>
+                    <doclet>org.umlgraph.doclet.UmlGraphDoc</doclet>
+                    <docletArtifact>
+                        <groupId>org.umlgraph</groupId>
+                        <artifactId>umlgraph</artifactId>
+                        <version>5.6</version>
+                    </docletArtifact>
+                    <additionalparam>-views -Xdoclint:none</additionalparam>
+                    <excludePackageNames>org.onap.aai.domain.yang.*:org.onap.aai.util.*</excludePackageNames>
+                    <useStandardDocletOptions>true</useStandardDocletOptions>
+                </configuration>
+            </plugin>
+        </plugins>
+    </reporting>
+
+    <!-- Start of ONAP Specific Repositories -->
+    <repositories>
+        <repository>
+            <id>AJSC</id>
+            <name>AJSC repository</name>
+            <url>https://mvnrepository.com/artifact/com.att.ajsc</url>
+        </repository>
+        <repository>
+            <id>restlet</id>
+            <name>maven restlet</name>
+            <url>https://maven.restlet.com/</url>
+        </repository>
+
+        <repository>
+            <id>central</id>
+            <name>Maven 2 repository 2</name>
+            <url>https://repo.maven.apache.org/maven2/</url>
+        </repository>
+        <repository>
+            <id>ecomp-releases</id>
+            <name>ECOMP Release Repository</name>
+            <url>${onap.nexus.url}/content/repositories/releases/</url>
+        </repository>
+        <repository>
+            <id>ecomp-staging</id>
+            <name>ECOMP Staging Repository</name>
+            <url>${onap.nexus.url}/content/repositories/staging/</url>
+        </repository>
+        <repository>
+            <id>ecomp-snapshots</id>
+            <name>ECOMP Snapshot Repository</name>
+            <url>${onap.nexus.url}/content/repositories/snapshots/</url>
+        </repository>
+    </repositories>
+    <!-- End of ONAP Specific Repositories -->
+</project>
+
diff --git a/src/main/assembly/descriptor.xml b/src/main/assembly/descriptor.xml
new file mode 100644 (file)
index 0000000..91e8e18
--- /dev/null
@@ -0,0 +1,32 @@
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"\r
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\r
+          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">\r
+    <id>build</id>\r
+    <includeBaseDirectory>false</includeBaseDirectory>\r
+    <formats>\r
+        <format>dir</format>\r
+    </formats>\r
+    <fileSets>\r
+        <fileSet>\r
+            <directory>${project.basedir}/src/main/resources</directory>\r
+            <outputDirectory>/resources</outputDirectory>\r
+            <includes>\r
+                <include>**/*</include>\r
+            </includes>\r
+        </fileSet>\r
+        <fileSet>\r
+            <directory>${project.basedir}/src/main/scripts</directory>\r
+            <outputDirectory>/bin</outputDirectory>\r
+            <includes>\r
+                <include>**/*</include>\r
+            </includes>\r
+        </fileSet>\r
+        <fileSet>\r
+            <directory>${project.build.directory}</directory>\r
+            <outputDirectory>/lib</outputDirectory>\r
+            <includes>\r
+                <include>${project.artifactId}-${project.version}.jar</include>\r
+            </includes>\r
+        </fileSet>\r
+    </fileSets>\r
+</assembly>\r
diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile
new file mode 100755 (executable)
index 0000000..f1454e3
--- /dev/null
@@ -0,0 +1,22 @@
FROM aaionap/aai-common:1.3.0

# Add the proper files into the docker image from your build
WORKDIR /opt/app/aai-graphadmin

# Expose the ports for outside linux to use
# 8449 is the port this service listens on (see SERVER_PORT default
# in docker-entrypoint.sh); the healthcheck below probes the same port.
EXPOSE 8449

# Container is considered unhealthy when the service port stops accepting
# TCP connections for 3 consecutive 40s-interval probes.
HEALTHCHECK --interval=40s --timeout=10s --retries=3 CMD nc -z -v localhost 8449 || exit 1

ENTRYPOINT ["/bin/bash", "/opt/app/aai-graphadmin/docker-entrypoint.sh"]

# Home dir for the aaiadmin account and the log root volumed out below.
RUN  mkdir -p /opt/aaihome/aaiadmin /opt/aai/logroot/AAI-GA

VOLUME /opt/aai/logroot/AAI-GA
VOLUME /opt/data
VOLUME /opt/tools

# /maven/aai-graphadmin/ is populated by the docker-maven build
# (see the assembly descriptor) with resources/, bin/ and lib/.
COPY /maven/aai-graphadmin/ .

# @aai.docker.version@ is substituted at build time via resource filtering.
ENV AAI_BUILD_VERSION @aai.docker.version@
diff --git a/src/main/docker/aai.sh b/src/main/docker/aai.sh
new file mode 100644 (file)
index 0000000..f68dc21
--- /dev/null
@@ -0,0 +1,44 @@
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+
+PROJECT_HOME=/opt/app/aai-graphadmin
+export PROJECT_HOME
+
+JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
+export JAVA_HOME
+
+AAIENV=dev
+export AAIENV
+
+PATH=/usr/lib/jvm/java-8-openjdk-amd64:$PATH
+
+PROJECT_OWNER=aaiadmin
+PROJECT_GROUP=aaiadmin
+PROJECT_UNIXHOMEROOT=/opt/aaihome
+export PROJECT_OWNER PROJECT_GROUP PROJECT_UNIXHOMEROOT
+umask 0022
+
+export idns_api_url=
+export idnscred=
+export idnstenant=
+
+
diff --git a/src/main/docker/docker-entrypoint.sh b/src/main/docker/docker-entrypoint.sh
new file mode 100644 (file)
index 0000000..2f90ce5
--- /dev/null
@@ -0,0 +1,134 @@
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+
+APP_HOME=$(pwd);
+RESOURCES_HOME=${APP_HOME}/resources/;
+
+export SERVER_PORT=${SERVER_PORT:-8449};
+
+USER_ID=${LOCAL_USER_ID:-9001}
+GROUP_ID=${LOCAL_GROUP_ID:-9001}
+
+echo "Project Build Version: ${AAI_BUILD_VERSION}";
+
+if [ $(cat /etc/passwd | grep aaiadmin | wc -l) -eq 0 ]; then
+
+       groupadd aaiadmin -g ${GROUP_ID} || {
+               echo "Unable to create the group id for ${GROUP_ID}";
+               exit 1;
+       }
+       useradd --shell=/bin/bash -u ${USER_ID} -g ${GROUP_ID} -o -c "" -m aaiadmin || {
+               echo "Unable to create the user id for ${USER_ID}";
+               exit 1;
+       }
+fi;
+
+chown -R aaiadmin:aaiadmin /opt/app /opt/aai/logroot /var/chef
+find /opt/app/ -name "*.sh" -exec chmod +x {} +
+
+if [ -f ${APP_HOME}/aai.sh ]; then
+    gosu aaiadmin ln -s bin scripts
+    gosu aaiadmin ln -s /opt/aai/logroot/AAI-GA logs
+
+    mv ${APP_HOME}/aai.sh /etc/profile.d/aai.sh
+    chmod 755 /etc/profile.d/aai.sh
+
+    scriptName=$1;
+
+    if [ ! -z $scriptName ]; then
+
+        if [ -f ${APP_HOME}/bin/${scriptName} ]; then
+            shift 1;
+            gosu aaiadmin ${APP_HOME}/bin/${scriptName} "$@" || {
+                echo "Failed to run the ${scriptName}";
+                exit 1;
+            }
+        else
+            echo "Unable to find the script ${scriptName} in ${APP_HOME}/bin";
+            exit 1;
+        fi;
+
+        exit 0;
+    fi;
+
+fi;
+
+mkdir -p /opt/app/aai-graphadmin/logs/gc
+chown -R aaiadmin:aaiadmin /opt/app/aai-graphadmin/logs/gc
+
+if [ -f ${APP_HOME}/resources/aai-graphadmin-swm-vars.sh ]; then
+    source ${APP_HOME}/resources/aai-graphadmin-swm-vars.sh;
+fi;
+
+MIN_HEAP_SIZE=${MIN_HEAP_SIZE:-512m};
+MAX_HEAP_SIZE=${MAX_HEAP_SIZE:-1024m};
+MAX_PERM_SIZE=${MAX_PERM_SIZE:-512m};
+PERM_SIZE=${PERM_SIZE:-512m};
+
+JAVA_CMD="exec gosu aaiadmin java";
+
+JVM_OPTS="${PRE_JVM_ARGS} -Xloggc:/opt/app/aai-graphadmin/logs/gc/aai_gc.log";
+JVM_OPTS="${JVM_OPTS} -XX:HeapDumpPath=/opt/app/aai-graphadmin/logs/ajsc-jetty/heap-dump";
+JVM_OPTS="${JVM_OPTS} -Xms${MIN_HEAP_SIZE}";
+JVM_OPTS="${JVM_OPTS} -Xmx${MAX_HEAP_SIZE}";
+
+JVM_OPTS="${JVM_OPTS} -XX:+PrintGCDetails";
+JVM_OPTS="${JVM_OPTS} -XX:+PrintGCTimeStamps";
+JVM_OPTS="${JVM_OPTS} -XX:MaxPermSize=${MAX_PERM_SIZE}";
+JVM_OPTS="${JVM_OPTS} -XX:PermSize=${PERM_SIZE}";
+
+JVM_OPTS="${JVM_OPTS} -server";
+JVM_OPTS="${JVM_OPTS} -XX:NewSize=512m";
+JVM_OPTS="${JVM_OPTS} -XX:MaxNewSize=512m";
+JVM_OPTS="${JVM_OPTS} -XX:SurvivorRatio=8";
+JVM_OPTS="${JVM_OPTS} -XX:+DisableExplicitGC";
+JVM_OPTS="${JVM_OPTS} -verbose:gc";
+JVM_OPTS="${JVM_OPTS} -XX:+UseParNewGC";
+JVM_OPTS="${JVM_OPTS} -XX:+CMSParallelRemarkEnabled";
+JVM_OPTS="${JVM_OPTS} -XX:+CMSClassUnloadingEnabled";
+JVM_OPTS="${JVM_OPTS} -XX:+UseConcMarkSweepGC";
+JVM_OPTS="${JVM_OPTS} -XX:-UseBiasedLocking";
+JVM_OPTS="${JVM_OPTS} -XX:ParallelGCThreads=4";
+JVM_OPTS="${JVM_OPTS} -XX:LargePageSizeInBytes=128m";
+JVM_OPTS="${JVM_OPTS} -XX:+PrintGCDetails";
+JVM_OPTS="${JVM_OPTS} -XX:+PrintGCTimeStamps";
+JVM_OPTS="${JVM_OPTS} -Dsun.net.inetaddr.ttl=180";
+JVM_OPTS="${JVM_OPTS} -XX:+HeapDumpOnOutOfMemoryError";
+JVM_OPTS="${JVM_OPTS} ${POST_JVM_ARGS}";
+JAVA_OPTS="${PRE_JAVA_OPTS} -DAJSC_HOME=$APP_HOME";
+if [ -f ${INTROSCOPE_LIB}/Agent.jar ] && [ -f ${INTROSCOPE_AGENTPROFILE} ]; then
+        JAVA_OPTS="${JAVA_OPTS} -javaagent:${INTROSCOPE_LIB}/Agent.jar -noverify -Dcom.wily.introscope.agentProfile=${INTROSCOPE_AGENTPROFILE} -Dintroscope.agent.agentName=graphadmin"
+fi
+JAVA_OPTS="${JAVA_OPTS} -Dserver.port=${SERVER_PORT}";
+JAVA_OPTS="${JAVA_OPTS} -DBUNDLECONFIG_DIR=./resources";
+JAVA_OPTS="${JAVA_OPTS} -Dserver.local.startpath=${RESOURCES_HOME}";
+JAVA_OPTS="${JAVA_OPTS} -DAAI_CHEF_ENV=${AAI_CHEF_ENV}";
+JAVA_OPTS="${JAVA_OPTS} -DSCLD_ENV=${SCLD_ENV}";
+JAVA_OPTS="${JAVA_OPTS} -DAFT_ENVIRONMENT=${AFT_ENVIRONMENT}";
+JAVA_OPTS="${JAVA_OPTS} -DlrmName=com.att.ajsc.aai-graphadmin";
+JAVA_OPTS="${JAVA_OPTS} -DAAI_BUILD_VERSION=${AAI_BUILD_VERSION}";
+JAVA_OPTS="${JAVA_OPTS} -Djava.security.egd=file:/dev/./urandom";
+JAVA_OPTS="${JAVA_OPTS} -Dlogback.configurationFile=./resources/logback.xml";
+JAVA_OPTS="${JAVA_OPTS} -Dloader.path=$APP_HOME/resources";
+JAVA_OPTS="${JAVA_OPTS} ${POST_JAVA_OPTS}";
+
+JAVA_MAIN_JAR=$(ls lib/aai-graphadmin*.jar);
+
+${JAVA_CMD} ${JVM_OPTS} ${JAVA_OPTS} -jar ${JAVA_MAIN_JAR};
diff --git a/src/main/java/org/onap/aai/GraphAdminApp.java b/src/main/java/org/onap/aai/GraphAdminApp.java
new file mode 100644 (file)
index 0000000..aa9c457
--- /dev/null
@@ -0,0 +1,129 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.nodes.NodeIngestor;
+import org.onap.aai.util.AAIConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
+import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.core.env.Environment;
+import org.springframework.scheduling.annotation.EnableAsync;
+import org.springframework.scheduling.annotation.EnableScheduling;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import java.util.UUID;
+
+@SpringBootApplication
+// Scan the specific packages that has the beans/components
+// This will add the ScheduledTask that was created in aai-common
+// Add more packages where you would need to scan for files
+@ComponentScan(basePackages = {
+        "org.onap.aai.tasks",
+        "org.onap.aai.config",
+        "org.onap.aai.service",
+        "org.onap.aai.setup",
+        "org.onap.aai.rest",
+        "org.onap.aai.web",
+        "org.onap.aai.interceptors",
+        "org.onap.aai.datasnapshot",
+        "org.onap.aai.datagrooming",
+        "org.onap.aai.datacleanup"
+})
+@EnableAsync
+@EnableScheduling
+@EnableAutoConfiguration(exclude = {DataSourceAutoConfiguration.class, HibernateJpaAutoConfiguration.class})
+public class GraphAdminApp {
+
+    public static final String APP_NAME = "GraphAdmin";
+    private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(GraphAdminApp.class);
+
+    @Autowired
+    private Environment env;
+
+    @Autowired
+    private NodeIngestor nodeIngestor;
+
+    @PostConstruct
+    private void initialize(){
+        loadDefaultProps();
+
+        LoggingContext.save();
+        LoggingContext.component("init");
+        LoggingContext.partnerName("NA");
+        LoggingContext.targetEntity(APP_NAME);
+        LoggingContext.requestId(UUID.randomUUID().toString());
+        LoggingContext.serviceName(APP_NAME);
+        LoggingContext.targetServiceName("contextInitialized");
+    }
+
+    @PreDestroy
+    public void cleanup(){
+        AAIGraph.getInstance().graphShutdown();
+    }
+
+    public static void main(String[] args) throws Exception {
+
+        loadDefaultProps();
+        SpringApplication app = new SpringApplication(GraphAdminApp.class);
+        app.setRegisterShutdownHook(true);
+        app.addInitializers(new PropertyPasswordConfiguration());
+        Environment env = app.run(args).getEnvironment();
+
+        LOGGER.info(
+                "Application '{}' is running on {}!" ,
+                env.getProperty("spring.application.name"),
+                env.getProperty("server.port")
+        );
+        // The main reason this was moved from the constructor is due
+        // to the SchemaGenerator needs the bean and during the constructor
+        // the Spring Context is not yet initialized
+
+        AAIConfig.init();
+        AAIGraph.getInstance();
+
+        System.setProperty("org.onap.aai.graphadmin.started", "true");             
+        LOGGER.info("GraphAdmin MicroService Started");
+        LOGGER.error("GraphAdmin MicroService Started");
+        LOGGER.debug("GraphAdmin MicroService Started");
+        System.out.println("GraphAdmin Microservice Started");
+    }
+
+    public static void loadDefaultProps(){
+
+        if(System.getProperty("AJSC_HOME") == null){
+            System.setProperty("AJSC_HOME", ".");
+        }
+
+        if(System.getProperty("BUNDLECONFIG_DIR") == null){
+            System.setProperty("BUNDLECONFIG_DIR", "src/main/resources");
+        }
+    }
+}
diff --git a/src/main/java/org/onap/aai/Profiles.java b/src/main/java/org/onap/aai/Profiles.java
new file mode 100644 (file)
index 0000000..f0419d8
--- /dev/null
@@ -0,0 +1,31 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai;
+
/**
 * Spring profile name constants used by the GraphAdmin application.
 *
 * <p>Non-instantiable holder class; reference the constants statically.
 */
public final class Profiles {

    private Profiles() {
        // utility class - no instances
    }

    /** Profile name for DMaaP-based messaging. */
    public static final String DMAAP = "dmaap";

    /** Profile name for DME2-based routing. */
    public static final String DME2 = "dme2";

    /** Profile name enabling one-way SSL on the server. */
    public static final String ONE_WAY_SSL = "one-way-ssl";

    /** Profile name enabling two-way (mutual) SSL on the server. */
    public static final String TWO_WAY_SSL = "two-way-ssl";
}
diff --git a/src/main/java/org/onap/aai/config/AuditorConfiguration.java b/src/main/java/org/onap/aai/config/AuditorConfiguration.java
new file mode 100644 (file)
index 0000000..9377393
--- /dev/null
@@ -0,0 +1,34 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.config;
+
+import org.onap.aai.db.schema.AuditorFactory;
+import org.onap.aai.introspection.LoaderFactory;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class AuditorConfiguration {
+
+    @Bean
+    public AuditorFactory auditorFactory(LoaderFactory loaderFactory){
+        return new AuditorFactory(loaderFactory);
+    }
+}
diff --git a/src/main/java/org/onap/aai/config/DslConfiguration.java b/src/main/java/org/onap/aai/config/DslConfiguration.java
new file mode 100644 (file)
index 0000000..74bc046
--- /dev/null
@@ -0,0 +1,44 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.config;
+
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.rest.dsl.DslListener;
+import org.onap.aai.rest.dsl.DslQueryProcessor;
+import org.springframework.beans.factory.config.ConfigurableBeanFactory;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Scope;
+
+@Configuration
+public class DslConfiguration {
+
+    @Bean
+    @Scope(scopeName = ConfigurableBeanFactory.SCOPE_PROTOTYPE)
+    public DslListener dslListener(EdgeIngestor edgeIngestor){
+        return new DslListener(edgeIngestor);
+    }
+
+    @Bean
+    @Scope(scopeName = ConfigurableBeanFactory.SCOPE_PROTOTYPE)
+    public DslQueryProcessor dslQueryProcessor(DslListener dslListener){
+        return new DslQueryProcessor(dslListener);
+    }
+}
diff --git a/src/main/java/org/onap/aai/config/JettyPasswordDecoder.java b/src/main/java/org/onap/aai/config/JettyPasswordDecoder.java
new file mode 100644 (file)
index 0000000..944f951
--- /dev/null
@@ -0,0 +1,33 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.config;
+
+import org.eclipse.jetty.util.security.Password;
+
+public class JettyPasswordDecoder implements PasswordDecoder {
+
+    @Override
+    public String decode(String input) {
+        if (input.startsWith("OBF:")) {
+            return Password.deobfuscate(input);
+        }
+        return Password.deobfuscate("OBF:" + input);
+    }
+}
diff --git a/src/main/java/org/onap/aai/config/PasswordDecoder.java b/src/main/java/org/onap/aai/config/PasswordDecoder.java
new file mode 100644 (file)
index 0000000..0dcb845
--- /dev/null
@@ -0,0 +1,25 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.config;
+
+/**
+ * Strategy interface for turning an obfuscated/encoded password value into
+ * its clear-text form (see {@code JettyPasswordDecoder} for the Jetty-based
+ * implementation).
+ */
+public interface PasswordDecoder {
+
+    /**
+     * Decodes the given obfuscated password value.
+     *
+     * @param input the encoded/obfuscated password
+     * @return the decoded clear-text password
+     */
+    String decode(String input);
+}
diff --git a/src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java b/src/main/java/org/onap/aai/config/PropertyPasswordConfiguration.java
new file mode 100644 (file)
index 0000000..9befb13
--- /dev/null
@@ -0,0 +1,78 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.config;
+
+import org.springframework.context.ApplicationContextInitializer;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.core.env.ConfigurableEnvironment;
+import org.springframework.core.env.EnumerablePropertySource;
+import org.springframework.core.env.MapPropertySource;
+import org.springframework.core.env.PropertySource;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class PropertyPasswordConfiguration implements ApplicationContextInitializer<ConfigurableApplicationContext> {
+
+    private static final Pattern decodePasswordPattern = Pattern.compile("password\\((.*?)\\)");
+
+    private PasswordDecoder passwordDecoder = new JettyPasswordDecoder();
+
+    @Override
+    public void initialize(ConfigurableApplicationContext applicationContext) {
+        ConfigurableEnvironment environment = applicationContext.getEnvironment();
+        for (PropertySource<?> propertySource : environment.getPropertySources()) {
+            Map<String, Object> propertyOverrides = new LinkedHashMap<>();
+            decodePasswords(propertySource, propertyOverrides);
+            if (!propertyOverrides.isEmpty()) {
+                PropertySource<?> decodedProperties = new MapPropertySource("decoded "+ propertySource.getName(), propertyOverrides);
+                environment.getPropertySources().addBefore(propertySource.getName(), decodedProperties);
+            }
+        }
+    }
+
+    private void decodePasswords(PropertySource<?> source, Map<String, Object> propertyOverrides) {
+        if (source instanceof EnumerablePropertySource) {
+            EnumerablePropertySource<?> enumerablePropertySource = (EnumerablePropertySource<?>) source;
+            for (String key : enumerablePropertySource.getPropertyNames()) {
+                Object rawValue = source.getProperty(key);
+                if (rawValue instanceof String) {
+                    String decodedValue = decodePasswordsInString((String) rawValue);
+                    propertyOverrides.put(key, decodedValue);
+                }
+            }
+        }
+    }
+
+    private String decodePasswordsInString(String input) {
+        if (input == null) return null;
+        StringBuffer output = new StringBuffer();
+        Matcher matcher = decodePasswordPattern.matcher(input);
+        while (matcher.find()) {
+            String replacement = passwordDecoder.decode(matcher.group(1));
+            matcher.appendReplacement(output, replacement);
+        }
+        matcher.appendTail(output);
+        return output.toString();
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java b/src/main/java/org/onap/aai/datacleanup/DataCleanupTasks.java
new file mode 100644 (file)
index 0000000..a3dc708
--- /dev/null
@@ -0,0 +1,317 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.datacleanup;\r
+import java.io.File;\r
+import java.io.FileInputStream;\r
+import java.io.FileOutputStream;\r
+import java.io.IOException;\r
+import java.nio.file.Files;\r
+import java.nio.file.attribute.BasicFileAttributes;\r
+import java.nio.file.attribute.FileTime;\r
+import java.text.DateFormat;\r
+import java.text.ParseException;\r
+import java.text.SimpleDateFormat;\r
+import java.time.ZoneId;\r
+import java.util.ArrayList;\r
+import java.util.Calendar;\r
+import java.util.Date;\r
+import java.util.zip.ZipEntry;\r
+import java.util.zip.ZipOutputStream;\r
+\r
+import org.onap.aai.exceptions.AAIException;\r
+import org.springframework.context.annotation.PropertySource;\r
+import org.springframework.scheduling.annotation.Scheduled;\r
+import org.springframework.stereotype.Component;\r
+\r
+import com.att.eelf.configuration.EELFLogger;\r
+import com.att.eelf.configuration.EELFManager;\r
+\r
+import org.onap.aai.logging.ErrorLogHelper;\r
+import org.onap.aai.logging.LoggingContext;\r
+import org.onap.aai.logging.LoggingContext.StatusCode;\r
+import org.onap.aai.util.AAIConfig;\r
+import org.onap.aai.util.AAIConstants;\r
+\r
+import com.att.eelf.configuration.EELFLogger;\r
+\r
+@Component\r
+@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")\r
+public class DataCleanupTasks {\r
+\r
+       private static final EELFLogger logger = EELFManager.getInstance().getLogger(DataCleanupTasks.class);\r
+       private static final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyyMMdd");\r
+       \r
+       /**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.\r
+               logDir is the {project_home}/logs\r
+               archiveDir is the ARCHIVE directory where the files will be stored after 5 days.\r
+               ageZip is the number of days after which the file will be moved to the ARCHIVE folder.\r
+               ageDelete is the number of days after which the data files will be deleted i.e after 30 days.\r
+       */\r
+       @Scheduled(cron = "${datagroomingcleanup.cron}" )\r
+       public void dataGroomingCleanup() throws AAIException, Exception {\r
+               \r
+               logger.info("Started cron job dataGroomingCleanup @ " + simpleDateFormat.format(new Date()));\r
+               \r
+               try {\r
+                       String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";\r
+                       String dataGroomingDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataGrooming";\r
+                       String archiveDir = dataGroomingDir + AAIConstants.AAI_FILESEP + "ARCHIVE";\r
+                       String dataGroomingArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataGrooming";             \r
+                       File path = new File(dataGroomingDir);\r
+                       File archivepath = new File(archiveDir);\r
+                       File dataGroomingPath = new File(dataGroomingArcDir);\r
+               \r
+                       logger.info("The logDir is " + logDir);\r
+                       logger.info("The dataGroomingDir is " + dataGroomingDir);\r
+                       logger.info("The archiveDir is " + archiveDir );\r
+                       logger.info("The dataGroomingArcDir is " + dataGroomingArcDir );\r
+               \r
+                       boolean exists = directoryExists(logDir);\r
+                       logger.info("Directory" + logDir + "exists: " + exists);\r
+                       if(exists == false)\r
+                               logger.error("The directory" + logDir +"does not exists");\r
+               \r
+                       Integer ageZip = AAIConfig.getInt("aai.datagrooming.agezip");\r
+                       Integer ageDelete = AAIConfig.getInt("aai.datagrooming.agedelete");\r
+                                                       \r
+                       Date newAgeZip = getZipDate(ageZip);\r
+                                                               \r
+                       //Iterate through the dataGroomingDir\r
+                       File[] listFiles = path.listFiles();  \r
+                       if(listFiles != null) {\r
+                               for(File listFile : listFiles) {\r
+                                       if (listFile.toString().contains("ARCHIVE")){\r
+                                               continue;\r
+                                       }\r
+                                       if(listFile.isFile()){\r
+                                               logger.info("The file name in dataGrooming: " +listFile.getName()); \r
+                                               Date fileCreateDate = fileCreationMonthDate(listFile);\r
+                                               logger.info("The fileCreateDate in dataGrooming is " + fileCreateDate);\r
+                                               if( fileCreateDate.compareTo(newAgeZip) < 0) {\r
+                                               archive(listFile,archiveDir,dataGroomingArcDir);                                                \r
+                                               }\r
+                                       }\r
+                               }\r
+                       }\r
+               \r
+                       Date newAgeDelete = getZipDate(ageDelete);\r
+                       //Iterate through the archive/dataGrooming dir\r
+                       File[] listFilesArchive = dataGroomingPath.listFiles(); \r
+                       if(listFilesArchive != null) {\r
+                               for(File listFileArchive : listFilesArchive) { \r
+                                       if(listFileArchive.isFile()) {\r
+                               logger.info("The file name in ARCHIVE/dataGrooming: " +listFileArchive.getName()); \r
+                               Date fileCreateDate = fileCreationMonthDate(listFileArchive);\r
+                               logger.info("The fileCreateDate in ARCHIVE/dataGrooming is " + fileCreateDate);\r
+                               if(fileCreateDate.compareTo(newAgeDelete) < 0) {\r
+                                       delete(listFileArchive);\r
+                                       }\r
+                               }       \r
+                       }\r
+                       }\r
+               }\r
+               catch (Exception e) {\r
+                       ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());\r
+                       logger.info("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());\r
+                       throw e;\r
+               }\r
+       }\r
+       \r
+    /**\r
+     * This method checks if the directory exists\r
+     * @param DIR\r
+     * \r
+     */\r
+    public boolean directoryExists(String dir) {\r
+       File path = new File(dir);\r
+               boolean exists = path.exists();\r
+               return exists;  \r
+    }\r
+    \r
+    public Date getZipDate(Integer days) throws Exception {\r
+       return getZipDate(days, new Date());\r
+    }\r
+    \r
+    public Date getZipDate(Integer days, Date date) throws Exception{\r
+       \r
+       Calendar cal = Calendar.getInstance();\r
+       logger.info("The current date is " + date );\r
+       cal.setTime(date);      \r
+       cal.add(Calendar.DATE, -days);\r
+       Date newAgeZip = cal.getTime();\r
+               logger.info("The newAgeDate is " +newAgeZip);\r
+               return newAgeZip;               \r
+    }\r
+    \r
+    \r
+    public Date fileCreationMonthDate (File file) throws Exception {\r
+\r
+        BasicFileAttributes attr = Files.readAttributes(file.toPath(),\r
+                                                        BasicFileAttributes.class);\r
+        FileTime time = attr.creationTime();\r
+           String formatted = simpleDateFormat.format( new Date( time.toMillis() ) );\r
+           Date d = simpleDateFormat.parse(formatted);\r
+           return d;\r
+    }\r
+    \r
+    /**\r
+     * This method will zip the files and add it to the archive folder\r
+     * Checks if the archive folder exists, if not then creates one\r
+     * After adding the file to archive folder it deletes the file from the filepath\r
+     * @throws AAIException\r
+     * @throws Exception\r
+     */\r
+    public void archive(File file, String archiveDir, String afterArchiveDir) throws AAIException, Exception {\r
+               \r
+       logger.info("Inside the archive folder");  \r
+       String filename = file.getName();\r
+       logger.info("file name is " +filename);\r
+               File archivepath = new File(archiveDir);\r
+               \r
+               String zipFile = afterArchiveDir + AAIConstants.AAI_FILESEP + filename;\r
+               \r
+               File dataGroomingPath = new File(afterArchiveDir);\r
+       \r
+               boolean exists = directoryExists(archiveDir);\r
+               logger.info("Directory" + archiveDir + "exists: " + exists);            \r
+               if(exists == false) {\r
+                       logger.error("The directory" + archiveDir +"does not exists so will create a new archive folder");\r
+                       //Create an archive folder if does not exists           \r
+                       boolean flag = dataGroomingPath.mkdirs();\r
+                       if(flag == false)\r
+                               logger.error("Failed to create ARCHIVE folder");                \r
+               }\r
+               try {           \r
+                       FileOutputStream outputstream = new FileOutputStream(zipFile + ".gz");\r
+                       ZipOutputStream zoutputstream = new ZipOutputStream(outputstream);\r
+                       ZipEntry ze = new ZipEntry(file.getName());\r
+                       zoutputstream.putNextEntry(ze);\r
+                       //read the file and write to the zipOutputStream\r
+                       FileInputStream inputstream = new FileInputStream(file);\r
+                       byte[] buffer = new byte[1024];\r
+                       int len;\r
+                       while ((len = inputstream.read(buffer)) > 0) {\r
+                               zoutputstream.write(buffer,0,len);\r
+                       }                       \r
+                       //close all the sources\r
+                       zoutputstream.closeEntry();\r
+                       zoutputstream.close();\r
+                       inputstream.close();\r
+                       outputstream.close();\r
+                       //Delete the file after been added to archive folder\r
+                       delete(file);\r
+                       logger.info("The file archived is " + file + " at " + afterArchiveDir );\r
+               }       \r
+        catch (IOException e) {\r
+                ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup " + e.getStackTrace());\r
+                logger.info("AAI_4000", "Exception running cron job for DataCleanup", e);\r
+                throw e;\r
+               }\r
+    }\r
+    \r
+    /**\r
+     * This method will delete all the files from the archive folder that are older than 60 days\r
+     * @param file\r
+     */\r
+    public static void delete(File file) {\r
+       \r
+       logger.info("Deleting the file " + file);\r
+       boolean deleteStatus = file.delete();\r
+               if(deleteStatus == false){\r
+                       logger.error("Failed to delete the file" +file);                        \r
+               }\r
+    }\r
+    \r
+    /**The function archives/deletes files that end in .out (Ie. dataGrooming.201511111305.out) that sit in our log/data directory structure.\r
+       logDir is the {project_home}/logs\r
+       archiveDir is the ARCHIVE directory where the files will be stored after 5 days.\r
+       ageZip is the number of days after which the file will be moved to the ARCHIVE folder.\r
+       ageDelete is the number of days after which the data files will be deleted i.e after 30 days.\r
+*/\r
+    @Scheduled(cron = "${datasnapshotcleanup.cron}" )\r
+    public void dataSnapshotCleanup() throws AAIException, Exception {\r
+       \r
+       logger.info("Started cron job dataSnapshotCleanup @ " + simpleDateFormat.format(new Date()));\r
+       \r
+       try {\r
+               String logDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs";\r
+               String dataSnapshotDir = logDir + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataSnapshots";\r
+               String archiveDir = dataSnapshotDir + AAIConstants.AAI_FILESEP + "ARCHIVE";\r
+               String dataSnapshotArcDir = archiveDir + AAIConstants.AAI_FILESEP + "dataSnapshots";            \r
+               File path = new File(dataSnapshotDir);\r
+               File archivepath = new File(archiveDir);\r
+               File dataSnapshotPath = new File(dataSnapshotArcDir);\r
+       \r
+               logger.info("The logDir is " + logDir);\r
+               logger.info("The dataSnapshotDir is " + dataSnapshotDir);\r
+               logger.info("The archiveDir is " + archiveDir );\r
+               logger.info("The dataSnapshotArcDir is " + dataSnapshotArcDir );\r
+       \r
+               boolean exists = directoryExists(logDir);\r
+               logger.info("Directory" + logDir + "exists: " + exists);\r
+               if(exists == false)\r
+                       logger.error("The directory" + logDir +"does not exists");\r
+       \r
+               Integer ageZipSnapshot = AAIConfig.getInt("aai.datasnapshot.agezip");\r
+               Integer ageDeleteSnapshot = AAIConfig.getInt("aai.datasnapshot.agedelete");\r
+               \r
+               Date newAgeZip = getZipDate(ageZipSnapshot);\r
+                                       \r
+               //Iterate through the dataGroomingDir\r
+               File[] listFiles = path.listFiles();  \r
+               if(listFiles != null) {\r
+                       for(File listFile : listFiles) {\r
+                               if (listFile.toString().contains("ARCHIVE")){\r
+                                       continue;\r
+                               }\r
+                               if(listFile.isFile()){\r
+                                       logger.info("The file name in dataSnapshot: " +listFile.getName()); \r
+                                       Date fileCreateDate = fileCreationMonthDate(listFile);\r
+                                       logger.info("The fileCreateDate in dataSnapshot is " + fileCreateDate);\r
+                                       if( fileCreateDate.compareTo(newAgeZip) < 0) {\r
+                                               archive(listFile,archiveDir,dataSnapshotArcDir);                                                \r
+                                       }\r
+                               }\r
+                       }\r
+               }\r
+       \r
+               Date newAgeDelete = getZipDate(ageDeleteSnapshot);\r
+               //Iterate through the archive/dataSnapshots dir\r
+               File[] listFilesArchive = dataSnapshotPath.listFiles(); \r
+               if(listFilesArchive != null) {\r
+                       for(File listFileArchive : listFilesArchive) { \r
+                               if(listFileArchive.isFile()) {\r
+                                       logger.info("The file name in ARCHIVE/dataSnapshot: " +listFileArchive.getName()); \r
+                                       Date fileCreateDate = fileCreationMonthDate(listFileArchive);\r
+                                       logger.info("The fileCreateDate in ARCHIVE/dataSnapshot is " + fileCreateDate);\r
+                                       if(fileCreateDate.compareTo(newAgeDelete) < 0) {\r
+                                               delete(listFileArchive);\r
+                                       }\r
+                               }       \r
+                       }\r
+               }\r
+       }\r
+       catch (Exception e) {\r
+               ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());\r
+               logger.info("AAI_4000", "Exception running cron job for DataCleanup"+e.toString());\r
+               throw e;\r
+       }\r
+  }   \r
+}\r
diff --git a/src/main/java/org/onap/aai/datagrooming/DataGrooming.java b/src/main/java/org/onap/aai/datagrooming/DataGrooming.java
new file mode 100644 (file)
index 0000000..6149dd9
--- /dev/null
@@ -0,0 +1,2853 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.datagrooming;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.Set;
+import java.util.UUID;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.Property;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+import org.onap.aai.GraphAdminApp;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.dbmap.AAIGraphConfig;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LogFormatTools;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.edges.enums.AAIDirection;
+import org.onap.aai.edges.enums.EdgeProperty;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.*;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+
+public class DataGrooming {
+
+       private static EELFLogger LOGGER = EELFManager.getInstance().getLogger(DataGrooming.class);
+
+       // Application id and per-run transaction id used when logging/auditing
+       // grooming activity against the AAI database.
+       private static final String FROMAPPID = "AAI-DB";
+       private static final String TRANSID = UUID.randomUUID().toString();
+       // Running count of duplicate-node groups deleted during a grooming pass.
+       private int dupeGrpsDeleted = 0;
+
+       private LoaderFactory loaderFactory;
+       private SchemaVersions schemaVersions;
+
+       /**
+        * Creates a DataGrooming instance.
+        *
+        * @param loaderFactory  factory for model loaders used during grooming
+        * @param schemaVersions schema versions known to this installation
+        */
+       public DataGrooming(LoaderFactory loaderFactory, SchemaVersions schemaVersions){
+               this.loaderFactory  = loaderFactory;
+               this.schemaVersions = schemaVersions;
+       }
+
+       public void execute(String[] args){
+
+               String ver = "version"; // Placeholder
+               Boolean doAutoFix = false;
+               Boolean edgesOnlyFlag = false;
+               Boolean dontFixOrphansFlag = false;
+               Boolean skipHostCheck = false;
+               Boolean singleCommits = false;
+               Boolean dupeCheckOff = false;
+               Boolean dupeFixOn = false;
+               Boolean ghost2CheckOff = false;
+               Boolean ghost2FixOn = false;
+               Boolean neverUseCache = false;
+               Boolean skipEdgeCheckFlag = false;
+               Boolean skipIndexUpdateFix = false;
+
+               // A value of 0 means that we will not have a time-window -- we will look
+               // at all nodes of the passed-in nodeType.
+               int timeWindowMinutes = 0;
+
+               int maxRecordsToFix = AAIConstants.AAI_GROOMING_DEFAULT_MAX_FIX;
+               int sleepMinutes = AAIConstants.AAI_GROOMING_DEFAULT_SLEEP_MINUTES;
+               try {
+                       String maxFixStr = AAIConfig.get("aai.grooming.default.max.fix");
+                       if( maxFixStr != null &&  !maxFixStr.equals("") ){
+                               maxRecordsToFix = Integer.parseInt(maxFixStr);
+                       }
+                       String sleepStr = AAIConfig.get("aai.grooming.default.sleep.minutes");
+                       if( sleepStr != null &&  !sleepStr.equals("") ){
+                               sleepMinutes = Integer.parseInt(sleepStr);
+                       }
+               }
+               catch ( Exception e ){
+                       // Don't worry, we'll just use the defaults that we got from AAIConstants
+                       LOGGER.warn("WARNING - could not pick up aai.grooming values from aaiconfig.properties file. ");
+               }
+
+               String prevFileName = "";
+               String singleNodeType = "";
+               dupeGrpsDeleted = 0;
+               FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
+               String dteStr = fd.getDateTime();
+
+               if (args.length > 0) {
+                       // They passed some arguments in that will affect processing
+                       for (int i = 0; i < args.length; i++) {
+                               String thisArg = args[i];
+                               if (thisArg.equals("-edgesOnly")) {
+                                       edgesOnlyFlag = true;
+                               } else if (thisArg.equals("-autoFix")) {
+                                       doAutoFix = true;
+                               } else if (thisArg.equals("-skipHostCheck")) {
+                                       skipHostCheck = true;
+                               } else if (thisArg.equals("-dontFixOrphans")) {
+                                       dontFixOrphansFlag = true;
+                               } else if (thisArg.equals("-singleCommits")) {
+                                       singleCommits = true;
+                               } else if (thisArg.equals("-dupeCheckOff")) {
+                                       dupeCheckOff = true;
+                               } else if (thisArg.equals("-dupeFixOn")) {
+                                       dupeFixOn = true;
+                               } else if (thisArg.equals("-ghost2CheckOff")) {
+                                       ghost2CheckOff = true;
+                               } else if (thisArg.equals("-neverUseCache")) {
+                                       neverUseCache = true;
+                               } else if (thisArg.equals("-ghost2FixOn")) {
+                                       ghost2FixOn = true;
+                               } else if (thisArg.equals("-skipEdgeChecks")) {
+                                       skipEdgeCheckFlag = true;
+                               } else if (thisArg.equals("-skipIndexUpdateFix")) {
+                                       skipIndexUpdateFix = true;
+                               } else if (thisArg.equals("-maxFix")) {
+                                       i++;
+                                       if (i >= args.length) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               LOGGER.error(" No value passed with -maxFix option.  ");
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
+                                       }
+                                       String nextArg = args[i];
+                                       try {
+                                               maxRecordsToFix = Integer.parseInt(nextArg);
+                                       } catch (Exception e) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               LOGGER.error("Bad value passed with -maxFix option: ["
+                                                               + nextArg + "]");
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
+                                       }
+                               } else if (thisArg.equals("-sleepMinutes")) {
+                                       i++;
+                                       if (i >= args.length) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               LOGGER.error("No value passed with -sleepMinutes option.");
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
+                                       }
+                                       String nextArg = args[i];
+                                       try {
+                                               sleepMinutes = Integer.parseInt(nextArg);
+                                       } catch (Exception e) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               LOGGER.error("Bad value passed with -sleepMinutes option: ["
+                                                               + nextArg + "]");
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
+                                       }
+                               } else if (thisArg.equals("-timeWindowMinutes")) {
+                                       i++;
+                                       if (i >= args.length) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               LOGGER.error("No value passed with -timeWindowMinutes option.");
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
+                                       }
+                                       String nextArg = args[i];
+                                       try {
+                                               timeWindowMinutes = Integer.parseInt(nextArg);
+                                       } catch (Exception e) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               LOGGER.error("Bad value passed with -timeWindowMinutes option: ["
+                                                               + nextArg + "]");
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
+                                       }
+
+                               } else if (thisArg.equals("-f")) {
+                                       i++;
+                                       if (i >= args.length) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               LOGGER.error(" No value passed with -f option. ");
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
+                                       }
+                                       prevFileName = args[i];
+                               } else if (thisArg.equals("-singleNodeType")) {
+                                       i++;
+                                       if (i >= args.length) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               LOGGER.error(" No value passed with -onlyThisNodeType option. ");
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
+                                       }
+                                       singleNodeType = args[i];
+                               } else {
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                       LOGGER.error(" Unrecognized argument passed to DataGrooming: ["
+                                                       + thisArg + "]. ");
+                                       LOGGER.error(" Valid values are: -f -autoFix -maxFix -edgesOnly -skipEdgeChecks -dupeFixOn -donFixOrphans -timeWindowMinutes -sleepMinutes -neverUseCache");
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(0);
+                               }
+                       }
+               }
+
+               String windowTag = "FULL";
+               if( timeWindowMinutes > 0 ){
+                       windowTag = "PARTIAL";
+               }
+               String groomOutFileName = "dataGrooming." + windowTag + "." + dteStr + ".out";
+
+               try {
+                       loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion());
+               }
+               catch (Exception ex){
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                       LOGGER.error("ERROR - Could not create loader " + LogFormatTools.getStackTop(ex));
+                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+               }
+
+               try {
+                       if (!prevFileName.equals("")) {
+                               // They are trying to fix some data based on a data in a
+                               // previous file.
+                               LOGGER.info(" Call doTheGrooming() with a previous fileName ["
+                                               + prevFileName + "] for cleanup. ");
+                               Boolean finalShutdownFlag = true;
+                               Boolean cacheDbOkFlag = false;
+                               doTheGrooming(prevFileName, edgesOnlyFlag, dontFixOrphansFlag,
+                                               maxRecordsToFix, groomOutFileName, ver, singleCommits,
+                                               dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
+                                               finalShutdownFlag, cacheDbOkFlag,
+                                               skipEdgeCheckFlag, timeWindowMinutes,
+                                               singleNodeType, skipIndexUpdateFix );
+                       } else if (doAutoFix) {
+                               // They want us to run the processing twice -- first to look for
+                               // delete candidates, then after
+                               // napping for a while, run it again and delete any candidates
+                               // that were found by the first run.
+                               // Note: we will produce a separate output file for each of the
+                               // two runs.
+                               LOGGER.info(" Doing an auto-fix call to Grooming. ");
+                               LOGGER.info(" First, Call doTheGrooming() to look at what's out there. ");
+                               Boolean finalShutdownFlag = false;
+                               Boolean cacheDbOkFlag = true;
+                               int fixCandCount = doTheGrooming("", edgesOnlyFlag,
+                                               dontFixOrphansFlag, maxRecordsToFix, groomOutFileName,
+                                               ver, singleCommits, dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
+                                               finalShutdownFlag, cacheDbOkFlag,
+                                               skipEdgeCheckFlag, timeWindowMinutes,
+                                               singleNodeType, skipIndexUpdateFix );
+                               if (fixCandCount == 0) {
+                                       LOGGER.info(" No fix-Candidates were found by the first pass, so no second/fix-pass is needed. ");
+                               } else {
+                                       // We'll sleep a little and then run a fix-pass based on the
+                                       // first-run's output file.
+                                       try {
+                                               LOGGER.info("About to sleep for " + sleepMinutes
+                                                               + " minutes.");
+                                               int sleepMsec = sleepMinutes * 60 * 1000;
+                                               Thread.sleep(sleepMsec);
+                                       } catch (InterruptedException ie) {
+                                               LOGGER.info("\n >>> Sleep Thread has been Interrupted <<< ");
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(0);
+                                       }
+
+                                       dteStr = fd.getDateTime();
+                                       String secondGroomOutFileName = "dataGrooming." + windowTag + "." + dteStr + ".out";
+                                       LOGGER.info(" Now, call doTheGrooming() a second time and pass in the name of the file "
+                                                       + "generated by the first pass for fixing: ["
+                                                       + groomOutFileName + "]");
+                                       finalShutdownFlag = true;
+                                       cacheDbOkFlag = false;
+                                       doTheGrooming(groomOutFileName, edgesOnlyFlag,
+                                                       dontFixOrphansFlag, maxRecordsToFix,
+                                                       secondGroomOutFileName, ver, singleCommits,
+                                                       dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
+                                                       finalShutdownFlag, cacheDbOkFlag,
+                                                       skipEdgeCheckFlag, timeWindowMinutes,
+                                                       singleNodeType, skipIndexUpdateFix );
+                               }
+                       } else {
+                               // Do the grooming - plain vanilla (no fix-it-file, no
+                               // auto-fixing)
+                               Boolean finalShutdownFlag = true;
+                               LOGGER.info(" Call doTheGrooming() ");
+                               Boolean cacheDbOkFlag = true;
+                               if( neverUseCache ){
+                                       // They have forbidden us from using a cached db connection.
+                                       cacheDbOkFlag = false;
+                               }
+                               doTheGrooming("", edgesOnlyFlag, dontFixOrphansFlag,
+                                               maxRecordsToFix, groomOutFileName, ver, singleCommits,
+                                               dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
+                                               finalShutdownFlag, cacheDbOkFlag,
+                                               skipEdgeCheckFlag, timeWindowMinutes,
+                                               singleNodeType, skipIndexUpdateFix );
+                       }
+               } catch (Exception ex) {
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       LOGGER.error("Exception while grooming data " + LogFormatTools.getStackTop(ex));
+               }
+               LOGGER.info(" Done! ");
+               AAISystemExitUtil.systemExitCloseAAIGraph(0);
+       }
+       
+       /**
+        * The main method.
+        *
+        * @param args the arguments
+        */
+       public static void main(String[] args) {
+
+               // Set the logging file properties to be used by EELFManager
+               System.setProperty("aai.service.name", DataGrooming.class.getSimpleName());
+
+               LoggingContext.init();
+               LoggingContext.partnerName(FROMAPPID);
+               LoggingContext.serviceName(GraphAdminApp.APP_NAME);
+               LoggingContext.component("dataGrooming");
+               LoggingContext.targetEntity(GraphAdminApp.APP_NAME);
+               LoggingContext.targetServiceName("main");
+               LoggingContext.requestId(TRANSID);
+               LoggingContext.statusCode(StatusCode.COMPLETE);
+               LoggingContext.responseCode(LoggingContext.SUCCESS);
+
+               Properties props = System.getProperties();
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS);
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
+
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
+                               "org.onap.aai.config",
+                               "org.onap.aai.setup"
+               );
+
+               LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
+               SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
+               DataGrooming dataGrooming = new DataGrooming(loaderFactory, schemaVersions);
+               dataGrooming.execute(args);
+       }
+
	/**
	 * Do the grooming.
	 *
	 * @param fileNameForFixing the file name for fixing (empty string when there is no fix file)
	 * @param edgesOnlyFlag the edges only flag
	 * @param dontFixOrphansFlag the don't-fix-orphans flag
	 * @param maxRecordsToFix the max records to fix
	 * @param groomOutFileName the groom out file name
	 * @param version the version
	 * @param singleCommits the single commits flag
	 * @param dupeCheckOff the dupe check off flag
	 * @param dupeFixOn the dupe fix on flag
	 * @param ghost2CheckOff the ghost 2 check off flag
	 * @param ghost2FixOn the ghost 2 fix on flag
	 * @param finalShutdownFlag the final shutdown flag
	 * @param cacheDbOkFlag the cacheDbOk flag (true allows a cached db connection)
	 * @param skipEdgeCheckFlag the skip edge checks flag
	 * @param timeWindowMinutes the time window in minutes (0 means full scan, no window)
	 * @param singleNodeType if non-empty, restrict processing to only this node type
	 * @param skipIndexUpdateFix the skip index update fix flag
	 * @return the number of cleanup/fix candidates found by this pass
	 */
+       private int doTheGrooming( String fileNameForFixing,
+                       Boolean edgesOnlyFlag, Boolean dontFixOrphansFlag,
+                       int maxRecordsToFix, String groomOutFileName, String version,
+                       Boolean singleCommits, 
+                       Boolean dupeCheckOff, Boolean dupeFixOn,
+                       Boolean ghost2CheckOff, Boolean ghost2FixOn, 
+                       Boolean finalShutdownFlag, Boolean cacheDbOkFlag,
+                       Boolean skipEdgeCheckFlag, int timeWindowMinutes,
+                       String singleNodeType, Boolean skipIndexUpdateFix ) {
+
+               LOGGER.debug(" Entering doTheGrooming \n");
+
+               int cleanupCandidateCount = 0;
+               long windowStartTime = 0; // Translation of the window into a starting timestamp 
+               BufferedWriter bw = null;
+               JanusGraph graph = null;
+               JanusGraph graph2 = null;
+               int deleteCount = 0;
+               int dummyUpdCount = 0;
+               boolean executeFinalCommit = false;
+               Set<String> deleteCandidateList = new LinkedHashSet<>();
+               Set<String> processedVertices = new LinkedHashSet<>();
+               Set<String> postCommitRemoveList = new LinkedHashSet<>();
+
+               Graph g = null;
+               Graph g2 = null;
+               try {
+                       if( timeWindowMinutes > 0 ){
+                               // Translate the window value (ie. 30 minutes) into a unix timestamp like
+                               //    we use in the db - so we can select data created after that time.
+                               windowStartTime = figureWindowStartTime( timeWindowMinutes );
+                       }
+                       
+                       AAIConfig.init();
+                       String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP
+                                       + "logs" + AAIConstants.AAI_FILESEP + "data"
+                                       + AAIConstants.AAI_FILESEP + "dataGrooming";
+
+                       // Make sure the target directory exists
+                       new File(targetDir).mkdirs();
+
+                       if (!fileNameForFixing.equals("")) {
+                               deleteCandidateList = getDeleteList(targetDir,
+                                               fileNameForFixing, edgesOnlyFlag, dontFixOrphansFlag,
+                                               dupeFixOn);
+                       }
+
+                       if (deleteCandidateList.size() > maxRecordsToFix) {
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                               LOGGER.warn(" >> WARNING >>  Delete candidate list size ("
+                                               + deleteCandidateList.size()
+                                               + ") is too big.  The maxFix we are using is: "
+                                               + maxRecordsToFix
+                                               + ".  No candidates will be deleted. ");
+                               // Clear out the list so it won't be processed below.
+                               deleteCandidateList = new LinkedHashSet<>();
+                       }
+
+                       String fullOutputFileName = targetDir + AAIConstants.AAI_FILESEP
+                                       + groomOutFileName;
+                       File groomOutFile = new File(fullOutputFileName);
+                       try {
+                               groomOutFile.createNewFile();
+                       } catch (IOException e) {
+                               String emsg = " Problem creating output file ["
+                                               + fullOutputFileName + "], exception=" + e.getMessage();
+                               throw new AAIException("AAI_6124", emsg);
+                       }
+
+                       LOGGER.info(" Will write to " + fullOutputFileName );
+                       bw = new BufferedWriter(new FileWriter(groomOutFile.getAbsoluteFile()));
+                       ErrorLogHelper.loadProperties();
+                       
+                       LOGGER.info("    ---- NOTE --- about to open graph (takes a little while)--------\n");
+
+                       if( cacheDbOkFlag ){
+                               // Since we're just reading (not deleting/fixing anything), we can use 
+                               // a cached connection to the DB
+                               
+                               // -- note JanusGraphFactory has been leaving db connections open
+                               //graph = JanusGraphFactory.open(new AAIGraphConfig.Builder(AAIConstants.CACHED_DB_CONFIG).forService(DataGrooming.class.getSimpleName()).withGraphType("cached").buildConfiguration());
+                               graph = AAIGraph.getInstance().getGraph();
+                       }
+                       else {
+                               // -- note JanusGraphFactory has been leaving db connections open
+                               //graph = JanusGraphFactory.open(new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG).forService(DataGrooming.class.getSimpleName()).withGraphType("realtime1").buildConfiguration());
+                               graph = AAIGraph.getInstance().getGraph();
+                       }
+                       if (graph == null) {
+                               String emsg = "null graph object in DataGrooming\n";
+                               throw new AAIException("AAI_6101", emsg);
+                       }
+               
+                       LOGGER.debug(" Got the graph object. ");
+                       
+                       g = graph.newTransaction();
+                       if (g == null) {
+                               String emsg = "null graphTransaction object in DataGrooming\n";
+                               throw new AAIException("AAI_6101", emsg);
+                       }
+                       GraphTraversalSource source1 = g.traversal();
+                       
+                       ArrayList<String> errArr = new ArrayList<>();
+                       int totalNodeCount = 0;
+                       HashMap<String, String> misMatchedHash = new HashMap<String, String>();
+                       HashMap<String, Vertex> orphanNodeHash = new HashMap<String, Vertex>();
+                       HashMap<String, Vertex> missingAaiNtNodeHash = new HashMap<String, Vertex>();
+                       HashMap<String, Edge> oneArmedEdgeHash = new HashMap<String, Edge>();
+                       HashMap<String, String> emptyVertexHash = new HashMap<String, String>();
+                       HashMap<String, Vertex> ghostNodeHash = new HashMap<String, Vertex>();
+                       ArrayList<String> dupeGroups = new ArrayList<>();
+
+                       Loader loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion());
+
+
+                       // NOTE --- At one point, we tried explicitly searching for
+                       // nodes that were missing their aai-node-type (which does
+                       // happen sometimes), but the search takes too long and cannot
+                       // be restricted to a date-range since these nodes usually do
+                       // not have timestamps either. Instead, when we run across them
			// as orphans, we will not treat them as orphans, but categorize
+                       // them as "missingAaiNodeType" - which we will treat more like
+                       // ghost nodes - that is, delete them without asking permission.
+                       //
+                       // Note Also - It's a little surprising that we can run
+                       // across these when looking for orphans since that search at
+                       // least begins based on a given aai-node-type.  But watching
+                       // where they come up, they are getting discovered when a node
			// is looking for its parent node.  So, say, a "tenant" node
			// follows a "contains" edge and finds the bad node.
+
+
+
+                       Set<Entry<String, Introspector>> entrySet = loader.getAllObjects().entrySet();
+                       String ntList = "";
+                       LOGGER.info("  Starting DataGrooming Processing ");
+
+                       if (edgesOnlyFlag) {
+                               LOGGER.info(" NOTE >> Skipping Node processing as requested.  Will only process Edges. << ");
+                       } 
+                       else {
+                               for (Entry<String, Introspector> entry : entrySet) {
+                                       String nType = entry.getKey();
+                                       int thisNtCount = 0;
+                                       int thisNtDeleteCount = 0;
+                                       
+                                       if( !singleNodeType.equals("") && !singleNodeType.equals(nType) ){
+                                               // We are only going to process this one node type
+                                               continue;
+                                       }
+
+                                       LOGGER.debug(" >  Look at : [" + nType + "] ...");
+                                       ntList = ntList + "," + nType;
+
+                                       // Get a collection of the names of the key properties for this nodeType to use later
+                                       // Determine what the key fields are for this nodeType - use an arrayList so they
+                                       // can be gotten out in a consistent order.
+                                       Set <String> keyPropsSet = entry.getValue().getKeys();
+                                       ArrayList <String> keyProps = new ArrayList <String> ();
+                                       keyProps.addAll(keyPropsSet);
+                                       
+                                       Set <String> indexedPropsSet = entry.getValue().getIndexedProperties();
+                                       ArrayList <String> indexedProps = new ArrayList <String> ();
+                                       indexedProps.addAll(indexedPropsSet);
+
+                                       Iterator<String> indPropItr = indexedProps.iterator();
+                                       HashMap <String,String> propTypeHash = new HashMap <String, String> ();
+                                       while( indPropItr.hasNext() ){
+                                               String propName = indPropItr.next();
+                                               String propType = entry.getValue().getType(propName);
+                                               propTypeHash.put(propName, propType);
+                                       }
+
+                                       // Get the types of nodes that this nodetype depends on for uniqueness (if any)
+                                       Collection <String> depNodeTypes = loader.introspectorFromName(nType).getDependentOn();
+                                       
+                                       // Loop through all the nodes of this Node type
+                                       int lastShownForNt = 0;
+                                       ArrayList <Vertex> tmpList = new ArrayList <> ();
+                                       Iterator <Vertex> iterv =  source1.V().has("aai-node-type",nType); 
+                                       while (iterv.hasNext()) {
+                                               // We put the nodes into an ArrayList because the graph.query iterator can time out
+                                               tmpList.add(iterv.next());
+                                       }
+                                       
+                                       Iterator <Vertex> iter = tmpList.iterator();
+                                       while (iter.hasNext()) {
+                                               try {
+                                                       thisNtCount++;
+                                                       if( thisNtCount == lastShownForNt + 1000 ){
+                                                               lastShownForNt = thisNtCount;
+                                                               LOGGER.debug("count for " + nType + " so far = " + thisNtCount );
+                                                       }
+                                                       Vertex thisVtx = iter.next();
+                                                       if( windowStartTime > 0 ){
+                                                               // They are using the time-window, so we only want nodes that are updated after a
+                                                               // passed-in timestamp OR that have no last-modified-timestamp which means they are suspicious.
+                                                               Object objModTimeStamp = thisVtx.property("aai-last-mod-ts").orElse(null);
+                                                               if( objModTimeStamp != null ){
+                                                                       long thisNodeModTime = (long)objModTimeStamp;
+                                                                       if( thisNodeModTime < windowStartTime ){
+                                                                               // It has a last modified ts and is NOT in our window, so we can pass over it
+                                                                               continue;
+                                                                       }
+                                                               }
+                                                       }
+                                                       
+                                                       String thisVid = thisVtx.id().toString();
+                                                       if (processedVertices.contains(thisVid)) {
+                                                               LOGGER.debug("skipping already processed vertex: " + thisVid);
+                                                               continue;
+                                                       }
+                                                       totalNodeCount++;
+                                                       List <Vertex> secondGetList = new ArrayList <> ();
+                                                       // -----------------------------------------------------------------------
+                                                       // For each vertex of this nodeType, we want to:
+                                                       //              a) make sure that it can be retrieved using it's AAI defined key
+                                                       //      b) make sure that it is not a duplicate
+                                                       // -----------------------------------------------------------------------
+                                                       
+                                                       // For this instance of this nodeType, get the key properties 
+                                                       HashMap<String, Object> propHashWithKeys = new HashMap<>();
+                                                       Iterator<String> keyPropI = keyProps.iterator();
+                                                       while (keyPropI.hasNext()) {
+                                                               String propName = keyPropI.next();
+                                                               String propVal = "";
+                                                               //delete an already deleted vertex
+                                                               Object obj = thisVtx.<Object>property(propName).orElse(null);
+                                                               if (obj != null) {
+                                                                       propVal = obj.toString();
+                                                               }
+                                                               propHashWithKeys.put(propName, propVal);
+                                                       }
+                                                       try {
+                                                               // If this node is dependent on another for uniqueness, then do the query from that parent node
+                                                               // Note - all of our nodes that are dependent on others for uniqueness are 
+                                                               //              "children" of that node.
+                                                               boolean depNodeOk = true;
+                                                               if( depNodeTypes.isEmpty() ){
+                                                                       // This kind of node is not dependent on any other.
+                                                                       // Make sure we can get it back using its key properties (that is the
+                                                                       //   phantom checking) and that we only get one.  Note - we also need
+                                                                       //   to collect data for a second type of dupe-checking which is done later.
+                                                                       secondGetList = getNodeJustUsingKeyParams( TRANSID, FROMAPPID, source1, nType, 
+                                                                                       propHashWithKeys, version );
+                                                               } 
+                                                               else {
+                                                                       // This kind of node is dependent on another for uniqueness.  
+                                                                       // Start at its parent (the parent/containing vertex) and make sure we can get it
+                                                                       // back using its key properties and that we only get one.
+                                                                       // The union() walks the CONTAINS edge in both orientations, since the
+                                                                       // containment edge may be stored pointing either way.
+                                                                       Iterator <Vertex> vertI2 = source1.V(thisVtx).union(__.inE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.OUT.toString()).outV(), __.outE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.IN.toString()).inV());
+                                                                       Vertex parentVtx = null;
+                                                                       // First we need to try to find the parent/containing vertex.
+                                                                       // pCount ends up as the number of candidate parents found
+                                                                       // (0 = orphan, 1 = normal, >1 = ambiguous).
+                                                                       int pCount = 0;
+                                                                       while( vertI2 != null && vertI2.hasNext() ){
+                                                                               parentVtx = vertI2.next();
+                                                                               pCount++;
+                                                                       }
+                                                                       if( pCount <= 0 ){
+                                                                               // It's missing its dependent/parent/containing node - it's an orphan
+                                                                               depNodeOk = false;
+                                                                               if (deleteCandidateList.contains(thisVid)) {
+                                                                                       // This orphan was flagged for deletion on a prior run.
+                                                                                       boolean okFlag = true;
+                                                                                       boolean updateOnlyFlag = false;
+                                                                                       try {
+                                                                                               processedVertices.add(thisVtx.id().toString());
+                                                                                               Object ob = thisVtx.<Object>property("aai-node-type").orElse(null);
+                                                                                               if( ob == null && !skipIndexUpdateFix ){
+                                                                                                       // No aai-node-type: repair the indexed props instead of
+                                                                                                       // deleting, so the bad index entries get cleaned up first.
+                                                                                                       updateIndexedProps(thisVtx, thisVid, nType, propTypeHash, indexedProps);
+                                                                                                       updateOnlyFlag = true;
+                                                                                                       dummyUpdCount++;
+                                                                                                       // Since we are updating this delete candidate, not deleting it, we
+                                                                                                       // want it to show up as a delete candidate for this run also.
+                                                                                                       missingAaiNtNodeHash.put(thisVid, thisVtx);
+                                                                                               }
+                                                                                               else {
+                                                                                                       // There was an aai-node-type parameter, so we'll do the remove
+                                                                                                       thisVtx.remove();
+                                                                                                       deleteCount++;
+                                                                                                       thisNtDeleteCount++;
+                                                                                               }
+                                                                                       } catch (Exception e) {
+                                                                                               okFlag = false;
+                                                                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                                                                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                                                                               LOGGER.error("ERROR trying to delete delete Candidate VID = " + thisVid + " " + LogFormatTools.getStackTop(e));
+                                                                                       }
+                                                                                       if (okFlag){
+                                                                                               if( updateOnlyFlag ) {
+                                                                                                       LOGGER.info(" Updated Indexes for Delete Candidate VID = " + thisVid);
+                                                                                               }
+                                                                                               else {
+                                                                                                       LOGGER.info(" DELETED Delete Candidate VID = " + thisVid);
+                                                                                               }
+                                                                                       }
+                                                                               } else {
+                                                                                       // NOTE - Only nodes that are missing their parent/containing node are ever considered "orphaned".
+                                                                                       // That is, you could have a node with no edges... which sounds like an orphan, but not all
+                                                                                       // nodes require edges.  For example, you could have a newly created "image" node which does not have
+                                                                                       // any edges connected to it (using it) yet.
+                                                                                       Object ob = thisVtx.<Object>property("aai-node-type").orElse(null);
+                                                                                       if( ob == null ){
+                                                                                               // Group this with missing-node-type guys - which
+                                                                                               // we will delete more readily than orphans.
+                                                                                               LOGGER.info(" >> Encountered a missingAaiNodeType while looking for the parent of a [" + nType + "] node.");
+                                                                                               missingAaiNtNodeHash.put(thisVid, thisVtx);
+                                                                                       }
+                                                                                       else {
+                                                                                               Object ob2 = thisVtx.<Object>property("aai-uuid").orElse(null);
+                                                                                               String auid = "";
+                                                                                               if( ob2 != null ){
+                                                                                                       auid = ob2.toString();
+                                                                                               }
+                                                                                               // A previous fix pass stamps repaired nodes with the
+                                                                                               // sentinel uuid "<vid>dummy"; treat those like
+                                                                                               // missing-node-type nodes rather than plain orphans.
+                                                                                               String checkDummyUid = thisVid + "dummy";
+                                                                                               if( auid.equals(checkDummyUid) ){
+                                                                                                       // Group this with missing-node-type guys.
+                                                                                                       LOGGER.info(" >> Encountered a missingAaiNodeType mid-fix-node while looking for the parent of a [" + nType + "] node.");
+                                                                                                       missingAaiNtNodeHash.put(thisVid, thisVtx);
+                                                                                               }
+                                                                                               else {
+                                                                                                       // It's a regular old orphan
+                                                                                                       orphanNodeHash.put(thisVid, thisVtx);
+                                                                                               }
+                                                                                       }
+                                                                               }
+                                                                       }
+                                                                       else if ( pCount > 1 ){
+                                                                               // Not sure how this could happen?  Should we do something here?
+                                                                               // NOTE(review): a node with more than one containing parent is
+                                                                               // silently skipped - it is neither classified nor logged.
+                                                                               depNodeOk = false;
+                                                                       }
+                                                                       else {
+                                                                               // We found the parent - so use it to do the second-look.
+                                                                               // NOTE --- We're just going to do the same check from the other direction - because
+                                                                               //  there could be duplicates or the pointer going the other way could be broken
+                                                                               ArrayList <Vertex> tmpListSec = new ArrayList <> ();
+                                                                               
+                                                                               tmpListSec = getConnectedChildrenOfOneType( source1, parentVtx, nType ) ;
+                                                                               Iterator<Vertex> vIter = tmpListSec.iterator();
+                                                                               while (vIter.hasNext()) {
+                                                                                       Vertex tmpV = vIter.next();
+                                                                                       // Keep only siblings whose key values match ours; a match list
+                                                                                       // of size 0 means phantom, size > 1 means duplicates.
+                                                                                       if( vertexHasTheseKeys(tmpV, propHashWithKeys) ){
+                                                                                               secondGetList.add(tmpV);
+                                                                                       }
+                                                                               }
+                                                                       }
+                                                               }// end of -- else this is a dependent node  -- piece
+                                                               
+                                                               // secondGetList now holds every vertex reachable via this node's own
+                                                               // key lookup: empty => phantom, one => healthy, many => duplicates.
+                                                               if( depNodeOk && (secondGetList == null || secondGetList.size() == 0) ){
+                                                                       // We could not get the node back using its own key info. 
+                                                                       // So, it's a PHANTOM
+                                                                       if (deleteCandidateList.contains(thisVid)) {
+                                                                               boolean okFlag = true;
+                                                                               boolean updateOnlyFlag = false;
+                                                                               try {
+                                                                                       Object ob = thisVtx.<Object>property("aai-node-type").orElse(null);
+                                                                                       if( ob == null && !skipIndexUpdateFix ){
+                                                                                               // Missing aai-node-type: repair indexed props instead of
+                                                                                               // deleting (same policy as the orphan branch above).
+                                                                                               updateIndexedProps(thisVtx, thisVid, nType, propTypeHash, indexedProps);
+                                                                                               dummyUpdCount++;
+                                                                                               updateOnlyFlag = true;
+                                                                                               // Since we are updating this delete candidate, not deleting it, we
+                                                                                               // want it to show up as a delete candidate for this run also.
+                                                                                               missingAaiNtNodeHash.put(thisVid, thisVtx);
+                                                                                       }
+                                                                                       else {
+                                                                                               // There was an aai-node-type parameter, so we'll do the remove
+                                                                                               thisVtx.remove();
+                                                                                               deleteCount++;
+                                                                                               thisNtDeleteCount++;
+                                                                                       }
+                                                                               } catch (Exception e) {
+                                                                                       okFlag = false;
+                                                                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                                                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                                                                       LOGGER.error("ERROR trying to delete phantom VID = " + thisVid + " " + LogFormatTools.getStackTop(e));
+                                                                               }
+                                                                               if (okFlag){
+                                                                                       if( updateOnlyFlag ) {
+                                                                                               LOGGER.info(" Updated Indexes for Delete Candidate VID = " + thisVid);
+                                                                                       }
+                                                                                       else {
+                                                                                               LOGGER.info(" DELETED VID = " + thisVid);
+                                                                                       }
+                                                                               }
+                                                                       } else {
+                                                                               // Not on the delete-candidate list yet: just record it as a
+                                                                               // "ghost" so a later run can pick it up as a candidate.
+                                                                               ghostNodeHash.put(thisVid, thisVtx);
+                                                                       }
+                                                               }
+                                                               else if( (secondGetList.size() > 1) && depNodeOk && !dupeCheckOff ){
+                                                                       // Found some DUPLICATES - need to process them
+                                                                       LOGGER.info(" - now check Dupes for this guy - ");
+                                                                       List<String> tmpDupeGroups = checkAndProcessDupes(
+                                                                                               TRANSID, FROMAPPID, g, source1, version,
+                                                                                               nType, secondGetList, dupeFixOn,
+                                                                                               deleteCandidateList, singleCommits,     dupeGroups, loader);
+                                                                       Iterator<String> dIter = tmpDupeGroups.iterator();
+                                                                       while (dIter.hasNext()) {
+                                                                               // Add in any newly found dupes to our running list
+                                                                               String tmpGrp = dIter.next();
+                                                                               LOGGER.info("Found set of dupes: [" + tmpGrp + "]");
+                                                                               dupeGroups.add(tmpGrp);
+                                                                       }
+                                                               }
+                                                       } 
+                                                       catch (AAIException e1) {
+                                                               // Known AAI error: record the structured error object and keep
+                                                               // grooming the remaining vertices of this nodeType.
+                                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                                               LOGGER.warn(" For nodeType = " + nType + " Caught exception", e1);
+                                                               errArr.add(e1.getErrorObject().toString());
+                                                       }
+                                                       catch (Exception e2) {
+                                                               // Unexpected error: record just the message and continue.
+                                                               // NOTE(review): e2.getMessage() can be null for some exception
+                                                               // types, which would put a null entry into errArr.
+                                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                                               LOGGER.warn(" For nodeType = " + nType
+                                                                               + " Caught exception", e2);
+                                                               errArr.add(e2.getMessage());
+                                                       }
+                                               }// try block to enclose looping over each single vertex
+                                               catch (Exception exx) {
+                                                       // Last-ditch guard: one bad vertex must not abort the whole
+                                                       // grooming pass over this nodeType.
+                                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                                       LOGGER.warn("WARNING from inside the while-verts-loop ", exx);
+                                               }
+                                               
+                                       } // while loop for each record of a nodeType
+                                       
+                                       if( depNodeTypes.isEmpty() && !dupeCheckOff ){
+                                               // For this nodeType, we haven't looked at the possibility of a 
+                                               // non-dependent node where two verts have same key info.
+                                               // (The per-vertex loop above only catches dupes reachable through a
+                                               // parent; non-dependent dupes need this whole-type scan of tmpList.)
+                                               ArrayList<ArrayList<Vertex>> nonDependentDupeSets = new ArrayList<ArrayList<Vertex>>();
+                                                       nonDependentDupeSets = getDupeSets4NonDepNodes( 
+                                                                               TRANSID, FROMAPPID, g,
+                                                                               version, nType, tmpList, 
+                                                                               keyProps, loader );
+                                               // For each set found (each set is for a unique instance of key-values),
+                                               //  process the dupes found
+                                               Iterator<ArrayList<Vertex>> dsItr = nonDependentDupeSets.iterator();
+                                               while( dsItr.hasNext() ){
+                                                       ArrayList<Vertex> dupeList =  dsItr.next();
+                                                       LOGGER.info(" - now check Dupes for some non-dependent guys - ");
+                                                       List<String> tmpDupeGroups = checkAndProcessDupes(
+                                                                               TRANSID, FROMAPPID, g, source1, version,
+                                                                               nType, dupeList, dupeFixOn,
+                                                                               deleteCandidateList, singleCommits,     dupeGroups, loader);
+                                                       Iterator<String> dIter = tmpDupeGroups.iterator();
+                                                       while (dIter.hasNext()) {
+                                                               // Add in any newly found dupes to our running list
+                                                               String tmpGrp = dIter.next();
+                                                               LOGGER.info("Found set of dupes: [" + tmpGrp + "]");
+                                                               dupeGroups.add(tmpGrp);
+                                                       }
+                                               }
+                                               
+                                       }// end of extra dupe check for non-dependent nodes
+                                       
+                                       
+                                       if ( (thisNtDeleteCount > 0) && singleCommits ) {
+                                               // NOTE - the singleCommits option is not used in normal processing.
+                                               // Commit the deletes made for this nodeType and start a fresh
+                                               // transaction; normally everything commits once at the end.
+                                               g.tx().commit();
+                                               g = AAIGraph.getInstance().getGraph().newTransaction();
+                                               
+                                       }
+                                       // Reset the per-nodeType delete counter before the next nodeType.
+                                       thisNtDeleteCount = 0;
+                                       LOGGER.info( " Processed " + thisNtCount + " records for [" + nType + "], " + totalNodeCount + " total (in window) overall. " );
+                                       
+                               }// While-loop for each node type
+                               
+                       }// end of check to make sure we weren't only supposed to do edges
+
+
+                 if( !skipEdgeCheckFlag ){
+                       // --------------------------------------------------------------------------------------
+                       // Now, we're going to look for one-armed-edges. Ie. an edge that
+                       // should have
+                       // been deleted (because a vertex on one side was deleted) but
+                       // somehow was not deleted.
+                       // So the one end of it points to a vertexId -- but that vertex is
+                       // empty.
+                       // --------------------------------------------------------------------------------------
+
+                       // To do some strange checking - we need a second graph object
+                       LOGGER.debug("    ---- DEBUG --- about to open a SECOND graph (takes a little while)--------\n");
+                       // Note - graph2 just reads - but we want it to use a fresh connection to 
+                       //      the database, so we are NOT using the CACHED DB CONFIG here.
+                       
+                       // -- note JanusGraphFactory has been leaving db connections open
+                       //graph2 = JanusGraphFactory.open(new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG).forService(DataGrooming.class.getSimpleName()).withGraphType("realtime2").buildConfiguration());
+                       graph2 = AAIGraph.getInstance().getGraph();
+                       if (graph2 == null) {
+                               String emsg = "null graph2 object in DataGrooming\n";
+                               throw new AAIException("AAI_6101", emsg);
+                       } else {
+                               LOGGER.debug("Got the graph2 object... \n");
+                       }
+                       g2 = graph2.newTransaction();
+                       if (g2 == null) {
+                               String emsg = "null graphTransaction2 object in DataGrooming\n";
+                               throw new AAIException("AAI_6101", emsg);
+                       }
+                       
+                       ArrayList<Vertex> vertList = new ArrayList<>();
+                       Iterator<Vertex> vItor3 = g.traversal().V();
+                       // Gotta hold these in a List - or else HBase times out as you cycle
+                       // through these
+                       while (vItor3.hasNext()) {
+                               Vertex v = vItor3.next();
+                               vertList.add(v);
+                       }
+                       int counter = 0;
+                       int lastShown = 0;
+                       Iterator<Vertex> vItor2 = vertList.iterator();
+                       LOGGER.info(" Checking for bad edges  --- ");
+
+                       while (vItor2.hasNext()) {
+                               Vertex v = null;
+                               try {
+                                       try {
+                                               v = vItor2.next();
+                                       } catch (Exception vex) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                               LOGGER.warn(">>> WARNING trying to get next vertex on the vItor2 ");
+                                               continue;
+                                       }
+                                       
+					counter++;
+					String thisVertId = "";
+					// If we can't even read the vertex id, the vertex is unusable
+					// for any of the checks below -- log it and move on.
+					try {
+						thisVertId = v.id().toString();
+					} catch (Exception ev) {
+						LoggingContext.statusCode(StatusCode.ERROR);
+						LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+						LOGGER.warn("WARNING when doing getId() on a vertex from our vertex list.  ");
+						continue;
+					}
+					if (ghostNodeHash.containsKey(thisVertId)) {
+						// This is a phantom node, so don't try to use it
+						LOGGER.info(" >> Skipping edge check for edges from vertexId = "
+										+ thisVertId
+										+ ", since that guy is a Phantom Node");
+						continue;
+					}
+					
+					if( windowStartTime > 0 ){
+						// They are using the time-window, so we only want nodes that are updated after a
+						// passed-in timestamp OR that have no last-modified-timestamp which means they are suspicious.
+						Object objModTimeStamp = v.property("aai-last-mod-ts").orElse(null);
+						if( objModTimeStamp != null ){
+							long thisNodeModTime = (long)objModTimeStamp;
+							if( thisNodeModTime < windowStartTime ){
+								// It has a last modified ts and is NOT in our window, so we can pass over it
+								continue;
+							}
+						}
+					}
+					
+					// Progress logging: emit a heartbeat line every 250 vertices.
+					if (counter == lastShown + 250) {
+						lastShown = counter;
+						LOGGER.info("... Checking edges for vertex # "
+								+ counter);
+					}
+                                       Iterator<Edge> eItor = v.edges(Direction.BOTH);
+                                       while (eItor.hasNext()) {
+						// Per-edge working state.  vIn/vOut are fetched defensively
+						// because either endpoint may be missing/corrupted -- that is
+						// exactly the condition this loop is hunting for.
+						Edge e = null;
+						Vertex vIn = null;
+						Vertex vOut = null;
+						try {
+							e = eItor.next();
+						} catch (Exception iex) {
+							LoggingContext.statusCode(StatusCode.ERROR);
+							LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+							LOGGER.warn(">>> WARNING trying to get next edge on the eItor ", iex);
+							continue;
+						}
+
+						try {
+							vIn = e.inVertex();
+						} catch (Exception err) {
+							LoggingContext.statusCode(StatusCode.ERROR);
+							LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+							LOGGER.warn(">>> WARNING trying to get edge's In-vertex ", err);
+						}
+						String vNtI = "";
+						String vIdI = "";
+						Vertex ghost2 = null;
+						
+						// keysMissing defaults to true: a vertex that never yields an
+						// aai-node-type below is treated as bad.
+						// NOTE(review): boxed Boolean is used where primitive boolean
+						// would do -- harmless, kept as-is.
+						Boolean keysMissing = true;
+						Boolean cantGetUsingVid = false;
+						// Inspect the edge's In-vertex: read its node-type (to see if its
+						// key fields are missing) and, unless ghost2 checking is turned
+						// off, verify the vertex is visible through the SECOND graph
+						// object (g2).  A vertex readable via g but not via g2 is a
+						// "GHOST2" node and is recorded in ghostNodeHash.
+						if (vIn != null) {
+							try {
+								Object ob = vIn.<Object>property("aai-node-type").orElse(null);
+								if (ob != null) {
+									vNtI = ob.toString();
+									keysMissing = anyKeyFieldsMissing(vNtI, vIn, loader);
+								}
+								ob = vIn.id();
+								long vIdLong = 0L;
+								if (ob != null) {
+									vIdI = ob.toString();
+									vIdLong = Long.parseLong(vIdI);
+								}
+								
+								if( ! ghost2CheckOff ){
+									// BUGFIX: traversal().V(id).next() throws
+									// NoSuchElementException when the id is absent instead of
+									// returning null, so the old null-check could never fire
+									// and the ghost2 path was dead code.  Use hasNext() to
+									// detect the missing vertex explicitly.
+									Iterator<Vertex> connVtxItr = g2.traversal().V(vIdLong);
+									Vertex connectedVert = connVtxItr.hasNext() ? connVtxItr.next() : null;
+									if( connectedVert == null ) {
+										LoggingContext.statusCode(StatusCode.ERROR);
+										LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+										LOGGER.warn( "GHOST2 -- got NULL when doing getVertex for vid = " + vIdLong);
+										cantGetUsingVid = true;
+										
+										// If we can NOT get this ghost with the SECOND graph-object, 
+										// it is still a ghost since even though we can get data about it using the FIRST graph 
+										// object.  
+										try {
+											 ghost2 = g.traversal().V(vIdLong).next();
+										}
+										catch( Exception ex){
+											LoggingContext.statusCode(StatusCode.ERROR);
+											LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+											LOGGER.warn( "GHOST2 --  Could not get the ghost info for a bad edge for vtxId = " + vIdLong, ex);
+										}
+										if( ghost2 != null ){
+											ghostNodeHash.put(vIdI, ghost2);
+										}
+									}
+								}// end of the ghost2 checking
+							} 
+							catch (Exception err) {
+								LoggingContext.statusCode(StatusCode.ERROR);
+								LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+								LOGGER.warn(">>> WARNING trying to get edge's In-vertex props ", err);
+							}
+						}
+                                               if (keysMissing || vIn == null || vNtI.equals("")
+                                                               || cantGetUsingVid) {
+                                                       // this is a bad edge because it points to a vertex
+                                                       // that isn't there anymore or is corrupted
+                                                       String thisEid = e.id().toString();
+                                                       if (deleteCandidateList.contains(thisEid) || deleteCandidateList.contains(vIdI)) {
+                                                               boolean okFlag = true;
+                                                               if (!vIdI.equals("")) {
+                                                                       // try to get rid of the corrupted vertex
+                                                                       try {
+                                                                               if( (ghost2 != null) && ghost2FixOn ){
+                                                                                       ghost2.remove();
+                                                                               }
+                                                                               else {
+                                                                                       vIn.remove();
+                                                                               }
+                                                                               if (singleCommits) {
+                                                                                       // NOTE - the singleCommits option is not used in normal processing
+                                                                                       g.tx().commit();
+                                                                                       g = AAIGraph.getInstance().getGraph().newTransaction();
+                                                                               }
+                                                                               else {
+                                                                                       executeFinalCommit = true;
+                                                                               }
+                                                                               deleteCount++;
+                                                                       } catch (Exception e1) {
+                                                                               okFlag = false;
+                                                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                                                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                                                               LOGGER.warn("WARNING when trying to delete bad-edge-connected VERTEX VID = "
+                                                                                               + vIdI, e1);
+                                                                       }
+                                                                       if (okFlag) {
+                                                                               LOGGER.info(" DELETED vertex from bad edge = "
+                                                                                                               + vIdI);
+                                                                       }
+                                                               } else {
+                                                                       // remove the edge if we couldn't get the
+                                                                       // vertex
+                                                                       try {
+                                                                               e.remove();
+                                                                               if (singleCommits) {
+                                                                                       // NOTE - the singleCommits option is not used in normal processing
+                                                                                       g.tx().commit();
+                                                                                       g = AAIGraph.getInstance().getGraph().newTransaction();
+                                                                               }
+                                                                               else {
+                                                                                       executeFinalCommit = true;
+                                                                               }
+                                                                               deleteCount++;
+                                                                       } catch (Exception ex) {
+                                                                               // NOTE - often, the exception is just
+                                                                               // that this edge has already been
+                                                                               // removed
+                                                                               okFlag = false;
+                                                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                                                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                                                               LOGGER.warn("WARNING when trying to delete edge = "
+                                                                                               + thisEid);
+                                                                       }
+                                                                       if (okFlag) {
+                                                                               LOGGER.info(" DELETED edge = " + thisEid);
+                                                                       }
+                                                               }
+                                                       } else {
+                                                               oneArmedEdgeHash.put(thisEid, e);
+                                                               if ((vIn != null) && (vIn.id() != null)) {
+                                                                       emptyVertexHash.put(thisEid, vIn.id()
+                                                                                       .toString());
+                                                               }
+                                                       }
+                                               }
+
+						// Now repeat the whole endpoint check for the edge's Out-vertex.
+						try {
+							vOut = e.outVertex();
+						} catch (Exception err) {
+							LoggingContext.statusCode(StatusCode.ERROR);
+							LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+							// BUGFIX: include the caught exception (it was dropped;
+							// the In-vertex twin of this catch logs it)
+							LOGGER.warn(">>> WARNING trying to get edge's Out-vertex ", err);
+						}
+						// Reset per-endpoint state before checking the Out side.
+						String vNtO = "";
+						String vIdO = "";
+						ghost2 = null;
+						keysMissing = true;
+						cantGetUsingVid = false;
+						// Inspect the edge's Out-vertex: same node-type / key-field /
+						// GHOST2 checks as were done for the In-vertex above.
+						if (vOut != null) {
+							try {
+								Object ob = vOut.<Object>property("aai-node-type").orElse(null);
+								if (ob != null) {
+									vNtO = ob.toString();
+									keysMissing = anyKeyFieldsMissing(vNtO,
+											vOut, loader);
+								}
+								ob = vOut.id();
+								long vIdLong = 0L;
+								if (ob != null) {
+									vIdO = ob.toString();
+									vIdLong = Long.parseLong(vIdO);
+								}
+								
+								if( ! ghost2CheckOff ){
+									// BUGFIX: traversal().V(id).next() throws
+									// NoSuchElementException when the id is absent instead of
+									// returning null, so the old null-check could never fire.
+									// Use hasNext() to detect the missing vertex explicitly.
+									Iterator<Vertex> connVtxItr = g2.traversal().V(vIdLong);
+									Vertex connectedVert = connVtxItr.hasNext() ? connVtxItr.next() : null;
+									if( connectedVert == null ) {
+										cantGetUsingVid = true;
+										LOGGER.info( "GHOST2 -- got NULL when doing getVertex for vid = " + vIdLong);
+										// If we can get this ghost with the other graph-object, then get it -- it's still a ghost
+										try {
+											 ghost2 = g.traversal().V(vIdLong).next();
+										}
+										catch( Exception ex){
+											LoggingContext.statusCode(StatusCode.ERROR);
+											LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+											LOGGER.warn( "GHOST2 -- Could not get the ghost info for a bad edge for vtxId = " + vIdLong, ex);
+										}
+										if( ghost2 != null ){
+											ghostNodeHash.put(vIdO, ghost2);
+										}
+									}
+								}
+							} catch (Exception err) {
+								LoggingContext.statusCode(StatusCode.ERROR);
+								LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+								LOGGER.warn(">>> WARNING trying to get edge's Out-vertex props ", err);
+							}
+						}
+						// Out-side twin of the bad-edge handling above: if the edge was
+						// pre-approved for deletion (previous run's candidate list),
+						// delete the corrupt Out-vertex or the edge itself; otherwise
+						// record it as a delete-candidate for the next run.
+						if (keysMissing || vOut == null || vNtO.equals("")
+								|| cantGetUsingVid) {
+							// this is a bad edge because it points to a vertex
+							// that isn't there anymore
+							String thisEid = e.id().toString();
+							if (deleteCandidateList.contains(thisEid) || deleteCandidateList.contains(vIdO)) {
+								boolean okFlag = true;
+								if (!vIdO.equals("")) {
+									// try to get rid of the corrupted vertex
+									try {
+										if( (ghost2 != null) && ghost2FixOn ){
+											ghost2.remove();
+										}
+										else if (vOut != null) {
+											vOut.remove();
+										}
+										if (singleCommits) {
+											// NOTE - the singleCommits option is not used in normal processing
+											g.tx().commit();
+											g = AAIGraph.getInstance().getGraph().newTransaction();
+										}
+										else {
+											executeFinalCommit = true;
+										}
+										deleteCount++;
+									} catch (Exception e1) {
+										okFlag = false;
+										LoggingContext.statusCode(StatusCode.ERROR);
+										LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+										LOGGER.warn("WARNING when trying to delete bad-edge-connected VID = "
+												+ vIdO, e1);
+									}
+									if (okFlag) {
+										LOGGER.info(" DELETED vertex from bad edge = "
+														+ vIdO);
+									}
+								} else {
+									// remove the edge if we couldn't get the
+									// vertex
+									try {
+										e.remove();
+										if (singleCommits) {
+											// NOTE - the singleCommits option is not used in normal processing
+											g.tx().commit();
+											g = AAIGraph.getInstance().getGraph().newTransaction();
+										}
+										else {
+											executeFinalCommit = true;
+										}
+										deleteCount++;
+									} catch (Exception ex) {
+										// NOTE - often, the exception is just
+										// that this edge has already been
+										// removed
+										okFlag = false;
+										LoggingContext.statusCode(StatusCode.ERROR);
+										LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+										LOGGER.warn("WARNING when trying to delete edge = "
+												+ thisEid, ex);
+									}
+									if (okFlag) {
+										LOGGER.info(" DELETED edge = " + thisEid);
+									}
+								}
+							} else {
+								// Not pre-approved: save for next run's candidate report.
+								oneArmedEdgeHash.put(thisEid, e);
+								if ((vOut != null) && (vOut.id() != null)) {
+									emptyVertexHash.put(thisEid, vOut.id()
+											.toString());
+								}
+							}
+						}
+                                       }// End of while-edges-loop
+                               } catch (Exception exx) {
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                       LOGGER.warn("WARNING from in the while-verts-loop ", exx);
+                               }
+                       }// End of while-vertices-loop (the edge-checking)
+                       LOGGER.info(" Done checking for bad edges  --- ");
+                 }     // end of -- if we're not skipping the edge-checking
+                       
+
+			// Fold the duplicate-group deletions into the overall delete count,
+			// and make sure any uncommitted deletes / dummy updates get committed
+			// at the end of the run.
+			deleteCount = deleteCount + dupeGrpsDeleted;
+			if (!singleCommits && (deleteCount > 0 || dummyUpdCount > 0) ){
+				executeFinalCommit = true;
+			}
+
+			int ghostNodeCount = ghostNodeHash.size();
+			int orphanNodeCount = orphanNodeHash.size();
+			int oneArmedEdgeCount = oneArmedEdgeHash.size();
+			int missingAaiNtNodeCount = missingAaiNtNodeHash.size();
+			int dupeCount = dupeGroups.size();
+
+			// BUGFIX: dupeGrpsDeleted was added to deleteCount a SECOND time here,
+			// double-counting duplicate-group deletes in the summary report.
+			// The duplicate addition has been removed.
+
+			// Write the human-readable summary section of the grooming report.
+			// (All report strings are part of the tool's output format -- do not
+			// reword them casually.)
+			bw.write("\n\n ============ Summary ==============\n");
+			if( timeWindowMinutes == 0 ){
+				bw.write("Ran FULL data grooming (no time-window). \n");
+			}
+			else {
+				bw.write("Ran PARTIAL data grooming just looking at data added/updated in the last " + timeWindowMinutes + " minutes. \n");
+			}
+			
+			bw.write("\nRan these nodeTypes: " + ntList + "\n\n");
+			bw.write("There were this many delete candidates from previous run =  "
+					+ deleteCandidateList.size() + "\n");
+			if (dontFixOrphansFlag) {
+				bw.write(" Note - we are not counting orphan nodes since the -dontFixOrphans parameter was used. \n");
+			}
+			bw.write("Deleted this many delete candidates =  " + deleteCount
+					+ "\n");
+			bw.write("Dummy-index-update to delete candidates =  " + dummyUpdCount
+					+ "\n");
+			bw.write("Total number of nodes looked at =  " + totalNodeCount
+					+ "\n");
+			bw.write("Ghost Nodes identified = " + ghostNodeCount + "\n");
+			bw.write("Orphan Nodes identified =  " + orphanNodeCount + "\n");
+			bw.write("Missing aai-node-type Nodes identified =  " + missingAaiNtNodeCount + "\n");
+			bw.write("Bad Edges identified =  " + oneArmedEdgeCount + "\n");
+			bw.write("Duplicate Groups count =  " + dupeCount + "\n");
+			bw.write("MisMatching Label/aai-node-type count =  "
+					+ misMatchedHash.size() + "\n");
+
+                       bw.write("\n ------------- Delete Candidates ---------\n");
+                       for (Map.Entry<String, Vertex> entry : ghostNodeHash
+                                       .entrySet()) {
+                               String vid = entry.getKey();
+                               bw.write("DeleteCandidate: Phantom Vid = [" + vid + "]\n");
+                               cleanupCandidateCount++;
+                       }
+                       for (Map.Entry<String, Vertex> entry : missingAaiNtNodeHash
+                                       .entrySet()) {
+                               String vid = entry.getKey();
+                               bw.write("DeleteCandidate: Missing aai-node-type Vid = [" + vid + "]\n");
+                               cleanupCandidateCount++;
+                       }
+                       for (Map.Entry<String, Vertex> entry : orphanNodeHash
+                                       .entrySet()) {
+                               String vid = entry.getKey();
+                               bw.write("DeleteCandidate: OrphanDepNode Vid = [" + vid + "]\n");
+                               if (!dontFixOrphansFlag) {
+                                       cleanupCandidateCount++;
+                               }
+                       }
+                       for (Map.Entry<String, Edge> entry : oneArmedEdgeHash.entrySet()) {
+                               String eid = entry.getKey();
+                               bw.write("DeleteCandidate: Bad EDGE Edge-id = [" + eid + "]\n");
+                               cleanupCandidateCount++;
+                       }
+
+                       bw.write("\n-- NOTE - To see DeleteCandidates for Duplicates, you need to look in the Duplicates Detail section below.\n");
+
+                       bw.write("\n ------------- GHOST NODES - detail ");
+                       for (Map.Entry<String, Vertex> entry : ghostNodeHash
+                                       .entrySet()) {
+                               try {
+                                       String vid = entry.getKey();
+                                       bw.write("\n ==> Phantom Vid = " + vid + "\n");
+                                       ArrayList<String> retArr = showPropertiesForNode(
+                                                       TRANSID, FROMAPPID, entry.getValue());
+                                       for (String info : retArr) {
+                                               bw.write(info + "\n");
+                                       }
+                                       retArr = showAllEdgesForNode(TRANSID, FROMAPPID,
+                                                       entry.getValue());
+                                       for (String info : retArr) {
+                                               bw.write(info + "\n");
+                                       }
+                               } catch (Exception dex) {
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                       LOGGER.error("error trying to print detail info for a ghost-node:  " + LogFormatTools.getStackTop(dex));
+                               }
+                       }
+
+                       bw.write("\n ------------- Missing aai-node-type NODES - detail: ");
+                       for (Map.Entry<String, Vertex> entry : missingAaiNtNodeHash
+                                       .entrySet()) {
+                               try {
+                                       String vid = entry.getKey();
+                                       bw.write("\n> Missing aai-node-type Node Vid = " + vid + "\n");
+                                       ArrayList<String> retArr = showPropertiesForNode(
+                                                       TRANSID, FROMAPPID, entry.getValue());
+                                       for (String info : retArr) {
+                                               bw.write(info + "\n");
+                                       }
+
+                                       retArr = showAllEdgesForNode(TRANSID, FROMAPPID,
+                                                       entry.getValue());
+                                       for (String info : retArr) {
+                                               bw.write(info + "\n");
+                                       }
+                               } catch (Exception dex) {
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                       LOGGER.error("error trying to print detail info for a node missing its aai-node-type  " + LogFormatTools.getStackTop(dex));
+                               }
+                       }
+
+                       bw.write("\n ------------- Missing Dependent Edge ORPHAN NODES - detail: ");
+                       for (Map.Entry<String, Vertex> entry : orphanNodeHash
+                                       .entrySet()) {
+                               try {
+                                       String vid = entry.getKey();
+                                       bw.write("\n> Orphan Node Vid = " + vid + "\n");
+                                       ArrayList<String> retArr = showPropertiesForNode(
+                                                       TRANSID, FROMAPPID, entry.getValue());
+                                       for (String info : retArr) {
+                                               bw.write(info + "\n");
+                                       }
+       
+                                       retArr = showAllEdgesForNode(TRANSID, FROMAPPID,
+                                                       entry.getValue());
+                                       for (String info : retArr) {
+                                               bw.write(info + "\n");
+                                       }
+                               } catch (Exception dex) {
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                       LOGGER.error("error trying to print detail info for a Orphan Node /missing dependent edge " + LogFormatTools.getStackTop(dex));
+                               }
+                       }
+
+                       bw.write("\n ------------- EDGES pointing to empty/bad vertices: ");
+                       for (Map.Entry<String, Edge> entry : oneArmedEdgeHash.entrySet()) {
+                               try {
+                                       String eid = entry.getKey();
+                                       Edge thisE = entry.getValue();
+                                       String badVid = emptyVertexHash.get(eid);
+                                       bw.write("\n>  Edge pointing to bad vertex (Vid = "
+                                                       + badVid + ") EdgeId = " + eid + "\n");
+                                       bw.write("Label: [" + thisE.label() + "]\n");
+                                       Iterator<Property<Object>> pI = thisE.properties();
+                                       while (pI.hasNext()) {
+                                               Property<Object> propKey = pI.next();
+                                               bw.write("Prop: [" + propKey + "], val = ["
+                                                               + propKey.value() + "]\n");
+                                       }
+                               } catch (Exception pex) {
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                       LOGGER.error("error trying to print empty/bad vertex data: " + LogFormatTools.getStackTop(pex));
+                               }
+                       }
+
+                       bw.write("\n ------------- Duplicates: ");
+                       Iterator<String> dupeIter = dupeGroups.iterator();
+                       int dupeSetCounter = 0;
+                       while (dupeIter.hasNext()) {
+                               dupeSetCounter++;
+                               String dset = (String) dupeIter.next();
+
+                               bw.write("\n --- Duplicate Group # " + dupeSetCounter
+                                               + " Detail -----------\n");
+                               try {
+                                       // We expect each line to have at least two vid's, followed
+                                       // by the preferred one to KEEP
+                                       String[] dupeArr = dset.split("\\|");
+                                       ArrayList<String> idArr = new ArrayList<>();
+                                       int lastIndex = dupeArr.length - 1;
+                                       for (int i = 0; i <= lastIndex; i++) {
+                                               if (i < lastIndex) {
+                                                       // This is not the last entry, it is one of the
+                                                       // dupes, so we want to show all its info
+                                                       bw.write("    >> Duplicate Group # "
+                                                                       + dupeSetCounter + "  Node # " + i
+                                                                       + " ----\n");
+                                                       String vidString = dupeArr[i];
+                                                       idArr.add(vidString);
+                                                       long longVertId = Long.parseLong(vidString);
+                                                       Iterator<Vertex> vtxIterator = g.vertices(longVertId);
+                                                       Vertex vtx = null;
+                                                       if (vtxIterator.hasNext()) {
+                                                               vtx = vtxIterator.next();
+                                                       }
+                                                       ArrayList<String> retArr = showPropertiesForNode(TRANSID, FROMAPPID, vtx);
+                                                       for (String info : retArr) {
+                                                               bw.write(info + "\n");
+                                                       }
+
+                                                       retArr = showAllEdgesForNode(TRANSID,
+                                                                       FROMAPPID, vtx);
+                                                       for (String info : retArr) {
+                                                               bw.write(info + "\n");
+                                                       }
+                                               } else {
+                                                       // This is the last entry which should tell us if we
+                                                       // have a preferred keeper
+                                                       String prefString = dupeArr[i];
+                                                       if (prefString.equals("KeepVid=UNDETERMINED")) {
+                                                               bw.write("\n For this group of duplicates, could not tell which one to keep.\n");
+                                                               bw.write(" >>> This group needs to be taken care of with a manual/forced-delete.\n");
+                                                       } else {
+                                                               // If we know which to keep, then the prefString
+                                                               // should look like, "KeepVid=12345"
+                                                               String[] prefArr = prefString.split("=");
+                                                               if (prefArr.length != 2
+                                                                               || (!prefArr[0].equals("KeepVid"))) {
+                                                                       throw new Exception("Bad format. Expecting KeepVid=999999");
+                                                               } else {
+                                                                       String keepVidStr = prefArr[1];
+                                                                       if (idArr.contains(keepVidStr)) {
+                                                                               bw.write("\n The vertex we want to KEEP has vertexId = "
+                                                                                               + keepVidStr);
+                                                                               bw.write("\n The others become delete candidates: \n");
+                                                                               idArr.remove(keepVidStr);
+                                                                               for (int x = 0; x < idArr.size(); x++) {
+                                                                                       cleanupCandidateCount++;
+                                                                                       bw.write("DeleteCandidate: Duplicate Vid = ["
+                                                                                                       + idArr.get(x) + "]\n");
+                                                                               }
+                                                                       } else {
+                                                                               throw new Exception("ERROR - Vertex Id to keep not found in list of dupes.  dset = ["
+                                                                                               + dset + "]");
+                                                                       }
+                                                               }
+                                                       }// else we know which one to keep
+                                               }// else last entry
+                                       }// for each vertex in a group
+                               } catch (Exception dex) {
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                       LOGGER.error("error trying to print duplicate vertex data " + LogFormatTools.getStackTop(dex));
+                               }
+
+                       }// while - work on each group of dupes
+
+                       bw.write("\n ------------- Mis-matched Label/aai-node-type Nodes: \n ");
+                       for (Map.Entry<String, String> entry : misMatchedHash.entrySet()) {
+                               String msg = entry.getValue();
+                               bw.write("MixedMsg = " + msg + "\n");
+                       }
+
+                       bw.write("\n ------------- Got these errors while processing: \n");
+                       Iterator<String> errIter = errArr.iterator();
+                       while (errIter.hasNext()) {
+                               String line = (String) errIter.next();
+                               bw.write(line + "\n");
+                       }
+
+                       bw.close();
+
+                       LOGGER.info("\n ------------- Done doing all the checks ------------ ");
+                       LOGGER.info("Output will be written to " + fullOutputFileName);
+
+                       if (cleanupCandidateCount > 0) {
+                               // Technically, this is not an error -- but we're throwing this
+                               // error so that hopefully a
+                               // monitoring system will pick it up and do something with it.
+                               throw new AAIException("AAI_6123", "See file: [" + fullOutputFileName
+                                               + "] and investigate delete candidates. ");
+                       }
+               } catch (AAIException e) {
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       LOGGER.error("Caught AAIException while grooming data");
+                       ErrorLogHelper.logException(e);
+               } catch (Exception ex) {
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       LOGGER.error("Caught exception while grooming data");
+                       ErrorLogHelper.logError("AAI_6128", ex.getMessage() + ", resolve and rerun dataGrooming");
+               } finally {
+
+                       if (bw != null) {
+                               try {
+                                       bw.close();
+                               } catch (IOException iox) {
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+                                       LOGGER.warn("Got an IOException trying to close bufferedWriter() \n", iox);
+                               }
+                       }
+
+                       if (executeFinalCommit) {
+                               // If we were holding off on commits till the end - then now is the time.
+                               if( g == null ){
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                       LOGGER.error(" >>>> ERROR <<<<   Could not commit changes. graph was null when we wanted to commit.");
+                               }
+                               else if( !g.tx().isOpen() ){
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                       LOGGER.error(" >>>> ERROR <<<<   Could not commit changes. Transaction was not open when we wanted to commit.");
+                               }
+                               else {
+                                       try {
+                                               LOGGER.info("About to do the commit for "
+                                                       + deleteCount + " removes. ");
+                                               g.tx().commit();
+                                               LOGGER.info("Commit was successful ");
+                                       } catch (Exception excom) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                               LOGGER.error(" >>>> ERROR <<<<   Could not commit changes. " + LogFormatTools.getStackTop(excom));
+                                               deleteCount = 0;
+                                       }
+                               }
+                       }
+                       else if (g != null && g.tx().isOpen()) {
+                               try {
+                                       // We did not do any deletes that need to be committed.
+                                       // The rollback is to clear out the transaction used while doing those reads
+                                       g.tx().rollback();
+                               } catch (Exception ex) {
+                                       // Don't throw anything because JanusGraph sometimes is just saying that the graph is already closed
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+                                       LOGGER.warn("WARNING from final graphTransaction.rollback()", ex);
+                               }
+                       }
+                       
+                       if (g2 != null && g2.tx().isOpen()) {
+                               try {
+                                       // We only read on g2.  The rollback is to clear out the transaction used while doing those reads
+                                       g2.tx().rollback();
+                               } catch (Exception ex) {
+                                       // Don't throw anything because JanusGraph sometimes is just saying that the graph is already closed
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+                                       LOGGER.warn("WARNING from final graphTransaction2.rollback()", ex);
+                               }
+                       }
+                               
+                       if( finalShutdownFlag ){
+                               try {
+                                       if( graph != null && graph.isOpen() ){
+                                               graph.tx().close();
+                                               if( "true".equals(System.getProperty("org.onap.aai.graphadmin.started"))) {
+                                                       // Since dataGrooming was called from a scheduled task - do not call graph.close() 
+                                               }
+                                               else {
+                                                       // DataGrooming must have been called manually - so we need to call close().
+                                                       graph.close();
+                                               }
+                                       }
+                               } catch (Exception ex) {
+                                       // Don't throw anything because JanusGraph sometimes is just saying that the graph is already closed{
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+                                       LOGGER.warn("WARNING from final graph.shutdown()", ex);
+                               }
+                               
+                               try {
+                                       if( graph2 != null && graph2.isOpen() ){
+                                               graph2.tx().close();
+                                               if( "true".equals(System.getProperty("org.onap.aai.graphadmin.started"))) {
+                                                       // Since dataGrooming was called from a scheduled task - do not call graph2.close() 
+                                               }
+                                               else {
+                                                       // DataGrooming must have been called manually - so we need to call close().
+                                                       graph2.close();
+                                               }
+                                       }
+                               } catch (Exception ex) {
+                                       // Don't throw anything because JanusGraph sometimes is just saying that the graph is already closed{
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+                                       LOGGER.warn("WARNING from final graph2.shutdown()", ex);
+                               }
+                       }
+                               
+               }
+
+               return cleanupCandidateCount;
+
+       }// end of doTheGrooming()
+       
+       
+       private void updateIndexedProps(Vertex thisVtx, String thisVidStr, String nType,
+                       HashMap <String,String>propTypeHash, ArrayList <String> indexedProps) {
+               // This is a "missing-aai-node-type" scenario.
+               // Other indexes may also be messed up, so we will update all of them on
+               //    this pass.  A future pass will just treat this node like a regular orphan
+               //    and delete it (if appropriate).
+               LOGGER.info("  We will be updating the indexed properties for this node to dummy values.  VID = " + thisVidStr );
+               String dummyPropValStr = thisVidStr + "dummy";
+               // These reserved-prop-names are all indexed for all nodes
+               thisVtx.property("aai-node-type",nType);
+               thisVtx.property("aai-uri", dummyPropValStr);
+               thisVtx.property("aai-unique-key", dummyPropValStr);
+               thisVtx.property("aai-uuid", dummyPropValStr);
+               thisVtx.property("source-of-truth", dummyPropValStr);
+               Iterator<String> indexedPropI = indexedProps.iterator();
+               while (indexedPropI.hasNext()) {
+                       String propName = indexedPropI.next();
+                       // Using the VID in case this property is unique in the db and
+                       // we're doing this kind of thing on more than one of these nodes..
+                       String dataType = propTypeHash.get(propName);
+                       if( dataType == null || dataType.toLowerCase().endsWith(".string") ){
+                               thisVtx.property(propName, dummyPropValStr);
+                       }
+                       else if( dataType.toLowerCase().endsWith(".long") ){
+                               Long thisVidLong = (Long) thisVtx.id();
+                               thisVtx.property(propName, thisVidLong);
+                       }
+                       else if( dataType.toLowerCase().endsWith(".boolean") ){
+                               thisVtx.property(propName, false);
+                       }
+                       else if( dataType.toLowerCase().endsWith(".integer") ){
+                               thisVtx.property(propName, 9999);
+                       }
+                       else {
+                               // Not sure what it is - try a string
+                               thisVtx.property(propName, dummyPropValStr);
+                       }
+               }
+       }
+
+       /**
+        * Vertex has these keys.
+        *
+        * @param tmpV the tmp V
+        * @param propHashWithKeys the prop hash with keys
+        * @return the boolean
+        */
+       private Boolean vertexHasTheseKeys( Vertex tmpV, HashMap <String, Object> propHashWithKeys) {
+               Iterator <?> it = propHashWithKeys.entrySet().iterator();
+               while( it.hasNext() ){
+                       String propName = "";
+                       String propVal = "";
+                       Map.Entry <?,?>propEntry = (Map.Entry<?,?>)it.next();
+                       Object propNameObj = propEntry.getKey();
+                       if( propNameObj != null ){
+                               propName = propNameObj.toString();
+                       }
+                       Object propValObj = propEntry.getValue();
+                       if( propValObj != null ){
+                               propVal = propValObj.toString();
+                       }
+                       Object checkValObj = tmpV.<Object>property(propName).orElse(null);
+                       if( checkValObj == null ) {
+                               return false;
+                       }
+                       else if( !propVal.equals(checkValObj.toString()) ){
+                               return false;
+                       }
+               }
+               return true;
+       }       
+       
+       
	/**
	 * Checks whether a vertex is missing any of the key properties required
	 * for its node type.
	 *
	 * @param nType the aai-node-type of the vertex
	 * @param v the vertex whose key properties are checked
	 * @param loader schema loader used to look up the key property names for nType
	 * @return true if any required key property is absent or empty; false when
	 *         all keys are present, or when the keys could not be determined
	 *         (e.g. unknown node type) -- in that case we do not declare the
	 *         node to be missing keys
	 */
	private Boolean anyKeyFieldsMissing(String nType, Vertex v, Loader loader) {
		
		try {
			Introspector obj = null;
			try {
				obj = loader.introspectorFromName(nType);
			} catch (AAIUnknownObjectException e) {
				// They gave us a non-empty nodeType but our NodeKeyProps does
				//   not have data for it.  Since we do not know what the
				//   key params are for this type of node, we will just
				//   return "false".
				String emsg = " -- WARNING -- Unrecognized nodeType: [" + nType 
						+ "].  We cannot determine required keys for this nType. ";
				// NOTE - this throw is caught by the outer catch below, which
				// turns it into a "false" return
				throw new AAIException("AAI_6121", emsg);
			}	
			
			// Determine what the key fields are for this nodeType
			Collection <String> keyPropNamesColl = obj.getKeys();
			Iterator<String> keyPropI = keyPropNamesColl.iterator();
			while (keyPropI.hasNext()) {
				String propName = keyPropI.next();
				Object ob = v.<Object>property(propName).orElse(null);
				if (ob == null || ob.toString().equals("")) {
					// It is missing a key property
					return true;
				}
			}
		} catch (AAIException e) {
			// Something was wrong -- but since we weren't able to check
			// the keys, we will not declare that it is missing keys.
			return false;
		}
		return false;
	}
+       
+
+       /**
+        * Gets the delete list.
+        *
+        * @param targetDir the target dir
+        * @param fileName the file name
+        * @param edgesOnlyFlag the edges only flag
+        * @param dontFixOrphans the dont fix orphans
+        * @param dupeFixOn the dupe fix on
+        * @return the delete list
+        * @throws AAIException the AAI exception
+        */
+       private Set<String> getDeleteList(String targetDir,
+                       String fileName, Boolean edgesOnlyFlag, Boolean dontFixOrphans,
+                       Boolean dupeFixOn) throws AAIException {
+
+               // Look in the file for lines formated like we expect - pull out any
+               // Vertex Id's to delete on this run
+               Set<String> delList = new LinkedHashSet<>();
+               String fullFileName = targetDir + AAIConstants.AAI_FILESEP + fileName;
+
+               try(BufferedReader br = new BufferedReader(new FileReader(fullFileName))) {
+                       String line = br.readLine();
+                       while (line != null) {
+                               if (!"".equals(line) && line.startsWith("DeleteCandidate")) {
+                                       if (edgesOnlyFlag && (!line.contains("Bad Edge"))) {
+                                               // We're only processing edges and this line is not for an edge
+                                       } else if (dontFixOrphans && line.contains("Orphan")) {
+                                               // We're not going to process orphans
+                                       } else if (!dupeFixOn && line.contains("Duplicate")) {
+                                               // We're not going to process Duplicates
+                                       } else {
+                                               int begIndex = line.indexOf("id = ");
+                                               int endIndex = line.indexOf("]");
+                                               String vidVal = line.substring(begIndex + 6, endIndex);
+                                               delList.add(vidVal);
+                                       }
+                               }
+                               line = br.readLine();
+                       }
+                       br.close();
+               } catch (IOException e) {
+                       throw new AAIException("AAI_6124", e, "Could not open input-file [" + fullFileName
+                                       + "], exception= " + e.getMessage());
+               }
+
+               return delList;
+
+       }// end of getDeleteList
+
+       /**
+        * Gets the preferred dupe.
+        *
+        * @param transId the trans id
+        * @param fromAppId the from app id
+        * @param g the g
+        * @param dupeVertexList the dupe vertex list
+        * @param ver the ver
+        * @return Vertex
+        * @throws AAIException the AAI exception
+        */
+       public Vertex getPreferredDupe(String transId,
+                       String fromAppId, GraphTraversalSource g,
+                       ArrayList<Vertex> dupeVertexList, String ver, Loader loader)
+                       throws AAIException {
+
+               // This method assumes that it is being passed a List of vertex objects
+               // which
+               // violate our uniqueness constraints.
+
+               Vertex nullVtx = null;
+
+               if (dupeVertexList == null) {
+                       return nullVtx;
+               }
+               int listSize = dupeVertexList.size();
+               if (listSize == 0) {
+                       return nullVtx;
+               }
+               if (listSize == 1) {
+                       return (dupeVertexList.get(0));
+               }
+
+               Vertex vtxPreferred = null;
+               Vertex currentFaveVtx = dupeVertexList.get(0);
+               for (int i = 1; i < listSize; i++) {
+                       Vertex vtxB = dupeVertexList.get(i);
+                       vtxPreferred = pickOneOfTwoDupes(transId, fromAppId, g,
+                                       currentFaveVtx, vtxB, ver, loader);
+                       if (vtxPreferred == null) {
+                               // We couldn't choose one
+                               return nullVtx;
+                       } else {
+                               currentFaveVtx = vtxPreferred;
+                       }
+               }
+
+               return (currentFaveVtx);
+
+       } // end of getPreferredDupe()
+
+       /**
+        * Pick one of two dupes.
+        *
+        * @param transId the trans id
+        * @param fromAppId the from app id
+        * @param g the g
+        * @param vtxA the vtx A
+        * @param vtxB the vtx B
+        * @param ver the ver
+        * @return Vertex
+        * @throws AAIException the AAI exception
+        */
+       public Vertex pickOneOfTwoDupes(String transId,
+                       String fromAppId, GraphTraversalSource g, Vertex vtxA,
+                       Vertex vtxB, String ver, Loader loader) throws AAIException {
+
+               Vertex nullVtx = null;
+               Vertex preferredVtx = null;
+
+               Long vidA = new Long(vtxA.id().toString());
+               Long vidB = new Long(vtxB.id().toString());
+
+               String vtxANodeType = "";
+               String vtxBNodeType = "";
+               Object objType = vtxA.<Object>property("aai-node-type").orElse(null);
+               if (objType != null) {
+                       vtxANodeType = objType.toString();
+               }
+               objType = vtxB.<Object>property("aai-node-type").orElse(null);
+               if (objType != null) {
+                       vtxBNodeType = objType.toString();
+               }
+
+               if (vtxANodeType.equals("") || (!vtxANodeType.equals(vtxBNodeType))) {
+                       // Either they're not really dupes or there's some bad data - so
+                       // don't pick one
+                       return nullVtx;
+               }
+
+               // Check that node A and B both have the same key values (or else they
+               // are not dupes)
+               // (We'll check dep-node later)
+               // Determine what the key fields are for this nodeType
+               Collection <String> keyProps = new ArrayList <>();
+               HashMap <String,Object> keyPropValsHash = new HashMap <String,Object>();
+               try {
+                       keyProps = loader.introspectorFromName(vtxANodeType).getKeys();
+               } catch (AAIUnknownObjectException e) {
+                       LOGGER.warn("Required property not found", e);
+                       throw new AAIException("AAI_6105", "Required Property name(s) not found for nodeType = " + vtxANodeType + ")");
+               }
+               
+               Iterator<String> keyPropI = keyProps.iterator();
+               while (keyPropI.hasNext()) {
+                       String propName = keyPropI.next();
+                       String vtxAKeyPropVal = "";
+                       objType = vtxA.<Object>property(propName).orElse(null);
+                       if (objType != null) {
+                               vtxAKeyPropVal = objType.toString();
+                       }
+                       String vtxBKeyPropVal = "";
+                       objType = vtxB.<Object>property(propName).orElse(null);
+                       if (objType != null) {
+                               vtxBKeyPropVal = objType.toString();
+                       }
+
+                       if (vtxAKeyPropVal.equals("")
+                                       || (!vtxAKeyPropVal.equals(vtxBKeyPropVal))) {
+                               // Either they're not really dupes or they are missing some key
+                               // data - so don't pick one
+                               return nullVtx;
+                       }
+                       else {
+                               // Keep these around for (potential) use later
+                               keyPropValsHash.put(propName, vtxAKeyPropVal);
+                       }
+                            
+               }
+
+               // Collect the vid's and aai-node-types of the vertices that each vertex
+               // (A and B) is connected to.
+               ArrayList<String> vtxIdsConn2A = new ArrayList<>();
+               ArrayList<String> vtxIdsConn2B = new ArrayList<>();
+               HashMap<String, String> nodeTypesConn2A = new HashMap<>();
+               HashMap<String, String> nodeTypesConn2B = new HashMap<>();
+
+               ArrayList<Vertex> vertListA = getConnectedNodes( g, vtxA );
+               if (vertListA != null) {
+                       Iterator<Vertex> iter = vertListA.iterator();
+                       while (iter.hasNext()) {
+                               Vertex tvCon = iter.next();
+                               String conVid = tvCon.id().toString();
+                               String nt = "";
+                               objType = tvCon.<Object>property("aai-node-type").orElse(null);
+                               if (objType != null) {
+                                       nt = objType.toString();
+                               }
+                               nodeTypesConn2A.put(nt, conVid);
+                               vtxIdsConn2A.add(conVid);
+                       }
+               }
+
+               ArrayList<Vertex> vertListB = getConnectedNodes( g, vtxB );
+               if (vertListB != null) {
+                       Iterator<Vertex> iter = vertListB.iterator();
+                       while (iter.hasNext()) {
+                               Vertex tvCon = iter.next();
+                               String conVid = tvCon.id().toString();
+                               String nt = "";
+                               objType = tvCon.<Object>property("aai-node-type").orElse(null);
+                               if (objType != null) {
+                                       nt = objType.toString();
+                               }
+                               nodeTypesConn2B.put(nt, conVid);
+                               vtxIdsConn2B.add(conVid);
+                       }
+               }
+
+               // 1 - If this kind of node needs a dependent node for uniqueness, then
+               //    verify that they both nodes point to the same dependent 
+               //    node (otherwise they're not really duplicates)
+               // Note - there are sometimes more than one dependent node type since
+               //    one nodeType can be used in different ways. But for a 
+               //    particular node, it will only have one dependent node that 
+               //    it's connected to.
+               String onlyNodeThatIndexPointsToVidStr = "";
+               Collection<String> depNodeTypes = loader.introspectorFromName(vtxANodeType).getDependentOn();
+               if (depNodeTypes.isEmpty()) {
+                       // This kind of node is not dependent on any other. That is ok.
+                       // We need to find out if the unique index info is good or not and
+                       // use that later when deciding if we can delete one.
+                       onlyNodeThatIndexPointsToVidStr = findJustOneUsingIndex( transId,
+                                       fromAppId, g, keyPropValsHash, vtxANodeType, vidA, vidB, ver );
+               } else {
+                       String depNodeVtxId4A = "";
+                       String depNodeVtxId4B = "";
+                       Iterator<String> iter = depNodeTypes.iterator();
+                       while (iter.hasNext()) {
+                               String depNodeType = iter.next();
+                               if (nodeTypesConn2A.containsKey(depNodeType)) {
+                                       // This is the dependent node type that vertex A is using
+                                       depNodeVtxId4A = nodeTypesConn2A.get(depNodeType);
+                               }
+                               if (nodeTypesConn2B.containsKey(depNodeType)) {
+                                       // This is the dependent node type that vertex B is using
+                                       depNodeVtxId4B = nodeTypesConn2B.get(depNodeType);
+                               }
+                       }
+                       if (depNodeVtxId4A.equals("")
+                                       || (!depNodeVtxId4A.equals(depNodeVtxId4B))) {
+                               // Either they're not really dupes or there's some bad data - so
+                               // don't pick either one
+                               return nullVtx;
+                       }
+               }
+
+               if (vtxIdsConn2A.size() == vtxIdsConn2B.size()) {
+                       // 2 - If they both have edges to all the same vertices, 
+                       //  then return the one that can be reached uniquely via the 
+                       //  key if that is the case or
+                       //  else the one with the lower vertexId
+                       
+                       boolean allTheSame = true;
+                       Iterator<String> iter = vtxIdsConn2A.iterator();
+                       while (iter.hasNext()) {
+                               String vtxIdConn2A = iter.next();
+                               if (!vtxIdsConn2B.contains(vtxIdConn2A)) {
+                                       allTheSame = false;
+                                       break;
+                               }
+                       }
+
+                       if (allTheSame) {
+                               // If everything is the same, but one of the two has a good 
+                               // pointer to it, then save that one.  Otherwise, take the
+                               // older one.
+                               if( !onlyNodeThatIndexPointsToVidStr.equals("") ){
+                                       // only one is reachable via the index - choose that one.
+                                       if( onlyNodeThatIndexPointsToVidStr.equals(vidA.toString()) ){
+                                               preferredVtx = vtxA;
+                                       }
+                                       else if( onlyNodeThatIndexPointsToVidStr.equals(vidB.toString()) ){
+                                               preferredVtx = vtxB;
+                                       }
+                               }
+                               else if (vidA < vidB) {
+                                       preferredVtx = vtxA;
+                               } else {
+                                       preferredVtx = vtxB;
+                               }
+                       }
+               } else if (vtxIdsConn2A.size() > vtxIdsConn2B.size()) {
+                       // 3 - VertexA is connected to more things than vtxB.
+                       // We'll pick VtxA if its edges are a superset of vtxB's edges 
+                       //   and it doesn't contradict the check for the index/key pointer.
+                       boolean missingOne = false;
+                       Iterator<String> iter = vtxIdsConn2B.iterator();
+                       while (iter.hasNext()) {
+                               String vtxIdConn2B = iter.next();
+                               if (!vtxIdsConn2A.contains(vtxIdConn2B)) {
+                                       missingOne = true;
+                                       break;
+                               }
+                       }
+                       if (!missingOne) {
+                               if( onlyNodeThatIndexPointsToVidStr.equals("") 
+                                               || onlyNodeThatIndexPointsToVidStr.equals(vidA.toString()) ){
+                                       preferredVtx = vtxA;
+                               }
+                       }
+               } else if (vtxIdsConn2B.size() > vtxIdsConn2A.size()) {
+                       // 4 - VertexB is connected to more things than vtxA.
+                       // We'll pick VtxB if its edges are a superset of vtxA's edges
+                       //   and it doesn't contradict the check for the index/key pointer.
+                       boolean missingOne = false;
+                       Iterator<String> iter = vtxIdsConn2A.iterator();
+                       while (iter.hasNext()) {
+                               String vtxIdConn2A = iter.next();
+                               if (!vtxIdsConn2B.contains(vtxIdConn2A)) {
+                                       missingOne = true;
+                                       break;
+                               }
+                       }
+                       if (!missingOne) {
+                               if( onlyNodeThatIndexPointsToVidStr.equals("") 
+                                               || onlyNodeThatIndexPointsToVidStr.equals(vidB.toString()) ){
+                                       preferredVtx = vtxB;
+                               }
+                       }
+               } else {
+                       preferredVtx = nullVtx;
+               }
+
+               return (preferredVtx);
+
+       } // end of pickOneOfTwoDupes()
+
+       /**
+        * Check and process dupes.
+        *
+        * @param transId the trans id
+        * @param fromAppId the from app id
+        * @param g the g
+        * @param version the version
+        * @param nType the n type
+        * @param passedVertList the passed vert list
+        * @param dupeFixOn the dupe fix on
+        * @param deleteCandidateList the delete candidate list
+        * @param singleCommits the single commits
+        * @param alreadyFoundDupeGroups the already found dupe groups
+        * @return the array list
+        */
+       private List<String> checkAndProcessDupes(String transId,
+                       String fromAppId, Graph g, GraphTraversalSource source, String version, String nType,
+                       List<Vertex> passedVertList, Boolean dupeFixOn,
+                       Set<String> deleteCandidateList, Boolean singleCommits,
+                       ArrayList<String> alreadyFoundDupeGroups, Loader loader ) {
+               
+               ArrayList<String> returnList = new ArrayList<>();
+               ArrayList<Vertex> checkVertList = new ArrayList<>();
+               ArrayList<String> alreadyFoundDupeVidArr = new ArrayList<>();
+               Boolean noFilterList = true;
+               Iterator<String> afItr = alreadyFoundDupeGroups.iterator();
+               while (afItr.hasNext()) {
+                       String dupeGrpStr = afItr.next();
+                       String[] dupeArr = dupeGrpStr.split("\\|");
+                       int lastIndex = dupeArr.length - 1;
+                       for (int i = 0; i < lastIndex; i++) {
+                               // Note: we don't want the last one...
+                               String vidString = dupeArr[i];
+                               alreadyFoundDupeVidArr.add(vidString);
+                               noFilterList = false;
+                       }
+               }
+
+               // For a given set of Nodes that were found with a set of KEY
+               // Parameters, (nodeType + key data) we will
+               // see if we find any duplicate nodes that need to be cleaned up. Note -
+               // it's legit to have more than one
+               // node with the same key data if the nodes depend on a parent for
+               // uniqueness -- as long as the two nodes
+               // don't hang off the same Parent.
+               // If we find duplicates, and we can figure out which of each set of
+               // duplicates is the one that we
+               // think should be preserved, we will record that. Whether we can tell
+               // which one should be
+               // preserved or not, we will return info about any sets of duplicates
+               // found.
+               //
+               // Each element in the returned arrayList might look like this:
+               // "1234|5678|keepVid=UNDETERMINED" (if there were 2 dupes, and we
+               // couldn't figure out which one to keep)
+               // or, "100017|200027|30037|keepVid=30037" (if there were 3 dupes and we
+               // thought the third one was the one that should survive)
+
+               // Because of the way the calling code loops over stuff, we can get the
+               // same data multiple times - so we should
+               // not process any vertices that we've already seen.
+
+               try {
+                       Iterator<Vertex> pItr = passedVertList.iterator();
+                       while (pItr.hasNext()) {
+                               Vertex tvx = pItr.next();
+                               String passedId = tvx.id().toString();
+                               if (noFilterList || !alreadyFoundDupeVidArr.contains(passedId)) {
+                                       // We haven't seen this one before - so we should check it.
+                                       checkVertList.add(tvx);
+                               }
+                       }
+
+                       if (checkVertList.size() < 2) {
+                               // Nothing new to check.
+                               return returnList;
+                       }
+
+                       if (loader.introspectorFromName(nType).isTopLevel()) {
+                               // If this was a node that does NOT depend on other nodes for
+                               // uniqueness, and we
+                               // found more than one node using its key -- record the found
+                               // vertices as duplicates.
+                               String dupesStr = "";
+                               for (int i = 0; i < checkVertList.size(); i++) {
+                                       dupesStr = dupesStr
+                                                       + ((checkVertList.get(i))).id()
+                                                                       .toString() + "|";
+                               }
+                               if (dupesStr != "") {
+                                       Vertex prefV = getPreferredDupe(transId, fromAppId,
+                                                       source, checkVertList, version, loader);
+                                       if (prefV == null) {
+                                               // We could not determine which duplicate to keep
+                                               dupesStr = dupesStr + "KeepVid=UNDETERMINED";
+                                               returnList.add(dupesStr);
+                                       } else {
+                                               dupesStr = dupesStr + "KeepVid=" + prefV.id();
+                                               Boolean didRemove = false;
+                                               if (dupeFixOn) {
+                                                       didRemove = deleteNonKeepersIfAppropriate(g,
+                                                                       dupesStr, prefV.id().toString(),
+                                                                       deleteCandidateList, singleCommits);
+                                               }
+                                               if (didRemove) {
+                                                       dupeGrpsDeleted++;
+                                               } else {
+                                                       // keep them on our list
+                                                       returnList.add(dupesStr);
+                                               }
+                                       }
+                               }
+                       } else {
+                               // More than one node have the same key fields since they may
+                               // depend on a parent node for uniqueness. Since we're finding 
+                               // more than one, we want to check to see if any of the
+                               // vertices that have this set of keys (and are the same nodeType)
+                               // are also pointing at the same 'parent' node.
+                               // Note: for a given set of key data, it is possible that there
+                               // could be more than one set of duplicates.
+                               HashMap<String, ArrayList<Vertex>> vertsGroupedByParentHash = groupVertsByDepNodes(
+                                               transId, fromAppId, source, version, nType,
+                                               checkVertList, loader);
+                               for (Map.Entry<String, ArrayList<Vertex>> entry : vertsGroupedByParentHash
+                                               .entrySet()) {
+                                       ArrayList<Vertex> thisParentsVertList = entry
+                                                       .getValue();
+                                       if (thisParentsVertList.size() > 1) {
+                                               // More than one vertex found with the same key info
+                                               // hanging off the same parent/dependent node
+                                               String dupesStr = "";
+                                               for (int i = 0; i < thisParentsVertList.size(); i++) {
+                                                       dupesStr = dupesStr
+                                                                       + ((thisParentsVertList
+                                                                                       .get(i))).id() + "|";
+                                               }
+                                               if (dupesStr != "") {
+                                                       Vertex prefV = getPreferredDupe(transId,
+                                                                       fromAppId, source, thisParentsVertList,
+                                                                       version, loader);
+
+                                                       if (prefV == null) {
+                                                               // We could not determine which duplicate to
+                                                               // keep
+                                                               dupesStr = dupesStr + "KeepVid=UNDETERMINED";
+                                                               returnList.add(dupesStr);
+                                                       } else {
+                                                               Boolean didRemove = false;
+                                                               dupesStr = dupesStr + "KeepVid="
+                                                                               + prefV.id().toString();
+                                                               if (dupeFixOn) {
+                                                                       didRemove = deleteNonKeepersIfAppropriate(
+                                                                                       g, dupesStr, prefV.id()
+                                                                                                       .toString(),
+                                                                                       deleteCandidateList, singleCommits);
+                                                               }
+                                                               if (didRemove) {
+                                                                       dupeGrpsDeleted++;
+                                                               } else {
+                                                                       // keep them on our list
+                                                                       returnList.add(dupesStr);
+                                                               }
+                                                       }
+                                               }
+                                       }
+                               }
+                       }
+               } catch (Exception e) {
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       LOGGER.warn(" >>> Threw an error in checkAndProcessDupes - just absorb this error and move on. ", e);
+               }
+
+               return returnList;
+
+       }// End of checkAndProcessDupes()
+
+       /**
+        * Group verts by dep nodes.
+        *
+        * @param transId the trans id
+        * @param fromAppId the from app id
+        * @param g the g
+        * @param version the version
+        * @param nType the n type
+        * @param passedVertList the passed vert list
+        * @return the hash map
+        * @throws AAIException the AAI exception
+        */
+       private HashMap<String, ArrayList<Vertex>> groupVertsByDepNodes(
+                       String transId, String fromAppId, GraphTraversalSource g, String version,
+                       String nType, ArrayList<Vertex> passedVertList, Loader loader)
+                       throws AAIException {
+               // Given a list of JanusGraph Vertices of one nodeType (see AAI-8956), group 
+               // them together by the parent node they depend on.
+               // Ie. if given a list of ip address nodes (assumed to all have the
+               // same key info) they might sit under several different parent vertices.
+               // Under Normal conditions, there would only be one per parent -- but
+               // we're trying to find duplicates - so we
+               // allow for the case where more than one is under the same parent node.
+
+               HashMap<String, ArrayList<Vertex>> retHash = new HashMap<String, ArrayList<Vertex>>();
+               if (loader.introspectorFromName(nType).isTopLevel()) {
+                       // This method really should not have been called if this is not the
+                       // kind of node
+                       // that depends on a parent for uniqueness, so just return the empty
+                       // hash.
+                       return retHash;
+               }
+
+               // Find out what types of nodes the passed in nodes can depend on
+               ArrayList<String> depNodeTypeL = new ArrayList<>();
+               Collection<String> depNTColl = loader.introspectorFromName(nType).getDependentOn();
+               Iterator<String> ntItr = depNTColl.iterator();
+               while (ntItr.hasNext()) {
+                       depNodeTypeL.add(ntItr.next());
+               }
+               // For each vertex, we want find its depended-on/parent vertex so we
+               // can track what other vertexes that are dependent on that same guy.
+               if (passedVertList != null) {
+                       Iterator<Vertex> iter = passedVertList.iterator();
+                       while (iter.hasNext()) {
+                               Vertex thisVert = iter.next();
+                               Vertex tmpParentVtx = getConnectedParent( g, thisVert );
+                               if( tmpParentVtx != null ) {
+                                       String parentNt = null;
+                                       Object obj = tmpParentVtx.<Object>property("aai-node-type").orElse(null);
+                                       if (obj != null) {
+                                               parentNt = obj.toString();
+                                       }
+                                       if (depNTColl.contains(parentNt)) {
+                                               // This must be the parent/dependent node
+                                               String parentVid = tmpParentVtx.id().toString();
+                                               if (retHash.containsKey(parentVid)) {
+                                                       // add this vert to the list for this parent key
+                                                       retHash.get(parentVid).add(thisVert);
+                                               } else {
+                                                       // This is the first one we found on this parent
+                                                       ArrayList<Vertex> vList = new ArrayList<>();
+                                                       vList.add(thisVert);
+                                                       retHash.put(parentVid, vList);
+                                               }
+                                       }
+                               }
+                       }
+               }
+
+               return retHash;
+
+       }// end of groupVertsByDepNodes()
+
+       /**
+        * Delete non keepers if appropriate.
+        *
+        * @param g the g
+        * @param dupeInfoString the dupe info string
+        * @param vidToKeep the vid to keep
+        * @param deleteCandidateList the delete candidate list
+        * @param singleCommits the single commits
+        * @return the boolean
+        */
+       private Boolean deleteNonKeepersIfAppropriate(Graph g,
+                       String dupeInfoString, String vidToKeep,
+                       Set<String> deleteCandidateList, Boolean singleCommits) {
+
+               Boolean deletedSomething = false;
+               // This assumes that the dupeInfoString is in the format of
+               // pipe-delimited vid's followed by
+               // ie. "3456|9880|keepVid=3456"
+               if (deleteCandidateList == null || deleteCandidateList.size() == 0) {
+                       // No vid's on the candidate list -- so no deleting will happen on
+                       // this run
+                       return false;
+               }
+
+               String[] dupeArr = dupeInfoString.split("\\|");
+               ArrayList<String> idArr = new ArrayList<>();
+               int lastIndex = dupeArr.length - 1;
+               for (int i = 0; i <= lastIndex; i++) {
+                       if (i < lastIndex) {
+                               // This is not the last entry, it is one of the dupes,
+                               String vidString = dupeArr[i];
+                               idArr.add(vidString);
+                       } else {
+                               // This is the last entry which should tell us if we have a
+                               // preferred keeper
+                               String prefString = dupeArr[i];
+                               if (prefString.equals("KeepVid=UNDETERMINED")) {
+                                       // They sent us a bad string -- nothing should be deleted if
+                                       // no dupe could be tagged as preferred
+                                       return false;
+                               } else {
+                                       // If we know which to keep, then the prefString should look
+                                       // like, "KeepVid=12345"
+                                       String[] prefArr = prefString.split("=");
+                                       if (prefArr.length != 2 || (!prefArr[0].equals("KeepVid"))) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                               LOGGER.error("Bad format. Expecting KeepVid=999999");
+                                               return false;
+                                       } else {
+                                               String keepVidStr = prefArr[1];
+                                               if (idArr.contains(keepVidStr)) {
+                                                       idArr.remove(keepVidStr);
+
+                                                       // So now, the idArr should just contain the vid's
+                                                       // that we want to remove.
+                                                       for (int x = 0; x < idArr.size(); x++) {
+                                                               boolean okFlag = true;
+                                                               String thisVid = idArr.get(x);
+                                                               if (deleteCandidateList.contains(thisVid)) {
+                                                                       // This vid is a valid delete candidate from
+                                                                       // a prev. run, so we can remove it.
+                                                                       try {
+                                                                               long longVertId = Long
+                                                                                               .parseLong(thisVid);
+                                                                               Vertex vtx = g
+                                                                                               .traversal().V(longVertId).next();
+                                                                               vtx.remove();
+
+                                                                               if (singleCommits) {
+                                                                                       // NOTE - the singleCommits option is not used in normal processing
+                                                                                       g.tx().commit();
+                                                                                       g = AAIGraph.getInstance().getGraph().newTransaction();
+                                                                               }
+                                                                       } catch (Exception e) {
+                                                                               okFlag = false;
+                                                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                                                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                                                               LOGGER.error("ERROR trying to delete VID = " + thisVid + " " + LogFormatTools.getStackTop(e));
+                                                                       }
+                                                                       if (okFlag) {
+                                                                               LOGGER.info(" DELETED VID = " + thisVid);
+                                                                               deletedSomething = true;
+                                                                       }
+                                                               }
+                                                       }
+                                               } else {
+                                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                                       LOGGER.error("ERROR - Vertex Id to keep not found in list of dupes.  dupeInfoString = ["
+                                                                       + dupeInfoString + "]");
+                                                       return false;
+                                               }
+                                       }
+                               }// else we know which one to keep
+                       }// else last entry
+               }// for each vertex in a group
+
+               return deletedSomething;
+
+       }// end of deleteNonKeepersIfAppropriate()
+
+       
+       /**
+        * Gets the node just using key params.
+        *
+        * @param transId the trans id
+        * @param fromAppId the from app id
+        * @param graph the graph
+        * @param nodeType the node type
+        * @param keyPropsHash the key props hash
+        * @param apiVersion the api version
+        * @return the node just using key params
+        * @throws AAIException the AAI exception
+        */
+       public List <Vertex> getNodeJustUsingKeyParams( String transId, String fromAppId, GraphTraversalSource graph, String nodeType,
+                       HashMap<String,Object> keyPropsHash, String apiVersion )         throws AAIException{
+               
+               List <Vertex> retVertList = new ArrayList <> ();
+               
+               // We assume that all NodeTypes have at least one key-property defined.  
+               // Note - instead of key-properties (the primary key properties), a user could pass
+               //        alternate-key values if they are defined for the nodeType.
+               List<String> kName = new ArrayList<>();
+               List<Object> kVal = new ArrayList<>();
+               if( keyPropsHash == null || keyPropsHash.isEmpty() ) {
+                       throw new AAIException("AAI_6120", " NO key properties passed for this getNodeJustUsingKeyParams() request.  NodeType = [" + nodeType + "]. "); 
+               }
+               
+               int i = -1;
+               for( Map.Entry<String, Object> entry : keyPropsHash.entrySet() ){
+                       i++;
+                       kName.add(i, entry.getKey());
+                       kVal.add(i, entry.getValue());
+               }
+               int topPropIndex = i;
+               Vertex tiV = null;
+               String propsAndValuesForMsg = "";
+               Iterator <Vertex> verts = null;
+
+               try { 
+                       if( topPropIndex == 0 ){
+                               propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ") ";
+                               verts= graph.V().has(kName.get(0),kVal.get(0)).has("aai-node-type",nodeType);   
+                       }       
+                       else if( topPropIndex == 1 ){
+                               propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", " 
+                                               + kName.get(1) + " = " + kVal.get(1) + ") ";
+                               verts =  graph.V().has(kName.get(0),kVal.get(0)).has(kName.get(1),kVal.get(1)).has("aai-node-type",nodeType);   
+                       }                       
+                       else if( topPropIndex == 2 ){
+                               propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", " 
+                                               + kName.get(1) + " = " + kVal.get(1) + ", " 
+                                               + kName.get(2) + " = " + kVal.get(2) +  ") ";
+                               verts= graph.V().has(kName.get(0),kVal.get(0)).has(kName.get(1),kVal.get(1)).has(kName.get(2),kVal.get(2)).has("aai-node-type",nodeType);
+                       }       
+                       else if( topPropIndex == 3 ){
+                               propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", " 
+                                               + kName.get(1) + " = " + kVal.get(1) + ", " 
+                                               + kName.get(2) + " = " + kVal.get(2) + ", " 
+                                               + kName.get(3) + " = " + kVal.get(3) +  ") ";
+                               verts= graph.V().has(kName.get(0),kVal.get(0)).has(kName.get(1),kVal.get(1)).has(kName.get(2),kVal.get(2)).has(kName.get(3),kVal.get(3)).has("aai-node-type",nodeType);
+                       }                       
+                       else {
+                               throw new AAIException("AAI_6114", " We only support 4 keys per nodeType for now \n"); 
+                       }
+               }
+               catch( Exception ex ){
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       LOGGER.error( " ERROR trying to get node for: [" + propsAndValuesForMsg + "]" + LogFormatTools.getStackTop(ex));
+               }
+
+               if( verts != null ){
+                       while( verts.hasNext() ){
+                               tiV = verts.next();
+                               retVertList.add(tiV);
+                       }
+               }
+               
+               if( retVertList.size() == 0 ){
+                       LOGGER.debug("DEBUG No node found for nodeType = [" + nodeType +
+                                       "], propsAndVal = " + propsAndValuesForMsg );
+               }
+               
+               return retVertList;
+               
+       }// End of getNodeJustUsingKeyParams() 
+       
+       /**
+        * Show all edges for node.
+        *
+        * @param transId the trans id
+        * @param fromAppId the from app id
+        * @param tVert the t vert
+        * @return the array list
+        */
+       private ArrayList <String> showAllEdgesForNode( String transId, String fromAppId, Vertex tVert ){
+
+               ArrayList <String> retArr = new ArrayList <> ();
+               Iterator <Edge> eI = tVert.edges(Direction.IN);
+               if( ! eI.hasNext() ){
+                       retArr.add("No IN edges were found for this vertex. ");
+               }
+               while( eI.hasNext() ){
+                       Edge ed = eI.next();
+                       String lab = ed.label();
+                       Vertex vtx;
+                       if (tVert.equals(ed.inVertex())) {
+                               vtx = ed.outVertex();
+                       } else {
+                               vtx = ed.inVertex();
+                       }
+                       if( vtx == null ){
+                               retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
+                       }
+                       else {
+                               String nType = vtx.<String>property("aai-node-type").orElse(null);
+                               String vid = vtx.id().toString();
+                               retArr.add("Found an IN edge (" + lab + ") to this vertex from a [" + nType + "] node with VtxId = " + vid );
+                               
+                       }
+               }
+               
+               eI = tVert.edges(Direction.OUT);
+               if( ! eI.hasNext() ){
+                       retArr.add("No OUT edges were found for this vertex. ");
+               }
+               while( eI.hasNext() ){
+                       Edge ed =  eI.next();
+                       String lab = ed.label();
+                       Vertex vtx;
+                       if (tVert.equals(ed.inVertex())) {
+                               vtx = ed.outVertex();
+                       } else {
+                               vtx = ed.inVertex();
+                       }
+                       if( vtx == null ){
+                               retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
+                       }
+                       else {
+                               String nType = vtx.<String>property("aai-node-type").orElse(null);
+                               String vid = vtx.id().toString();
+                               retArr.add("Found an OUT edge (" + lab + ") from this vertex to a [" + nType + "] node with VtxId = " + vid );
+                       }
+               }
+               return retArr;
+       }
+
+       
+       /**
+        * Show properties for node.
+        *
+        * @param transId the trans id
+        * @param fromAppId the from app id
+        * @param tVert the t vert
+        * @return the array list
+        */
+       private ArrayList <String> showPropertiesForNode( String transId, String fromAppId, Vertex tVert ){
+
+               ArrayList <String> retArr = new ArrayList <> ();
+               if( tVert == null ){
+                       retArr.add("null Node object passed to showPropertiesForNode()\n");
+               }
+               else {
+                       String nodeType = "";
+                       Object ob = tVert.<Object>property("aai-node-type").orElse(null);
+                       if( ob == null ){
+                               nodeType = "null";
+                       }
+                       else{
+                               nodeType = ob.toString();
+                       }
+                       
+                       retArr.add(" AAINodeType/VtxID for this Node = [" + nodeType + "/" + tVert.id() + "]");
+                       retArr.add(" Property Detail: ");
+                       Iterator<VertexProperty<Object>> pI = tVert.properties();
+                       while( pI.hasNext() ){
+                               VertexProperty<Object> tp = pI.next();
+                               Object val = tp.value();
+                               retArr.add("Prop: [" + tp.key() + "], val = [" + val + "] ");
+                       }
+               }
+               return retArr;
+       }
+
+       
+       private ArrayList <Vertex> getConnectedNodes(GraphTraversalSource g, Vertex startVtx )
+                       throws AAIException {
+       
+               ArrayList <Vertex> retArr = new ArrayList <> ();
+               if( startVtx == null ){
+                       return retArr;
+               }
+               else {
+                        GraphTraversal<Vertex, Vertex> modPipe = null;
+                        modPipe = g.V(startVtx).both();
+                        if( modPipe != null && modPipe.hasNext() ){
+                               while( modPipe.hasNext() ){
+                                       Vertex conVert = modPipe.next();
+                                       retArr.add(conVert);
+                               }
+                       }
+               }
+               return retArr;
+               
+       }// End of getConnectedNodes()
+       
+
+       private ArrayList <Vertex> getConnectedChildrenOfOneType( GraphTraversalSource g,
+                       Vertex startVtx, String childNType ) throws AAIException{
+               
+               ArrayList <Vertex> childList = new ArrayList <> ();
+               Iterator <Vertex> vertI =  g.V(startVtx).union(__.outE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.OUT.toString()).inV(), __.inE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.IN.toString()).outV());
+               
+               Vertex tmpVtx = null;
+               while( vertI != null && vertI.hasNext() ){
+                       tmpVtx = vertI.next();
+                       Object ob = tmpVtx.<Object>property("aai-node-type").orElse(null);
+                       if (ob != null) {
+                               String tmpNt = ob.toString();
+                               if( tmpNt.equals(childNType)){
+                                       childList.add(tmpVtx);
+                               }
+                       }
+               }
+               
+               return childList;               
+
+       }// End of getConnectedChildrenOfOneType()
+
+
+       private Vertex getConnectedParent( GraphTraversalSource g,
+                       Vertex startVtx ) throws AAIException{
+               
+               Vertex parentVtx = null;
+               Iterator <Vertex> vertI = g.V(startVtx).union(__.inE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.OUT.toString()).outV(), __.outE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.IN.toString()).inV());
+
+               while( vertI != null && vertI.hasNext() ){
+                       // Note - there better only be one!
+                       parentVtx = vertI.next();
+               }
+               
+               return parentVtx;               
+
+       }// End of getConnectedParent()
+       
+       
+       private long figureWindowStartTime( int timeWindowMinutes ){
+               // Given a window size, calculate what the start-timestamp would be.
+               
+               if( timeWindowMinutes <= 0 ){
+                       // This just means that there is no window...
+                       return 0;
+               }
+               long unixTimeNow = System.currentTimeMillis();
+               long windowInMillis = timeWindowMinutes * 60L * 1000;
+               
+               long startTimeStamp = unixTimeNow - windowInMillis;
+               
+               return startTimeStamp;
+       } // End of figureWindowStartTime()
+       
+       
+       /**
+        * Collect Duplicate Sets for nodes that are NOT dependent on parent nodes.
+        *
+        * @param transId the trans id
+        * @param fromAppId the from app id
+        * @param g the g
+        * @param version the version
+        * @param nType the n type
+        * @param passedVertList the passed vert list
+        * @return the array list
+        */
+       private ArrayList<ArrayList<Vertex>> getDupeSets4NonDepNodes( String transId,
+                       String fromAppId, Graph g, String version, String nType,
+                       ArrayList<Vertex> passedVertList,
+                       ArrayList <String> keyPropNamesArr, 
+                        Loader loader ) {
+               
+               ArrayList<ArrayList<Vertex>> returnList = new ArrayList<ArrayList<Vertex>>();
+               
+               // We've been passed a set of nodes that we want to check. 
+               // They are all NON-DEPENDENT nodes of the same nodeType meaning that they should be 
+               // unique in the DB based on their KEY DATA alone.  So, if
+               // we group them by their key data - if any key has more than one
+               // vertex mapped to it, those vertices are dupes.
+               //
+               // When we find duplicates, we group them in an ArrayList (there can be
+               //     more than one duplicate for one set of key data)
+               // Then these dupeSets are grouped up and returned.
+               // 
+               
+               HashMap <String, ArrayList<String>> keyVals2VidHash = new HashMap <String, ArrayList<String>>();
+               HashMap <String,Vertex> vtxHash = new HashMap <String,Vertex>();
+               Iterator<Vertex> pItr = passedVertList.iterator();
+               while (pItr.hasNext()) {
+                       try {
+                               Vertex tvx =  pItr.next();
+                               String thisVid = tvx.id().toString();
+                               vtxHash.put(thisVid, tvx);
+                               
+                               // if there are more than one vertexId mapping to the same keyProps -- they are dupes
+                               // we dont check till later since a set can contain more than 2.
+                               String hKey = getNodeKeyValString( tvx, keyPropNamesArr );
+                               if( hKey.equals("") ){
+                                       // When we have corrupted data, hKey comes back as an empty string
+                                       // We will just skip this entry since it is not a Dupe - it is
+                                       // corrupted data which should be picked up in other checks.
+                                       continue;
+                               }
+                               if( keyVals2VidHash.containsKey(hKey) ){
+                                       // We've already seen this key 
+                                       ArrayList <String> tmpVL = (ArrayList <String>)keyVals2VidHash.get(hKey);
+                                       tmpVL.add(thisVid);
+                                       keyVals2VidHash.put(hKey, tmpVL);
+                               }
+                               else {
+                                       // First time for this key
+                                       ArrayList <String> tmpVL = new ArrayList <String>();
+                                       tmpVL.add(thisVid);
+                                       keyVals2VidHash.put(hKey, tmpVL);
+                               }
+                       }
+                       catch (Exception e) {
+                               LOGGER.warn(" >>> Threw an error in getDupeSets4NonDepNodes - just absorb this error and move on. ", e);
+                       }
+               }
+                                       
+               for( Map.Entry<String, ArrayList<String>> entry : keyVals2VidHash.entrySet() ){
+                       ArrayList <String> vidList = entry.getValue();
+                       try {
+                               if( !vidList.isEmpty() && vidList.size() > 1 ){
+                                       // There are more than one vertex id's using the same key info
+                                       ArrayList <Vertex> vertList = new ArrayList <Vertex> ();
+                                       for (int i = 0; i < vidList.size(); i++) {
+                                               String tmpVid = vidList.get(i);
+                                               vertList.add(vtxHash.get(tmpVid));
+                                       }
+                                       returnList.add(vertList);
+                               }
+                       } 
+                       catch (Exception e) {
+                               LOGGER.warn(" >>> Threw an error in getDupeSets4NonDepNodes - just absorb this error and move on. ", e);
+                       }
+                       
+               }
+               return returnList;
+
+       }// End of getDupeSets4NonDepNodes()
+       
+       
+       /**
+        * Get values of the key properties for a node as a single string
+        *
+        * @param tvx the vertex to pull the properties from
+        * @param keyPropNamesArr collection of key prop names
+        * @return a String of concatenated values
+        */
+       private String getNodeKeyValString( Vertex tvx,
+                       ArrayList <String> keyPropNamesArr ) {
+               
+               String retString = "";
+               Iterator <String> propItr = keyPropNamesArr.iterator();
+               while( propItr.hasNext() ){
+                       String propName = propItr.next();
+                       if( tvx != null ){
+                               Object propValObj = tvx.property(propName).orElse(null);
+                               if( propValObj == null ){
+                                       LOGGER.warn(" >>> WARNING >>> could not find this key-property for this vertex.  propName = ["
+                                                       + propName + "], VID = " + tvx.id().toString() );
+                               }
+                               else {
+                                       retString = " " + retString + propValObj.toString();
+                               }
+                       } 
+               }
+               return retString;
+       
+       }// End of getNodeKeyValString()        
+       
+       
+       private String findJustOneUsingIndex( String transId, String fromAppId,
+                       GraphTraversalSource gts, HashMap <String,Object> keyPropValsHash, 
+                       String nType, Long vidAL, Long vidBL, String apiVer){
+               
+               // See if querying by JUST the key params (which should be indexed) brings back
+               // ONLY one of the two vertices. Ie. the db still has a pointer to one of them
+               // and the other one is sort of stranded.
+               String returnVid = "";
+               
+               try {
+                       List <Vertex> tmpVertList = getNodeJustUsingKeyParams( transId, fromAppId, gts,
+                                       nType, keyPropValsHash, apiVer );
+                       if( tmpVertList != null && tmpVertList.size() == 1 ){
+                               // We got just one - if it matches one of the ones we're looking
+                               // for, then return that VID
+                               Vertex tmpV = tmpVertList.get(0);
+                               String thisVid = tmpV.id().toString();
+                               if( thisVid.equals(vidAL.toString()) || thisVid.equals(vidBL.toString()) ){
+                                       String msg = " vid = " + thisVid + " is one of two that the DB can retrieve directly ------";
+                                       //System.out.println(msg);
+                                       LOGGER.info(msg);
+                                       returnVid = thisVid;
+                               }
+                       }
+               }
+               catch ( AAIException ae ){
+                       String emsg = "Error trying to get node just by key " + ae.getMessage();
+                       //System.out.println(emsg);
+                       LOGGER.error(emsg);
+               }
+               
+               return returnVid;
+               
+       }// End of findJustOneUsingIndex()
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java b/src/main/java/org/onap/aai/datagrooming/DataGroomingTasks.java
new file mode 100644 (file)
index 0000000..85a127f
--- /dev/null
@@ -0,0 +1,204 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.datagrooming;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+@Component
+@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
+public class DataGroomingTasks {
+       
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DataGroomingTasks.class);
+       private static final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
+
+       @Autowired
+       private LoaderFactory loaderFactory;
+
+       @Autowired
+       private SchemaVersions schemaVersions;
+
+       @Scheduled(cron = "${datagroomingtasks.cron}" )
+       public void groomingScheduleTask() throws AAIException, Exception   {
+
+               LoggingContext.init();
+               LoggingContext.requestId(UUID.randomUUID().toString());
+               LoggingContext.partnerName("AAI");
+               LoggingContext.targetEntity("CronApp");
+               LoggingContext.component("dataGrooming");
+               LoggingContext.serviceName("groomingScheduleTask");
+               LoggingContext.targetServiceName("groomingScheduleTask");
+               LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
+
+
+               if(!"true".equals(AAIConfig.get("aai.disable.check.grooming.running", "false"))){
+                       if(checkIfDataGroomingIsRunning()){
+                               LOGGER.info("Data Grooming is already running on the system");
+                               return;
+                       }
+               }
+
+               LOGGER.info("Started cron job dataGrooming @ " + dateFormat.format(new Date()));
+
+               Map<String, String> dataGroomingFlagMap = new HashMap<>();
+               append("enableautofix" , AAIConfig.get("aai.datagrooming.enableautofix"), dataGroomingFlagMap);
+               append("enabledupefixon" , AAIConfig.get("aai.datagrooming.enabledupefixon"), dataGroomingFlagMap);
+               append("enabledontfixorphans" , AAIConfig.get("aai.datagrooming.enabledontfixorphans"), dataGroomingFlagMap);
+               append("enabletimewindowminutes" , AAIConfig.get("aai.datagrooming.enabletimewindowminutes"), dataGroomingFlagMap);
+               append("enableskiphostcheck" , AAIConfig.get("aai.datagrooming.enableskiphostcheck"), dataGroomingFlagMap);
+               append("enablesleepminutes" , AAIConfig.get("aai.datagrooming.enablesleepminutes"), dataGroomingFlagMap);
+               append("enableedgesonly" , AAIConfig.get("aai.datagrooming.enableedgesonly"), dataGroomingFlagMap);
+               append("enableskipedgechecks" , AAIConfig.get("aai.datagrooming.enableskipedgechecks"), dataGroomingFlagMap);
+               append("enablemaxfix" , AAIConfig.get("aai.datagrooming.enablemaxfix"), dataGroomingFlagMap);
+               append("enablesinglecommits" , AAIConfig.get("aai.datagrooming.enablesinglecommits"), dataGroomingFlagMap);
+               append("enabledupecheckoff" , AAIConfig.get("aai.datagrooming.enabledupecheckoff"), dataGroomingFlagMap);
+               append("enableghost2checkoff" , AAIConfig.get("aai.datagrooming.enableghost2checkoff"), dataGroomingFlagMap);
+               append("enableghost2fixon" , AAIConfig.get("aai.datagrooming.enableghost2fixon"), dataGroomingFlagMap);
+               append("enablef" , AAIConfig.get("aai.datagrooming.enablef"), dataGroomingFlagMap);
+               append("fvalue" , AAIConfig.get("aai.datagrooming.fvalue"), dataGroomingFlagMap);
+               append("timewindowminutesvalue" , AAIConfig.get("aai.datagrooming.timewindowminutesvalue"), dataGroomingFlagMap);
+               append("sleepminutesvalue" , AAIConfig.get("aai.datagrooming.sleepminutesvalue"), dataGroomingFlagMap);
+               append("maxfixvalue" , AAIConfig.get("aai.datagrooming.maxfixvalue"), dataGroomingFlagMap);
+
+               if(LOGGER.isDebugEnabled()){
+                       LOGGER.debug("DataGrooming Flag Values : ");
+                   dataGroomingFlagMap.forEach((key, val) -> LOGGER.debug("Key: {} Value: {}", key, val));
+               }
+
+               List<String> paramsArray  = new ArrayList();
+               try {
+                       if("true".equals(dataGroomingFlagMap.get("enableautofix"))){
+                               paramsArray.add("-autoFix");
+                       }
+                       if("true".equals(dataGroomingFlagMap.get("enabledupefixon"))){
+                               paramsArray.add("-dupeFixOn");
+                       }
+                       if("true".equals(dataGroomingFlagMap.get("enabledontfixorphans"))){
+                               paramsArray.add("-dontFixOrphans");
+                       }
+                       if("true".equals(dataGroomingFlagMap.get("enabletimewindowminutes"))){
+                               paramsArray.add("-timeWindowMinutes");                  
+                               paramsArray.add(dataGroomingFlagMap.get("enabletimewindowminutesvalue"));
+                       }
+                       if("true".equals(dataGroomingFlagMap.get("enableskiphostcheck"))){
+                               paramsArray.add("-skipHostCheck");
+                       }
+
+                       if("true".equals(dataGroomingFlagMap.get("enablesleepminutes"))) {
+                               paramsArray.add("-sleepMinutes");               
+                               paramsArray.add(dataGroomingFlagMap.get("sleepminutesvalue"));
+                       }
+               
+                       if("true".equals(dataGroomingFlagMap.get("enableedgesonly"))){
+                               paramsArray.add("-edgesOnly");
+                       }
+                       if("true".equals(dataGroomingFlagMap.get("enableskipedgechecks"))) {
+                               paramsArray.add("-skipEdgeChecks");
+                       }
+               
+                       if("true".equals(dataGroomingFlagMap.get("enablemaxfix"))) {
+                               paramsArray.add("-maxFix"); 
+                               paramsArray.add(dataGroomingFlagMap.get("maxfixvalue"));
+                       }
+                       if("true".equals(dataGroomingFlagMap.get("enablesinglecommits"))){
+                               paramsArray.add("-singleCommits");
+                       }
+                       if("true".equals(dataGroomingFlagMap.get("enabledupecheckoff"))){
+                               paramsArray.add("-dupeCheckOff");
+                       }
+                       if("true".equals(dataGroomingFlagMap.get("enableghost2checkoff"))){
+                               paramsArray.add("-ghost2CheckOff");
+                       }
+                       if("true".equals(dataGroomingFlagMap.get("enableghost2fixon"))){
+                               paramsArray.add("-ghost2FixOn");
+                       }
+
+                       if("true".equals(dataGroomingFlagMap.get("enablef"))) {
+                               paramsArray.add("-f");
+                               paramsArray.add(dataGroomingFlagMap.get("fvalue"));
+                       }
+                               
+            DataGrooming dataGrooming = new DataGrooming(loaderFactory, schemaVersions);
+            String[] paramsList = paramsArray.toArray(new String[0]);
+            if (AAIConfig.get("aai.cron.enable.dataGrooming").equals("true")) {
+                               dataGrooming.execute(paramsList);
+                               System.out.println("returned from main method ");
+            }
+        }
+               catch (Exception e) {
+            ErrorLogHelper.logError("AAI_4000", "Exception running cron job for dataGrooming"+e.toString());
+            LOGGER.info("AAI_4000", "Exception running cron job for dataGrooming"+e.toString());
+            throw e;
+               } finally {
+                       LOGGER.info("Ended cron job dataGrooming @ " + dateFormat.format(new Date()));
+                       LoggingContext.clear();
+               }
+       }
+
+       private boolean checkIfDataGroomingIsRunning(){
+
+               Process process = null;
+
+               int count = 0;
+               try {
+                       process = new ProcessBuilder().command("bash", "-c", "ps -ef | grep '[D]ataGrooming'").start();
+                       InputStream is = process.getInputStream();
+                       InputStreamReader isr = new InputStreamReader(is);
+                       BufferedReader br = new BufferedReader(isr);
+
+                       while (br.readLine() != null){
+                           count++;
+                       }
+
+                       int exitVal = process.waitFor();
+                       LOGGER.info("Exit value of the dataGrooming check process: " + exitVal);
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+
+               if(count > 0){
+                   return true;
+               } else {
+                       return false;
+               }
+       }
+
+       private void append(String key, String value, Map<String, String> hashMap){
+               hashMap.put(key, value);
+       }
+}
diff --git a/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java b/src/main/java/org/onap/aai/datasnapshot/DataSnapshot.java
new file mode 100644 (file)
index 0000000..12815ee
--- /dev/null
@@ -0,0 +1,835 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
/**
 * ECOMP is a trademark and service mark of AT&T Intellectual Property.
 */
+package org.onap.aai.datasnapshot;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.SequenceInputStream;
+import java.util.*;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+import java.util.concurrent.TimeUnit;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+
+import org.apache.commons.configuration.PropertiesConfiguration;
+
+import org.apache.tinkerpop.gremlin.structure.io.IoCore;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.LegacyGraphSONReader;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.dbmap.AAIGraphConfig;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.AAISystemExitUtil;
+import org.onap.aai.util.FormatDate;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.util.JanusGraphCleanup;
+
/**
 * Command-line tool for working with GraphSON snapshots of the A&AI
 * JanusGraph database.  The first argument selects a command (e.g.
 * JUST_TAKE_SNAPSHOT, THREADED_SNAPSHOT, MULTITHREAD_RELOAD,
 * CLEAR_ENTIRE_DATABASE, or one of the reload commands below).
 */
public class DataSnapshot {

	// Initialized in main() after the EELF logging properties have been set.
	private static EELFLogger LOGGER;
	
	/* Name of the realtime graph database instance. */
	private static final String REALTIME_DB = "realtime";

	// Commands that re-load a previously taken snapshot; these expect the
	// snapshot file name as the second command-line argument.
	private static final Set<String> SNAPSHOT_RELOAD_COMMANDS = new HashSet<>();

	static {
	    SNAPSHOT_RELOAD_COMMANDS.add("RELOAD_LEGACY_DATA");
		SNAPSHOT_RELOAD_COMMANDS.add("RELOAD_DATA");
		SNAPSHOT_RELOAD_COMMANDS.add("RELOAD_DATA_MULTI");
	}
+       
+       
+       /**
+        * The main method.
+        *
+        * @param args
+        *            the arguments
+        */
+       public static void main(String[] args) {
+
+           boolean success = true;
+
+               // Set the logging file properties to be used by EELFManager
+               System.setProperty("aai.service.name", DataSnapshot.class.getSimpleName());
+               Properties props = System.getProperties();
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS);
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
+               LOGGER = EELFManager.getInstance().getLogger(DataSnapshot.class);
+               Boolean dbClearFlag = false;
+               JanusGraph graph = null;
+               String command = "JUST_TAKE_SNAPSHOT"; // This is the default
+               String oldSnapshotFileName = "";
+               
+               Long vertAddDelayMs = 1L;   // Default value
+               Long edgeAddDelayMs = 1L;   // Default value
+               
+               Long failureDelayMs = 50L;  // Default value
+               Long retryDelayMs = 1500L;  // Default value
+               int maxErrorsPerThread = 25; // Default value
+               Long vertToEdgeProcDelay = 9000L; // Default value 
+               Long staggerThreadDelay = 5000L;  // Default value
+
+               int threadCount = 0;
+               Boolean debugFlag = false;
+               int debugAddDelayTime = 1;  // Default to 1 millisecond
+
+               boolean isExistingTitan = false;
+               
+               if (args.length >= 1) {
+                       command = args[0];
+               }
+                       
+               if( SNAPSHOT_RELOAD_COMMANDS.contains(command)){
+                       if (args.length == 2) {
+                               // If re-loading, they need to also pass the snapshot file name to use.
+                               // We expected the file to be found in our snapshot directory.
+                               oldSnapshotFileName = args[1];
+                       }
+               }
+               else if( command.equals("THREADED_SNAPSHOT") ){
+                       if (args.length == 2) {
+                               // If doing a "threaded" snapshot, they need to specify how many threads to use
+                               try {
+                                       threadCount = Integer.parseInt(args[1]);
+                               }
+                               catch ( NumberFormatException nfe ){
+                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       LOGGER.debug("Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+                               if( threadCount < 1 || threadCount > 100 ){
+                                       ErrorLogHelper.logError("AAI_6128", "Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       LOGGER.debug("Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+                               LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount );
+                       }
+                       else if (args.length == 3) {
+                               // If doing a "threaded" snapshot, they need to specify how many threads to use
+                               // They can also use debug mode if they pass the word "DEBUG" to do the nodes one at a time to see where it breaks.
+                               try {
+                                       threadCount = Integer.parseInt(args[1]);
+                               }
+                               catch ( NumberFormatException nfe ){
+                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       LOGGER.debug("Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+                               if( threadCount < 1 || threadCount > 100 ){
+                                       ErrorLogHelper.logError("AAI_6128", "Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       LOGGER.debug("Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+                               if( args[2].equals("DEBUG") ){
+                                       debugFlag = true;
+                               }
+                               LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount + 
+                                               ", and DEBUG mode set ON. ");
+                       }
+                       else if (args.length == 4) {
+                               // If doing a "threaded" snapshot, they need to specify how many threads to use (param 1)
+                               // They can also use debug mode if they pass the word "DEBUG" to do the nodes one (param 2)
+                               // They can also pass a delayTimer - how many milliseconds to put between each node's ADD (param 3)
+                               try {
+                                       threadCount = Integer.parseInt(args[1]);
+                               }
+                               catch ( NumberFormatException nfe ){
+                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       LOGGER.debug("Bad (non-integer) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+                               if( threadCount < 1 || threadCount > 100 ){
+                                       ErrorLogHelper.logError("AAI_6128", "Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       LOGGER.debug("Out of range (1-100) threadCount passed to DataSnapshot [" + args[1] + "]");
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+                               if( args[2].equals("DEBUG") ){
+                                       debugFlag = true;
+                               }
+                               try {
+                                       debugAddDelayTime = Integer.parseInt(args[3]);
+                               }
+                               catch ( NumberFormatException nfe ){
+                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) debugAddDelayTime passed to DataSnapshot [" + args[3] + "]");
+                                       LOGGER.debug("Bad (non-integer) debugAddDelayTime passed to DataSnapshot [" + args[3] + "]");
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+                               LOGGER.debug(" Will do Threaded Snapshot with threadCount = " + threadCount + 
+                                               ", DEBUG mode ON and addDelayTimer = " + debugAddDelayTime + " mSec. ");
+                       }
+                       else {
+                               ErrorLogHelper.logError("AAI_6128", "Wrong param count (should be 2,3 or 4) when using THREADED_SNAPSHOT.");
+                               LOGGER.debug("Wrong param count (should be 2,3 or 4) when using THREADED_SNAPSHOT.");
+                               AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                       }
+               }
+               else if( command.equals("MULTITHREAD_RELOAD") ){
+                       // Note - this will use as many threads as the snapshot file is 
+                       //   broken up into.  (up to a limit)
+                       if (args.length == 2) {
+                               // Since they are re-loading, they need to pass the snapshot file name to use.
+                               // We expected the file to be found in our snapshot directory.  Note - if
+                               // it is a multi-part snapshot, then this should be the root of the name.
+                               // We will be using the default delay timers.
+                               oldSnapshotFileName = args[1];
+                       }
+                       else if (args.length == 7) {
+                               // Since they are re-loading, they need to pass the snapshot file name to use.
+                               // We expected the file to be found in our snapshot directory.  Note - if
+                               // it is a multi-part snapshot, then this should be the root of the name.
+                               oldSnapshotFileName = args[1];
+                               // They should be passing the timers in in this order:
+                               //    vertDelay, edgeDelay, failureDelay, retryDelay
+                               vertAddDelayMs = Long.parseLong(args[2]);
+                               edgeAddDelayMs = Long.parseLong(args[3]);
+                               failureDelayMs = Long.parseLong(args[4]);
+                               retryDelayMs = Long.parseLong(args[5]);
+                               try {
+                                       maxErrorsPerThread = Integer.parseInt(args[6]);
+                               }
+                               catch ( NumberFormatException nfe ){
+                                       ErrorLogHelper.logError("AAI_6128", "Bad (non-integer) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]");
+                                       LOGGER.debug("Bad (non-integer) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]");
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+                               if( maxErrorsPerThread < 1  ){
+                                       ErrorLogHelper.logError("AAI_6128", "Out of range (>0) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]");
+                                       LOGGER.debug("Out of range (>0) maxErrorsPerThread passed to DataSnapshot [" + args[6] + "]");
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+                       }
+                       else {
+                               ErrorLogHelper.logError("AAI_6128", "Wrong param count (should be either 2 or 7) when using MUTLITHREAD_RELOAD.");
+                               LOGGER.debug("Wrong param count (should be 2 or 7) when using MUTLITHREAD_RELOAD.");
+                               AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                       }
+               }
+               else if (command.equals("CLEAR_ENTIRE_DATABASE")) {
+                       if (args.length >= 2) {
+                               oldSnapshotFileName = args[1];
+                       }
+                       if (args.length == 3) {
+                               String titanFlag = args[2];
+                               if ("titan".equalsIgnoreCase(titanFlag)) {
+                                       isExistingTitan = true;
+                               }
+                       }
+               }
+
+               ByteArrayOutputStream baos = new ByteArrayOutputStream();
+               try {
+                       
+                       AAIConfig.init();
+                       ErrorLogHelper.loadProperties();
+                       LOGGER.debug("Command = " + command + ", oldSnapshotFileName = [" + oldSnapshotFileName + "].");
+                       String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "dataSnapshots";
+
+                       // Make sure the dataSnapshots directory is there
+                       new File(targetDir).mkdirs();
+
+                       LOGGER.debug("    ---- NOTE --- about to open graph (takes a little while) ");
+                       
+                       if (command.equals("JUST_TAKE_SNAPSHOT")) {
+                               // ------------------------------------------
+                               // They just want to take a snapshot.
+                               // ------------------------------------------
+                               verifyGraph(AAIGraph.getInstance().getGraph());
+                               FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
+                               String dteStr = fd.getDateTime();
+                               String newSnapshotOutFname = targetDir + AAIConstants.AAI_FILESEP + "dataSnapshot.graphSON." + dteStr;
+                               graph = AAIGraph.getInstance().getGraph();
+
+                               graph.io(IoCore.graphson()).writeGraph(newSnapshotOutFname);
+
+                               LOGGER.debug("Snapshot written to " + newSnapshotOutFname);
+       
+                       }       
+                       else if (command.equals("THREADED_SNAPSHOT")) {
+                                       // ---------------------------------------------------------------------
+                                       // They want the creation of the snapshot to be spread out via threads
+                                       // ---------------------------------------------------------------------
+                                       
+                                       FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
+                                       String dteStr = fd.getDateTime();
+                                       String newSnapshotOutFname = targetDir + AAIConstants.AAI_FILESEP + "dataSnapshot.graphSON." + dteStr;
+                                       verifyGraph(AAIGraph.getInstance().getGraph());
+                                       graph = AAIGraph.getInstance().getGraph();
+                                       LOGGER.debug(" Successfully got the Graph instance. ");
+                                       long timeA = System.nanoTime();
+
+                                       LOGGER.debug(" Need to divide vertexIds across this many threads: " + threadCount );
+                                       HashMap <String,ArrayList> vertListHash = new HashMap <String,ArrayList> ();
+                                       for( int t = 0; t < threadCount; t++ ){
+                                               ArrayList <Vertex> vList = new ArrayList <Vertex> ();
+                                               String tk = "" + t;
+                                               vertListHash.put( tk, vList);
+                                       }
+                                       LOGGER.debug("Count how many nodes are in the db. ");
+                                       long totalVertCount = graph.traversal().V().count().next();
+                                       LOGGER.debug(" Total Count of Nodes in DB = " + totalVertCount + ".");
+                                       long nodesPerFile = totalVertCount / threadCount;
+                                       LOGGER.debug(" Thread count = " + threadCount + ", each file will get (roughly): " + nodesPerFile + " nodes.");
+                                       long timeA2 = System.nanoTime();
+                                       long diffTime =  timeA2 - timeA;
+                                       long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
+                                       long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
+                                       LOGGER.debug("    -- To count all vertices in DB it took: " +
+                                                       minCount + " minutes, " + secCount + " seconds " );
+                                       
+                                       long vtxIndex = 0;
+                                       int currentTNum = 0; 
+                                       String currentTKey = "0";
+                                       long thisThrIndex = 0;
+                                       Iterator <Vertex> vtxItr = graph.vertices();
+                                       while( vtxItr.hasNext() ){
+                                               // Divide up all the vertices so we can process them on different threads
+                                               vtxIndex++;
+                                               thisThrIndex++;
+                                               if( (thisThrIndex > nodesPerFile) && (currentTNum < threadCount -1) ){
+                                                       // We will need to start adding to the Hash for the next thread
+                                                       currentTNum++;
+                                                       currentTKey = "" + currentTNum;
+                                                       thisThrIndex = 0;
+                                               }
+                                               (vertListHash.get(currentTKey)).add(vtxItr.next());
+                                       }
+                                       
+                                       long timeB = System.nanoTime();
+                                       diffTime =  timeB - timeA2;
+                                       minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
+                                       secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
+                                       LOGGER.debug("    -- To Loop over all vertices, and put them into sub-Arrays it took: " +
+                                                       minCount + " minutes, " + secCount + " seconds " );
+                                       
+                                       // Need to print out each set of vertices using it's own thread
+                                       ArrayList <Thread> threadArr = new ArrayList <Thread> ();
+                                       for( int thNum = 0; thNum < threadCount; thNum++ ){
+                                               String thNumStr = "" + thNum;
+                                               String subFName = newSnapshotOutFname + ".P" + thNumStr;
+                                               Thread thr = new Thread(new PrintVertexDetails(graph, subFName, vertListHash.get(thNumStr),
+                                                               debugFlag, debugAddDelayTime) );
+                                               thr.start();
+                                               threadArr.add(thr);
+                                       }
+                                       
+                                       // Make sure all the threads finish before moving on.
+                                       for( int thNum = 0; thNum < threadCount; thNum++ ){
+                                               if( null != threadArr.get(thNum) ){
+                                                       (threadArr.get(thNum)).join();
+                                               }
+                                       }
+                                       
+                                       long timeC = System.nanoTime();
+                                       diffTime =  timeC - timeB;
+                                       minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
+                                       secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
+                                       LOGGER.debug("   -- To write all the data out to snapshot files, it took: " +
+                                                       minCount + " minutes, " + secCount + " seconds " );
+                       
+                                       
+                       } else if( command.equals("MULTITHREAD_RELOAD") ){              
+                               // ---------------------------------------------------------------------
+                               // They want the RELOAD of the snapshot to be spread out via threads
+                               // NOTE - it will only use as many threads as the number of files the
+                               //    snapshot is  written to.  Ie. if you have a single-file snapshot,
+                               //    then this will be single-threaded.
+                               //      
+                               ArrayList <File> snapFilesArr = getFilesToProcess(targetDir, oldSnapshotFileName, false);
+                               int fCount = snapFilesArr.size();
+                               Iterator <File> fItr = snapFilesArr.iterator();
+                               
+                               JanusGraph graph1 = AAIGraph.getInstance().getGraph();
+                               long timeStart = System.nanoTime();
+                               
+                               HashMap <String,String> old2NewVertIdMap = new <String,String> HashMap ();
+                               
+                                       // We're going to try loading in the vertices - without edges or properties
+                                       //    using Separate threads
+                                       
+                                       ExecutorService executor = Executors.newFixedThreadPool(fCount);
+                                       List<Future<HashMap<String,String>>> list = new ArrayList<Future<HashMap<String,String>>>();
+                                       
+                                       for( int i=0; i < fCount; i++ ){
+                                               File f = snapFilesArr.get(i);
+                                               String fname = f.getName();
+                                               String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname;
+                                               Thread.sleep(staggerThreadDelay);  // Stagger the threads a bit
+                                               LOGGER.debug(" -- Read file: [" + fullSnapName + "]");
+                                               LOGGER.debug(" -- Call the PartialVertexLoader to just load vertices  ----");
+                                               LOGGER.debug(" -- vertAddDelayMs = " + vertAddDelayMs 
+                                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs 
+                                                               + ", maxErrorsPerThread = " + maxErrorsPerThread );
+                                               Callable <HashMap<String,String>> vLoader = new PartialVertexLoader(graph1, fullSnapName, 
+                                                               vertAddDelayMs, failureDelayMs, retryDelayMs, maxErrorsPerThread, LOGGER);
+                                               Future <HashMap<String,String>> future = (Future<HashMap<String, String>>) executor.submit(vLoader);
+                                               
+                                               // add Future to the list, we can get return value using Future
+                                               list.add(future);
+                                               LOGGER.debug(" --  Starting PartialDbLoad VERT_ONLY thread # "+ i );
+                                       }
+                                       
+                                       threadCount = 0;
+                                       int threadFailCount = 0;
+                                       for(Future<HashMap<String,String>> fut : list){
+                               threadCount++;
+                               try {
+                                       old2NewVertIdMap.putAll(fut.get());
+                                       LOGGER.debug(" -- back from PartialVertexLoader.  returned thread # " + threadCount +
+                                                       ", current size of old2NewVertMap is: " + old2NewVertIdMap.size() );
+                               } 
+                               catch (InterruptedException e) {  
+                                       threadFailCount++;
+                                       e.printStackTrace();
+                               } 
+                               catch (ExecutionException e) {
+                                       threadFailCount++;
+                                       e.printStackTrace();
+                               }
+                           }                       
+                                       
+                                       executor.shutdown();
+                                       
+                                       if( threadFailCount > 0 ) {
+                                               String emsg = " FAILURE >> " + threadFailCount + " Vertex-loader thread(s) failed to complete successfully.  ";
+                                               LOGGER.debug(emsg);
+                                               throw new Exception( emsg );
+                                       }
+                                       
+                                       long timeX = System.nanoTime();
+                                       long diffTime =  timeX - timeStart;
+                                       long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
+                                       long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
+                                       LOGGER.debug("   -- To reload just the vertex ids from the snapshot files, it took: " +
+                                                       minCount + " minutes, " + secCount + " seconds " );
+                                                       
+                                       // Give the DB a little time to chew on all those vertices
+                                       Thread.sleep(vertToEdgeProcDelay);
+                                       
+                                       // ----------------------------------------------------------------------------------------
+                                       LOGGER.debug("\n\n\n  -- Now do the edges/props ----------------------");
+                                       // ----------------------------------------------------------------------------------------
+                                       
+                                                               
+                                       // We're going to try loading in the edges and missing properties
+                                       // Note - we're passing the whole oldVid2newVid mapping to the PartialPropAndEdgeLoader
+                                       //     so that the String-updates to the GraphSON will happen in the threads instead of
+                                       //     here in the un-threaded calling method.
+                                       executor = Executors.newFixedThreadPool(fCount);        
+                                       ArrayList<Future<ArrayList<String>>> listEdg = new ArrayList<Future<ArrayList<String>>>();
+                                       for( int i=0; i < fCount; i++ ){
+                                               File f = snapFilesArr.get(i);
+                                               String fname = f.getName();
+                                               String fullSnapName = targetDir + AAIConstants.AAI_FILESEP + fname;
+                                               Thread.sleep(staggerThreadDelay);  // Stagger the threads a bit
+                                               LOGGER.debug(" -- Read file: [" + fullSnapName + "]");
+                                               LOGGER.debug(" -- Call the PartialPropAndEdgeLoader for Properties and EDGEs  ----");
+                                               LOGGER.debug(" -- edgeAddDelayMs = " + vertAddDelayMs 
+                                                               + ", failureDelayMs = " + failureDelayMs + ", retryDelayMs = " + retryDelayMs 
+                                                               + ", maxErrorsPerThread = " + maxErrorsPerThread );
+                                               
+                                               Callable  eLoader = new PartialPropAndEdgeLoader(graph1, fullSnapName, 
+                                                               edgeAddDelayMs, failureDelayMs, retryDelayMs, 
+                                                               old2NewVertIdMap, maxErrorsPerThread, LOGGER);
+                                               Future <ArrayList<String>> future = (Future<ArrayList<String>>) executor.submit(eLoader);
+                                               
+                                               //add Future to the list, we can get return value using Future
+                                               listEdg.add(future);
+                                               LOGGER.debug(" --  Starting PartialPropAndEdge thread # "+ i );
+                                       }
+                                               
+                                       threadCount = 0;
+                                       for(Future<ArrayList<String>> fut : listEdg){
+                                   threadCount++;
+                                   try{
+                                       fut.get();  // DEBUG -- should be doing something with the return value if it's not empty - ie. errors
+                                       LOGGER.debug(" -- back from PartialPropAndEdgeLoader.  thread # " + threadCount  );
+                                   } 
+                                               catch (InterruptedException e) {  
+                                                       threadFailCount++;
+                                                       e.printStackTrace();
+                                               } 
+                                               catch (ExecutionException e) {
+                                                       threadFailCount++;
+                                                       e.printStackTrace();
+                                               }
+                                       }   
+                                       
+                                       executor.shutdown();
+                                                                       
+                                       if( threadFailCount > 0 ) {
+                                               String emsg = " FAILURE >> " + threadFailCount + " Property/Edge-loader thread(s) failed to complete successfully.  ";
+                                               LOGGER.debug(emsg);
+                                               throw new Exception( emsg );
+                                       }
+                                       
+                                       // This is needed so we can see the data committed by the called threads
+                                       graph1.tx().commit();
+                                        
+                                       long timeEnd = System.nanoTime();
+                                       diffTime =  timeEnd - timeX;
+                                       minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
+                                       secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
+                                       LOGGER.debug("   -- To reload the edges and properties from snapshot files, it took: " +
+                                                       minCount + " minutes, " + secCount + " seconds " );
+                                       
+                                       long totalDiffTime =  timeEnd - timeStart;
+                                       long totalMinCount = TimeUnit.NANOSECONDS.toMinutes(totalDiffTime);
+                                       long totalSecCount = TimeUnit.NANOSECONDS.toSeconds(totalDiffTime) - (60 * totalMinCount);
+                                       LOGGER.debug("   -- TOTAL multi-threaded reload time: " +
+                                                       totalMinCount + " minutes, " + totalSecCount + " seconds " );
+                                       
+                       } else if (command.equals("CLEAR_ENTIRE_DATABASE")) {
+                               // ------------------------------------------------------------------
+                               // They are calling this to clear the db before re-loading it
+                               // later
+                               // ------------------------------------------------------------------
+
+                               // First - make sure the backup file(s) they will be using can be
+                               // found and has(have) data.
+                               // getFilesToProcess makes sure the file(s) exist and have some data.
+                               getFilesToProcess(targetDir, oldSnapshotFileName, true);
+                               
+                               LOGGER.debug("\n>>> WARNING <<<< ");
+                               LOGGER.debug(">>> All data and schema in this database will be removed at this point. <<<");
+                               LOGGER.debug(">>> Processing will begin in 5 seconds. <<<");
+                               LOGGER.debug(">>> WARNING <<<< ");
+
+                               try {
+                                       // Give them a chance to back out of this
+                                       Thread.sleep(5000);
+                               } catch (java.lang.InterruptedException ie) {
+                                       LOGGER.debug(" DB Clearing has been aborted. ");
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+
+                               LOGGER.debug(" Begin clearing out old data. ");
+                               String rtConfig = AAIConstants.REALTIME_DB_CONFIG;
+                               String serviceName = System.getProperty("aai.service.name", "NA");
+                               LOGGER.debug("Getting new configs for clearig");
+                               PropertiesConfiguration propertiesConfiguration = new AAIGraphConfig.Builder(rtConfig).forService(serviceName).withGraphType(REALTIME_DB).buildConfiguration();
+                               if(isExistingTitan){
+                                       LOGGER.debug("Existing DB is Titan");
+                                       propertiesConfiguration.setProperty("graph.titan-version","1.0.0");
+                               }
+                               LOGGER.debug("Open New Janus Graph");
+                               JanusGraph janusGraph = JanusGraphFactory.open(propertiesConfiguration);
+                               verifyGraph(janusGraph);
+
+                               if(isExistingTitan){
+                                       JanusGraphFactory.drop(janusGraph);
+                               } else {
+                                       janusGraph.close();
+                                       JanusGraphCleanup.clear(janusGraph);
+                               }
+                               LOGGER.debug(" Done clearing data. ");
+                               LOGGER.debug(">>> IMPORTANT - NOTE >>> you need to run the SchemaGenerator (use GenTester) before ");
+                               LOGGER.debug("     reloading data or the data will be put in without indexes. ");
+                               dbClearFlag = true;
+                               LOGGER.debug("All done clearing DB");
+                               
+                       } else if (command.equals("RELOAD_LEGACY_DATA")) {
+                               // -------------------------------------------------------------------
+                               // They want to restore the database from an old snapshot file
+                               // -------------------------------------------------------------------
+                               verifyGraph(AAIGraph.getInstance().getGraph());
+                               graph = AAIGraph.getInstance().getGraph();
+                               if (oldSnapshotFileName.equals("")) {
+                                       String emsg = "No oldSnapshotFileName passed to DataSnapshot when RELOAD_LEGACY_DATA used.";
+                                       LOGGER.debug(emsg);
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+                               String oldSnapshotFullFname = targetDir + AAIConstants.AAI_FILESEP + oldSnapshotFileName;
+                               File f = new File(oldSnapshotFullFname);
+                               if (!f.exists()) {
+                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be found.";
+                                       LOGGER.debug(emsg);
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               } else if (!f.canRead()) {
+                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be read.";
+                                       LOGGER.debug(emsg);
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               } else if (f.length() == 0) {
+                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " had no data.";
+                                       LOGGER.debug(emsg);
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+
+                               LOGGER.debug("We will load data IN from the file = " + oldSnapshotFullFname);
+                               LOGGER.debug(" Begin reloading JanusGraph 0.5 data. ");
+                               
+                               LegacyGraphSONReader lgr = LegacyGraphSONReader.build().create();
+                               InputStream is = new FileInputStream(oldSnapshotFullFname);
+                               lgr.readGraph(is, graph);
+                               
+                               LOGGER.debug("Completed the inputGraph command, now try to commit()... ");
+                               graph.tx().commit();
+                               LOGGER.debug("Completed reloading JanusGraph 0.5 data.");
+
+                               long vCount = graph.traversal().V().count().next();
+                               LOGGER.debug("A little after repopulating from an old snapshot, we see: " + vCount + " vertices in the db.");
+                       } else if (command.equals("RELOAD_DATA")) {
+                               // -------------------------------------------------------------------
+                               // They want to restore the database from an old snapshot file
+                               // -------------------------------------------------------------------
+                               verifyGraph(AAIGraph.getInstance().getGraph());
+                               graph = AAIGraph.getInstance().getGraph();
+                               if (oldSnapshotFileName.equals("")) {
+                                       String emsg = "No oldSnapshotFileName passed to DataSnapshot when RELOAD_DATA used.";
+                                       LOGGER.debug(emsg);
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+                               String oldSnapshotFullFname = targetDir + AAIConstants.AAI_FILESEP + oldSnapshotFileName;
+                               File f = new File(oldSnapshotFullFname);
+                               if (!f.exists()) {
+                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be found.";
+                                       LOGGER.debug(emsg);
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               } else if (!f.canRead()) {
+                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be read.";
+                                       LOGGER.debug(emsg);
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               } else if (f.length() == 0) {
+                                       String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " had no data.";
+                                       LOGGER.debug(emsg);
+                                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                               }
+
+                               LOGGER.debug("We will load data IN from the file = " + oldSnapshotFullFname);
+                               LOGGER.debug(" Begin reloading data. ");
+                               graph.io(IoCore.graphson()).readGraph(oldSnapshotFullFname);
+                               LOGGER.debug("Completed the inputGraph command, now try to commit()... ");
+                               graph.tx().commit();
+                               LOGGER.debug("Completed reloading data.");
+
+                               long vCount = graph.traversal().V().count().next();
+                               
+                               LOGGER.debug("A little after repopulating from an old snapshot, we see: " + vCount + " vertices in the db.");
+                               
+                       } else if (command.equals("RELOAD_DATA_MULTI")) {
+                               // -------------------------------------------------------------------
+                               // They want to restore the database from a group of snapshot files
+                               // Note - this uses multiple snapshot files, but runs single-threaded.
+                               // -------------------------------------------------------------------
+                               verifyGraph(AAIGraph.getInstance().getGraph());
+                               graph = AAIGraph.getInstance().getGraph();
+                               
+                               ArrayList <File> snapFilesArr = getFilesToProcess(targetDir, oldSnapshotFileName, false);
+                               
+                               long timeA = System.nanoTime();
+                               
+                               int fCount = snapFilesArr.size();
+                               Iterator <File> fItr = snapFilesArr.iterator();
+                               Vector<InputStream> inputStreamsV = new Vector<>();                  
+                               for( int i = 0; i < fCount; i++ ){
+                                       File f = snapFilesArr.get(i);
+                                       String fname = f.getName();
+                                       if (!f.canRead()) {
+                                               String emsg = "oldSnapshotFile " + fname + " could not be read.";
+                                               LOGGER.debug(emsg);
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                                       } else if (f.length() == 0) {
+                                               String emsg = "oldSnapshotFile " + fname + " had no data.";
+                                               LOGGER.debug(emsg);
+                                               AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                                       }
+                                       String fullFName = targetDir + AAIConstants.AAI_FILESEP + fname;
+                                       InputStream fis = new FileInputStream(fullFName);
+                                       inputStreamsV.add(fis);
+                               }
+                               // Now add inputStreams.elements() to the Vector,
+                           // inputStreams.elements() will return Enumerations
+                           InputStream sis = new SequenceInputStream(inputStreamsV.elements());
+                           LOGGER.debug("Begin loading data from " + fCount + " files  -----");
+                               graph.io(IoCore.graphson()).reader().create().readGraph(sis, graph);  
+                               LOGGER.debug("Completed the inputGraph command, now try to commit()... ");
+                               graph.tx().commit();
+                               LOGGER.debug(" >> Completed reloading data.");
+                               
+                               long vCount = graph.traversal().V().count().next();
+                               LOGGER.debug("A little after repopulating from an old snapshot, we see: " + vCount + " vertices in the db.");
+                               
+                               long timeB = System.nanoTime();
+                               long diffTime =  timeB - timeA;
+                               long minCount = TimeUnit.NANOSECONDS.toMinutes(diffTime);
+                               long secCount = TimeUnit.NANOSECONDS.toSeconds(diffTime) - (60 * minCount);
+                               LOGGER.debug("    -- To Reload this snapshot, it took: " +
+                                               minCount + " minutes, " + secCount + " seconds " );
+                               
+                               
+                       } else {
+                               String emsg = "Bad command passed to DataSnapshot: [" + command + "]";
+                               LOGGER.debug(emsg);
+                               AAISystemExitUtil.systemExitCloseAAIGraph(1);
+                       }
+
+               } catch (AAIException e) {
+                       ErrorLogHelper.logError("AAI_6128", e.getMessage());
+                       LOGGER.error("Encountered an exception during the datasnapshot: ", e);
+                       e.printStackTrace();
+                       success = false;
+               } catch (Exception ex) {
+                       ErrorLogHelper.logError("AAI_6128", ex.getMessage());
+                       LOGGER.error("Encountered an exception during the datasnapshot: ", ex);
+                       ex.printStackTrace();
+                       success = false;
+               } finally {
+                       if (!dbClearFlag && graph != null) {
+                               // Any changes that worked correctly should have already done
+                               // thier commits.
+                               if(!"true".equals(System.getProperty("org.onap.aai.graphadmin.started"))) {
+                                       if (graph.isOpen()) {
+                                               graph.tx().rollback();
+                                               graph.close();
+                                       }
+                               }
+                       }
+                       try {
+                               baos.close();
+                       } catch (IOException iox) {
+                       }
+               }
+
+               if(success){
+                       AAISystemExitUtil.systemExitCloseAAIGraph(0);
+               } else {
+                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+               }
+
+       }// End of main()
+       
+       
+	/**
+	 * Find the snapshot file(s) on disk that correspond to the passed snapshot name.
+	 *
+	 * First assumes a multi-file snapshot and collects every file in targetDir whose
+	 * name starts with "&lt;oldSnapshotFileName&gt;.P"; if none match, falls back to a
+	 * single file named exactly oldSnapshotFileName.  Every returned file is
+	 * verified to be readable and non-empty.
+	 *
+	 * @param targetDir directory where snapshot files live
+	 * @param oldSnapshotFileName base name of the snapshot to look for
+	 * @param doingClearDb true when we are about to clear the db (only changes the
+	 *        error message used when no file name was passed)
+	 * @return non-empty list of snapshot files to process
+	 * @throws Exception if no usable snapshot file(s) can be found
+	 */
+	private static ArrayList <File> getFilesToProcess(String targetDir, String oldSnapshotFileName, boolean doingClearDb)
+		throws Exception {
+
+		if( oldSnapshotFileName == null || oldSnapshotFileName.equals("") ){
+			String emsg = "No oldSnapshotFileName passed to DataSnapshot for Reload.  ";
+			if( doingClearDb ) {
+				emsg = "No oldSnapshotFileName passed to DataSnapshot. Needed when Clearing the db in case we need a backup.  ";
+			}
+			LOGGER.debug(emsg);
+			throw new Exception( emsg );
+		}
+
+		ArrayList <File> snapFilesArrList = new ArrayList <File> ();
+
+		// First, we'll assume that this is a multi-file snapshot and
+		//    look for names based on that.
+		String thisSnapPrefix = oldSnapshotFileName + ".P";
+		File fDir = new File(targetDir); // Snapshot directory
+		File[] allFilesArr = fDir.listFiles();
+		if( allFilesArr == null ){
+			// listFiles() returns null when targetDir does not exist or is not a
+			// directory -- fail with a clear message instead of a NullPointerException.
+			String emsg = "Snapshot directory " + targetDir + " could not be read.";
+			LOGGER.debug(emsg);
+			throw new Exception( emsg );
+		}
+		for (File snapFile : allFilesArr) {
+			String snapFName = snapFile.getName();
+			if( snapFName.startsWith(thisSnapPrefix)){
+				if (!snapFile.canRead()) {
+					String emsg = "oldSnapshotFile " + snapFName + " could not be read.";
+					LOGGER.debug(emsg);
+					throw new Exception (emsg);
+				} else if (snapFile.length() == 0) {
+					String emsg = "oldSnapshotFile " + snapFName + " had no data.";
+					LOGGER.debug(emsg);
+					throw new Exception (emsg);
+				}
+				snapFilesArrList.add(snapFile);
+			}
+		}
+
+		if( snapFilesArrList.isEmpty() ){
+			// Multi-file snapshot check did not find files, so this may
+			//   be a single-file snapshot.
+			String oldSnapshotFullFname = targetDir + AAIConstants.AAI_FILESEP + oldSnapshotFileName;
+			File f = new File(oldSnapshotFullFname);
+			if (!f.exists()) {
+				String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be found.";
+				LOGGER.debug(emsg);
+				throw new Exception (emsg);
+			} else if (!f.canRead()) {
+				String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " could not be read.";
+				LOGGER.debug(emsg);
+				throw new Exception (emsg);
+			} else if (f.length() == 0) {
+				String emsg = "oldSnapshotFile " + oldSnapshotFullFname + " had no data.";
+				LOGGER.debug(emsg);
+				throw new Exception (emsg);
+			}
+			snapFilesArrList.add(f);
+		}
+
+		// Defensive final guard: by this point the single-file branch has either
+		// added a file or thrown, but keep the check so callers can rely on a
+		// non-empty result.
+		if( snapFilesArrList.isEmpty() ){
+			String fullFName = targetDir + AAIConstants.AAI_FILESEP + thisSnapPrefix;
+			String emsg = "oldSnapshotFile " + fullFName + "* could not be found.";
+			LOGGER.debug(emsg);
+			throw new Exception(emsg);
+		}
+
+		return snapFilesArrList;
+	}
+       
+       
+       public static void verifyGraph(JanusGraph graph) {
+
+               if (graph == null) {
+                       String emsg = "Not able to get a graph object in DataSnapshot.java\n";
+                       LOGGER.debug(emsg);
+                       AAISystemExitUtil.systemExitCloseAAIGraph(1);
+               }
+
+       }
+
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java b/src/main/java/org/onap/aai/datasnapshot/DataSnapshotTasks.java
new file mode 100644 (file)
index 0000000..cc9ca97
--- /dev/null
@@ -0,0 +1,115 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.datasnapshot;
+
+import java.io.BufferedReader;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+import org.onap.aai.datagrooming.DataGrooming;
+import org.onap.aai.datagrooming.DataGroomingTasks;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.util.AAIConfig;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+@Component
+@PropertySource("file:${server.local.startpath}/etc/appprops/datatoolscrons.properties")
+public class DataSnapshotTasks {
+
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DataSnapshotTasks.class);
+       private static final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
+       
+       @Scheduled(cron = "${datasnapshottasks.cron}" )
+       public void snapshotScheduleTask() throws AAIException, Exception {
+
+               LoggingContext.init();
+               LoggingContext.requestId(UUID.randomUUID().toString());
+               LoggingContext.partnerName("AAI");
+               LoggingContext.targetEntity("CronApp");
+               LoggingContext.component("dataSnapshot");
+               LoggingContext.serviceName("snapshotScheduleTask");
+               LoggingContext.targetServiceName("snapshotScheduleTask");
+               LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
+
+               if(!"true".equals(AAIConfig.get("aai.disable.check.snapshot.running", "false"))){
+                       if(checkIfDataSnapshotIsRunning()){
+                               LOGGER.info("Data Snapshot is already running on the system");
+                               return;
+                       }
+               }
+
+               LOGGER.info("Started cron job dataSnapshot @ " + dateFormat.format(new Date()));
+               try {
+                       if (AAIConfig.get("aai.cron.enable.dataSnapshot").equals("true")) {
+                               DataSnapshot dataSnapshot = new DataSnapshot();
+                               String [] dataSnapshotParms = AAIConfig.get("aai.datasnapshot.params",  "JUST_TAKE_SNAPSHOT").split("\\s+");
+                               LOGGER.info("DataSnapshot Params {}", Arrays.toString(dataSnapshotParms));
+                               dataSnapshot.main(dataSnapshotParms);
+                       }
+               }
+               catch (Exception e) {
+                       ErrorLogHelper.logError("AAI_4000", "Exception running cron job for DataSnapshot"+e.toString());
+                       LOGGER.info("AAI_4000", "Exception running cron job for DataSnapshot"+e.toString());
+                       throw e;
+               } finally {
+                       LOGGER.info("Ended cron job dataSnapshot @ " + dateFormat.format(new Date()));
+                       LoggingContext.clear();
+               }
+
+       }
+
+       private boolean checkIfDataSnapshotIsRunning(){
+
+               Process process = null;
+
+               int count = 0;
+               try {
+                       process = new ProcessBuilder().command("bash", "-c", "ps -ef | grep '[D]ataSnapshot'").start();
+                       InputStream is = process.getInputStream();
+                       InputStreamReader isr = new InputStreamReader(is);
+                       BufferedReader br = new BufferedReader(isr);
+
+                       while (br.readLine() != null){
+                               count++;
+                       }
+
+                       int exitVal = process.waitFor();
+                       LOGGER.info("Exit value of the dataSnapshot check process: " + exitVal);
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+
+               if(count > 0){
+                       return true;
+               } else {
+                       return false;
+               }
+       }
+}
+               
+       
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/datasnapshot/PartialPropAndEdgeLoader.java b/src/main/java/org/onap/aai/datasnapshot/PartialPropAndEdgeLoader.java
new file mode 100644 (file)
index 0000000..af858ae
--- /dev/null
@@ -0,0 +1,421 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.datasnapshot;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.concurrent.Callable;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import com.att.eelf.configuration.EELFLogger;
+
+
+
+
+/**
+ * Worker that loads the properties and edges for one chunk ("P" file) of a
+ * multi-file graph snapshot.  It is the partner of PartialVertexLoader, which
+ * is expected to have already created the bare vertices: this class looks each
+ * vertex up via the old-vid -> new-vid translation map, then attaches its
+ * out-edges and properties one graphSON line at a time so that a single bad
+ * record can be retried or reported without failing the whole load.
+ *
+ * call() returns human-readable strings describing the records that could not
+ * be processed (an empty list means everything loaded cleanly).
+ */
+public class PartialPropAndEdgeLoader implements Callable <ArrayList<String>>{
+	
+	private EELFLogger LOGGER;
+
+	private JanusGraph jg;                       // target graph; vertices must already exist in it
+	private String fName;                        // graphSON chunk file this worker processes
+	private Long edgeAddDelayMs;                 // pause between per-line adds (throttling)
+	private Long retryDelayMs;                   // pause before each retry attempt
+	private Long failureDelayMs;                 // pause after a failed line
+	private HashMap<String,String> old2NewVidMap; // old snapshot vid -> new db vid
+	private int maxAllowedErrors;                // abort threshold per pass
+	
+
+		
+	/**
+	 * @param graph target JanusGraph (bare vertices already loaded)
+	 * @param fn graphSON chunk file to process
+	 * @param edgeDelay ms to sleep between lines
+	 * @param failureDelay ms to sleep after a failed line
+	 * @param retryDelay ms to sleep before retrying a failed line
+	 * @param vidMap translation map from snapshot vertex ids to new db vertex ids
+	 * @param maxErrors max failures tolerated per pass before aborting
+	 * @param elfLog logger supplied by the caller
+	 */
+	public PartialPropAndEdgeLoader (JanusGraph graph, String fn, Long edgeDelay, Long failureDelay, Long retryDelay, 
+		 HashMap<String,String> vidMap, int maxErrors, EELFLogger elfLog ){
+		jg = graph;
+		fName = fn;
+		edgeAddDelayMs = edgeDelay;
+		failureDelayMs = failureDelay;
+		retryDelayMs = retryDelay;
+		old2NewVidMap = vidMap;
+		maxAllowedErrors = maxErrors;
+		LOGGER = elfLog;
+	}
+	
+		
+	/**
+	 * Process every graphSON line in the chunk file (pass 1), then retry each
+	 * failed line once (pass 2).  Either pass aborts with an Exception when its
+	 * failure count exceeds maxAllowedErrors.
+	 *
+	 * @return per-record error descriptions for lines that failed (may be empty)
+	 * @throws Exception on read errors or when the error threshold is exceeded
+	 */
+	public ArrayList<String> call() throws Exception  {  
+	
+		// This is a partner to the "PartialVertexLoader" code.  
+		// That code loads in vertex-id's/vertex-label's for a 
+		// multi-file data snapshot.
+		// This code assumes that the all vertex-id's are now in the target db.
+		// This code loads vertex properties and edges for a
+		// multi-file data snapshot (the same one that loaded
+		// the vertex-ids).
+		// 
+		
+		
+		// NOTE - We will be loading parameters and edges for one node at a time so that problems can be 
+		//   identified or ignored or re-tried instead of causing the entire load to fail.   
+		//
+		// Return an arrayList of Strings to give info on what nodes encountered problems
+		
+		int entryCount = 0;
+		int retryCount = 0;
+		int failureCount = 0;
+		int retryFailureCount = 0;
+		// failedAttemptHash maps the line's vertex-id to the full graphSON line
+		// so pass 2 can re-run exactly the lines that failed in pass 1.
+		HashMap <String,String> failedAttemptHash = new HashMap <String,String> ();
+		ArrayList <String> failedAttemptInfo = new ArrayList <String> ();
+		
+		int passNum = 1;
+		try( BufferedReader br = new BufferedReader(new FileReader(fName))) {
+			// loop through the file lines and do PUT for each vertex or the edges depending on what the loadtype is
+			for(String origLine; (origLine = br.readLine()) != null; ) {
+				entryCount++;
+			Thread.sleep(edgeAddDelayMs);  // Space the edge requests out a little
+			
+			// Empty return string means the line processed cleanly.
+			String errInfoStr = processThisLine(origLine, passNum); 
+			if( !errInfoStr.equals("") ){
+				// There was a problem with this line
+				String vidStr = getTheVidForThisLine(origLine);
+				// We'll use the failedAttemptHash to reTry this item
+				failedAttemptHash.put(vidStr,origLine);
+				failedAttemptInfo.add(errInfoStr);
+				failureCount++;
+				if( failureCount > maxAllowedErrors ) {
+					LOGGER.debug(">>> Abandoning PartialPropAndEdgeLoader() because " +
+							"Max Allowed Error count was exceeded for this thread. (max = " + 
+							maxAllowedErrors + ". ");
+					throw new Exception(" ERROR - Max Allowed Error count exceeded for this thread. (max = " + maxAllowedErrors + ". ");
+				}
+				Thread.sleep(failureDelayMs);  // take a little nap if it failed
+			}
+		} // End of looping over each line
+			// NOTE(review): this close() is redundant -- the try-with-resources
+			// above already closes br -- but it is harmless, so left as-is.
+			if( br != null  ){
+			br.close();
+		}
+	}
+		catch (Exception e) {
+		LOGGER.debug(" --- Failed in the main loop for Buffered-Reader item # " + entryCount +
+				", fName = " + fName );
+		LOGGER.debug(" --- msg = " + e.getMessage() );
+		throw e;
+		}	
+		
+		// ---------------------------------------------------------------------------
+        // Now Re-Try any failed requests that might have Failed on the first pass.
+		// ---------------------------------------------------------------------------
+		passNum++;
+		try {
+			// NOTE(review): a retry failure re-puts into failedAttemptHash while
+			// iterating its keySet.  The key re-put is the same vid parsed back out
+			// of the same line, so no structural modification (and no
+			// ConcurrentModificationException) is expected -- confirm if the vid
+			// extraction ever changes.
+			for (String failedVidStr : failedAttemptHash.keySet()) {
+			// Take a little nap, and retry this failed attempt
+				LOGGER.debug("DEBUG >> We will sleep for " + retryDelayMs + " and then RETRY any failed edge/property ADDs. ");
+			Thread.sleep(retryDelayMs);
+			retryCount++;
+			Long failedVidL = Long.parseLong(failedVidStr);
+			// When an Edge/Property Add fails, we store the whole (translated) graphSON line as the data in the failedAttemptHash
+               // We're really just doing a GET of this one vertex here...
+			String jsonLineToRetry = failedAttemptHash.get(failedVidStr);
+			String errInfoStr = processThisLine(jsonLineToRetry, passNum); 
+               if( !errInfoStr.equals("") ){
+			// There was a problem with this line
+			String translatedVidStr = getTheVidForThisLine(jsonLineToRetry);
+			failedAttemptHash.put(translatedVidStr,jsonLineToRetry);
+			failedAttemptInfo.add(errInfoStr);
+			retryFailureCount++;
+				if( retryFailureCount > maxAllowedErrors ) {
+					LOGGER.debug(">>> Abandoning PartialPropAndEdgeLoader() because " +
+						"Max Allowed Error count was exceeded while doing retries for this thread. (max = " + 
+						maxAllowedErrors + ". ");
+					throw new Exception(" ERROR - Max Allowed Error count exceeded for this thread. (max = " + maxAllowedErrors + ". ");
+				}
+			Thread.sleep(failureDelayMs);  // take a little nap if it failed
+			}
+            } // End of looping over each failed line
+        }
+		catch (Exception e) {
+			LOGGER.debug(" -- error in RETRY block. ErrorMsg = [" + e.getMessage() + "]" );
+			throw e;
+		}	
+     
+		LOGGER.debug(">>> After Processing in PartialPropAndEdgeLoader() " +
+			entryCount + " records processed.  " + failureCount + " records failed. " +
+			retryCount + " RETRYs processed.  " + retryFailureCount + " RETRYs failed. ");
+			
+		return failedAttemptInfo;
+		
+	}// end of call()  
+
+	
+	
+	/**
+	 * Translate a snapshot vertex id into the vertex id it was assigned in the
+	 * target db, using the map supplied at construction.
+	 *
+	 * @throws Exception if the map is null or has no entry for oldVid
+	 */
+	private String translateThisVid(String oldVid) throws Exception {
+		
+		if( old2NewVidMap == null ){
+			throw new Exception(" ERROR - null old2NewVidMap found in translateThisVid. ");
+		}
+		
+		if( old2NewVidMap.containsKey(oldVid) ){
+			return old2NewVidMap.get(oldVid);
+		}
+		else {
+			throw new Exception(" ERROR - could not find VID translation for original VID = " + oldVid );
+		}
+	}
+	
+	
+	/**
+	 * Extract the (numeric) vertex id from a graphSON line by simple string
+	 * scanning -- the value between the first ':' and the first ','.
+	 *
+	 * @throws Exception if the line is null or the extracted token is not numeric
+	 */
+	private String getTheVidForThisLine(String graphSonLine) throws Exception {
+		
+		if( graphSonLine == null ){
+			throw new Exception(" ERROR - null graphSonLine passed to getTheVidForThisLine. ");
+		}
+		
+		// We are assuming that the graphSonLine has the vertexId as the first ID:
+		// {"id":100995128,"label":"vertex","inE":{"hasPinterface":[{"id":"7lgg0e-2... etc...
+		
+		 // The vertexId for this line is the numeric part after the initial {"id":xxxxx  up to the first comma
+		int x = graphSonLine.indexOf(':') + 1;
+		int y = graphSonLine.indexOf(',');
+		String initialVid = graphSonLine.substring(x,y);
+		if( initialVid != null && !initialVid.isEmpty() && initialVid.matches("^[0-9]+$") ){
+			return initialVid;
+		}
+		else {
+			throw new Exception(" ERROR - could not determine initial VID for graphSonLine: " + graphSonLine );
+		}
+	}
+		
+	
+	/**
+	 * Process one graphSON line: parse it, translate its vertex id, look the
+	 * vertex up in the db, then add its out-edges (committed separately so a
+	 * later property failure cannot roll them back) and its properties.
+	 *
+	 * @param graphSonLine one full graphSON vertex record
+	 * @param passNum 1 for the first pass, &gt;1 for retries (only affects log text)
+	 * @return "" on success, otherwise a short description of what failed
+	 */
+	private String processThisLine(String graphSonLine, int passNum){
+		
+		String passInfo = ""; 
+		if( passNum > 1 ) {
+			passInfo = " >> RETRY << pass # " + passNum + " ";
+		}
+
+		JSONObject jObj = new JSONObject();
+		String originalVid = "";
+		
+		try{
+			jObj = new JSONObject(graphSonLine);
+			originalVid = jObj.get("id").toString();
+		}
+		catch ( Exception e ){
+		LOGGER.debug(" -- Could not convert line to JsonObject [ " + graphSonLine + "]" );
+		LOGGER.debug(" -- ErrorMsg = [" +e.getMessage() + "]");
+			
+		return(" DEBUG -a- JSON translation exception when processing this line ---");
+		//xxxxxDEBUGxxxxx I think we put some info on the return String and then return?
+	}
+			
+		// -----------------------------------------------------------------------------------------
+		// Note - this assumes that any vertices referred to by an edge will already be in the DB.
+		// -----------------------------------------------------------------------------------------
+		Vertex dbVtx = null;	
+		
+		String newVidStr = "";
+		Long newVidL = 0L;
+		try {
+			newVidStr = translateThisVid(originalVid);
+			newVidL = Long.parseLong(newVidStr);
+		}
+		catch ( Exception e ){
+		LOGGER.debug(" -- "  + passInfo + " translate VertexId before adding edges failed for this: vtxId = " 
+				+ originalVid + ".  ErrorMsg = [" +e.getMessage() + "]");
+			
+		return(" DEBUG -b- there VID-translation error when processing this line ---");
+		//xxxxxDEBUGxxxxx I think we put some info on the return String and then return?
+	}
+		
+		
+		try {
+			dbVtx = getVertexFromDbForVid(newVidStr);
+		}
+		catch ( Exception e ){
+		LOGGER.debug(" -- "  + passInfo + " READ Vertex from DB before adding edges failed for this: vtxId = " + originalVid
+				+ ", newVidId = " + newVidL + ".  ErrorMsg = [" +e.getMessage() + "]");
+			
+		return("  --  there was an error processing this line --- Line = [" + graphSonLine + "]");
+		//xxxxxxDEBUGxxxx I think we put some info on the return String and then return?
+	}
+			
+		
+		String edResStr = processEdgesForVtx( jObj, dbVtx, passInfo, originalVid );
+		if( edResStr.equals("") ){
+			// We will commit the edges by themselves in case the properties stuff below fails
+		try { 
+			jg.tx().commit();
+			}
+			catch ( Exception e ){
+				LOGGER.debug(" -- " + passInfo + " COMMIT FAILED adding EDGES for this vertex: vtxId = " 
+						+ originalVid + ".  ErrorMsg = [" +e.getMessage() + "]");
+				//xxxxxxxxxx I think we put some info on the return String and then return?
+		    return(" DEBUG -d- there was an error doing the commit while processing edges for this line ---");
+			}
+		}
+		
+		// Add the properties that we didn't have when we added the 'bare-bones' vertex
+		String pResStr = processPropertiesForVtx( jObj, dbVtx, passInfo, originalVid );
+		if( pResStr.equals("") ){
+			try { 
+			jg.tx().commit();
+			return "";
+			}
+			catch ( Exception e ){
+				LOGGER.debug(" -- " + passInfo + " COMMIT FAILED adding Properties for this vertex: vtxId = " 
+						+ originalVid + ".  ErrorMsg = [" +e.getMessage() + "]");
+				//xxxxxxxxxx I think we put some info on the return String and then return?
+		    return(" DEBUG -e- there was an error doing the commit while processing Properties for this line ---");
+			}
+		}
+		else {
+			LOGGER.debug("DEBUG " + passInfo + " Error processing Properties for this vertex: vtxId = " + originalVid );
+			
+			//xxxxxxxxxx I think we put some info on the return String and then return?
+		    return(" DEBUG -f- there was an error while processing Properties for this line ---");
+		}
+	}
+	
+	
+	/**
+	 * Copy the "properties" section of the graphSON record onto the db vertex.
+	 * NOTE (from original author): values are always applied as Strings, which
+	 * loses the original property types -- see the inline DEBUG comment.
+	 *
+	 * @return "" on success, otherwise a short error description
+	 */
+	private String processPropertiesForVtx( JSONObject jObj, Vertex dbVtx, String passInfo, String originalVid ){
+		
+		try {
+			JSONObject propsOb = (JSONObject) jObj.get("properties");
+			Iterator <String> propsItr = propsOb.keys();
+			while( propsItr.hasNext() ){
+				String pKey = propsItr.next();
+				JSONArray propsDetArr = propsOb.getJSONArray(pKey);
+				for( int i=0; i< propsDetArr.length(); i++ ){
+					JSONObject prop = propsDetArr.getJSONObject(i);
+					String val = prop.getString("value");
+					dbVtx.property(pKey, val);  //DEBUGjojo -- val is always String here.. which is not right -------------------DEBUG
+				}
+			}
+	
+		}
+		catch ( Exception e ){
+			LOGGER.debug(" -- " + passInfo + " failure getting/setting properties for: vtxId = " 
+					+ originalVid + ".  ErrorMsg = [" + e.getMessage() + "]");
+			//xxxDEBUGxxxxxxx I think we put some info on the return String and then return?
+		    return(" DEBUG -g- there was an error adding properties while processing this line ---");
+			
+		}
+			
+		return "";
+	}
+	
+	
+	/**
+	 * Fetch the single vertex with the given (new/translated) vertex id from
+	 * the target graph.
+	 *
+	 * @throws Exception if the id is not numeric, the lookup fails, or no
+	 *         vertex is found
+	 */
+	private Vertex getVertexFromDbForVid( String vtxIdStr ) throws Exception {
+		Vertex thisVertex = null;
+		Long vtxIdL = 0L;
+		
+		try {
+			vtxIdL = Long.parseLong(vtxIdStr);
+			Iterator <Vertex> vItr = jg.vertices(vtxIdL);
+			// Note - we only expect to find one vertex found for this ID.
+			while( vItr.hasNext() ){
+				thisVertex = vItr.next();
+			}
+		}
+		catch ( Exception e ){
+			String emsg = "Error finding vertex for vid = " + vtxIdStr + "[" + e.getMessage() + "]";
+			throw new Exception ( emsg );
+		}
+		
+		if( thisVertex == null ){
+			String emsg = "Could not find vertex for passed vid = " + vtxIdStr;
+			throw new Exception ( emsg );
+		}
+		
+		return thisVertex;
+	}
+	
+	
+	/**
+	 * Add all OUT edges (and their edge properties) described by the graphSON
+	 * record onto the db vertex.  Only "outE" is processed so each edge is
+	 * added once, not once from each endpoint.  Nothing is committed here --
+	 * the caller commits.
+	 *
+	 * @return "" on success (including "no out edges"), otherwise an error description
+	 */
+	private String processEdgesForVtx( JSONObject jObj, Vertex dbVtx, String passInfo, String originalVid ){
+
+		// Process the edges for this vertex -- but, just the "OUT" ones so edges don't get added twice (once from
+		// each side of the edge).
+		JSONObject edOb = null;
+		try {
+			edOb = (JSONObject) jObj.get("outE");
+		}
+		catch (Exception e){
+			// There were no OUT edges.  This is OK.
+			return "";
+		}
+			
+		try {
+			if( edOb == null ){
+				// There were no OUT edges.  This is OK.  Not all nodes have out edges.
+				return "";
+			}
+			Iterator <String> edItr = edOb.keys();
+			while( edItr.hasNext() ){
+				String eLabel = edItr.next();
+				String inVid = "";   // Note - this should really be a Long?
+				JSONArray edArr = edOb.getJSONArray(eLabel);
+				for( int i=0; i< edArr.length(); i++ ){
+					JSONObject eObj = edArr.getJSONObject(i);
+					String inVidStr = eObj.get("inV").toString();
+					String translatedInVidStr = translateThisVid(inVidStr);
+					Vertex newInVertex = getVertexFromDbForVid(translatedInVidStr);
+					
+					// Note - addEdge automatically adds the edge in the OUT direction from the 
+					//     'anchor' node that the call is being made from.
+					Edge tmpE = dbVtx.addEdge(eLabel, newInVertex); 
+					JSONObject ePropsOb = null;
+					try {
+						ePropsOb = (JSONObject) eObj.get("properties");
+					}
+					catch (Exception e){
+						// NOTE - model definition related edges do not have edge properties.  That is OK.
+						// Ie. when a model-element node has an "isA" edge to a "model-ver" node, that edge does
+						//    not have edge properties on it.
+					}
+					if( ePropsOb != null ){
+						Iterator <String> ePropsItr = ePropsOb.keys();
+						while( ePropsItr.hasNext() ){
+							String pKey = ePropsItr.next();
+							tmpE.property(pKey, ePropsOb.getString(pKey));
+						}
+					}
+				}
+			}
+
+		}
+		catch ( Exception e ){
+			String msg =  " -- " + passInfo + " failure adding edge for: original vtxId = " 
+					+ originalVid + ".  ErrorMsg = [" +e.getMessage() + "]";
+			LOGGER.debug( " -- " + msg );
+			//xxxxxxDEBUGxxxx I think we might need some better info on the return String to return?
+			LOGGER.debug(" -- now going to return/bail out of processEdgesForVtx" );
+			return(" >> " + msg );
+		
+		}
+			
+		return "";
+	}
+	
+	
+}           
+       
+
diff --git a/src/main/java/org/onap/aai/datasnapshot/PartialVertexLoader.java b/src/main/java/org/onap/aai/datasnapshot/PartialVertexLoader.java
new file mode 100644 (file)
index 0000000..387f45e
--- /dev/null
@@ -0,0 +1,223 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.datasnapshot;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.util.HashMap;
+import java.util.concurrent.Callable;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONReader;
+import org.janusgraph.core.JanusGraph;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+
+
+
+public class PartialVertexLoader implements Callable<HashMap<String,String>>{
+       
+       private EELFLogger LOGGER;
+
+       private JanusGraph jg;
+       private String fName;
+       private Long vertAddDelayMs;
+       private Long failurePauseMs;
+       private Long retryDelayMs;
+       private int maxAllowedErrors;
+               
+       public PartialVertexLoader (JanusGraph graph, String fn, Long vertDelay, Long failurePause, 
+                       Long retryDelay, int maxErrors, EELFLogger elfLog ){
+               jg = graph;
+               fName = fn;
+               vertAddDelayMs = vertDelay;
+               failurePauseMs = failurePause;
+               retryDelayMs = retryDelay;
+               maxAllowedErrors = maxErrors;
+               LOGGER = elfLog;
+       }
+               
+       public HashMap<String,String> call() throws Exception  {  
+       
+               // NOTE - we will be loading one node at a time so that bad nodes can be ignored instead of causing the
+               //   entire load to fail.   
+               //
+               int entryCount = 0;
+               int retryCount = 0;
+               int failureCount = 0;
+               int retryFailureCount = 0;
+               HashMap <String,String> failedAttemptHash = new HashMap <String,String> ();
+               HashMap <String,String> old2NewVtxIdHash = new HashMap <String,String> ();
+               GraphSONReader gsr = GraphSONReader.build().create();
+               
+       
+               // Read this file into a JSON object
+               JsonParser parser = new JsonParser();
+               
+               try( BufferedReader br = new BufferedReader(new FileReader(fName))) {
+                       // loop through the file lines and do PUT for each vertex or the edges depending on what the loadtype is
+                       for(String line; (line = br.readLine()) != null; ) {
+                               entryCount++;
+                               Object ob = parser.parse(line);
+                               JsonObject jObj = (JsonObject) ob;
+                               // NOTE - we will need to keep track of how the newly generated vid's map
+                       //    to the old ones so we can aim the edges correctly later.
+                               
+                       // ----  Note -- This ONLY loads the vertexId and the label for each vertex -------------
+                               Thread.sleep(vertAddDelayMs); 
+                                       
+                       String oldVtxIdStr = jObj.get("id").getAsString();
+                       String vtxLabelStr = jObj.get("label").getAsString();
+                       try { 
+                               Vertex tmpV = jg.addVertex(vtxLabelStr);
+                                       String newVtxIdStr = tmpV.id().toString();
+                                       old2NewVtxIdHash.put(oldVtxIdStr,  newVtxIdStr);
+                               }
+                               catch ( Exception e ){
+                                       failureCount++;
+                                       Thread.sleep(failurePauseMs); // Slow down if things are failing
+                                       LOGGER.debug(" >> addVertex FAILED for vtxId = " + oldVtxIdStr + ", label = [" 
+                                                       + vtxLabelStr + "].  ErrorMsg = [" + e.getMessage() + "]" );
+                               //e.printStackTrace();
+                               failedAttemptHash.put(oldVtxIdStr, vtxLabelStr);
+                               if( failureCount > maxAllowedErrors ) {
+                                       LOGGER.debug(" >>> Abandoning PartialVertexLoader() because " +
+                                                       "Max Allowed Error count was exceeded for this thread. (max = " + 
+                                                       maxAllowedErrors + ". ");
+                                       throw new Exception(" ERROR - Max Allowed Error count exceeded for this thread. (max = " + maxAllowedErrors + ". ");
+                               }
+                               else {
+                                       continue;
+                               }
+                       }
+                       try { 
+                               jg.tx().commit();
+                       }
+                       catch ( Exception e ){
+                               failureCount++;
+                               Thread.sleep(failurePauseMs); // Slow down if things are failing
+                               LOGGER.debug(" -- COMMIT FAILED for Vtx ADD for vtxId = " + oldVtxIdStr + ", label = [" 
+                                               + vtxLabelStr + "].  ErrorMsg = [" +e.getMessage() + "]" );
+                               //e.printStackTrace();                                          
+                               failedAttemptHash.put(oldVtxIdStr, vtxLabelStr);
+                               if( failureCount > maxAllowedErrors ) {
+                                       LOGGER.debug(">>> Abandoning PartialVertexLoader() because " +
+                                                       "Max Allowed Error count was exceeded for this thread. (max = " + 
+                                                       maxAllowedErrors + ". ");
+                                       throw new Exception(" ERROR - Max Allowed Error count exceeded for this thread. (max = " + maxAllowedErrors + ". ");
+                               }
+                               else {
+                                       continue;
+                               }
+                       }
+                                       
+               } // End of looping over each line
+                       
+               if( br != null  ){
+                       br.close();
+               }
+               }
+               catch (Exception e) {
+               LOGGER.debug(" --- Failed in the main loop for Buffered-Reader item # " + entryCount +
+                               ", fName = " + fName );
+               LOGGER.debug(" --- msg = " + e.getMessage() );
+                       e.printStackTrace();
+                   throw e;
+               }       
+                       
+               // ---------------------------------------------------------------------------
+               // Now Re-Try any failed requests that might have Failed on the first pass.
+               // ---------------------------------------------------------------------------
+               try {
+               for (String failedVidStr : failedAttemptHash.keySet()) {
+                       // Take a little nap, and retry this failed attempt
+                       LOGGER.debug("DEBUG >> We will sleep for " + retryDelayMs + " and then RETRY any failed vertex ADDs. ");
+                       Thread.sleep(retryDelayMs);
+                       
+                       retryCount++;
+                       // When a vertex Add fails we store the label as the data in the failedAttemptHash.
+                       String failedLabel = failedAttemptHash.get(failedVidStr);
+                       LOGGER.debug("DEBUG >> RETRY << " +
+                                       failedVidStr + ", label = " + failedLabel );
+                       try {
+                               Vertex tmpV = jg.addVertex(failedLabel);
+                               String newVtxIdStr = tmpV.id().toString();
+                       old2NewVtxIdHash.put(failedVidStr, newVtxIdStr);
+                       }
+                       catch ( Exception e ){
+                               retryFailureCount++;
+                               LOGGER.debug(" -- addVertex FAILED for RETRY for vtxId = " +
+                                               failedVidStr + ", label = [" + failedLabel + 
+                                               "].  ErrorMsg = [" +e.getMessage() + "]" );
+                               e.printStackTrace();
+                               if( retryFailureCount > maxAllowedErrors ) {
+                                       LOGGER.debug(">>> Abandoning PartialVertexLoader() because " +
+                                                       "Max Allowed Error count was exceeded for this thread. (max = " + 
+                                                       maxAllowedErrors + ". ");
+                                       throw new Exception(" ERROR - Max Allowed Error count exceeded for this thread. (max = " + maxAllowedErrors + ". ");
+                               }
+                               else {
+                                       continue;
+                               }
+                       }
+                       try { 
+                               jg.tx().commit();
+                       // If this worked, we can take it off of the failed list
+                       failedAttemptHash.remove(failedVidStr);
+               }
+                       catch ( Exception e ){
+                               retryFailureCount++;
+                               LOGGER.debug(" -- COMMIT FAILED for RETRY for vtxId = " + failedVidStr 
+                                               + ", label = [" + failedLabel + "].  ErrorMsg = [" + e.getMessage() + "]" );
+                                       e.printStackTrace();
+                               if( retryFailureCount > maxAllowedErrors ) {
+                                       LOGGER.debug(">>> Abandoning PartialVertexLoader() because " +
+                                                       "Max Allowed Error count was exceeded for this thread. (max = " + 
+                                                       maxAllowedErrors + ". ");
+                                       throw new Exception(" ERROR - Max Allowed Error count exceeded for this thread. (max = " + maxAllowedErrors + ". ");
+                               }
+                               else {
+                                       continue;
+                               }
+                               }
+               } // End of looping over failed attempt hash and doing retries  
+                       
+               }               
+        catch ( Exception e ){
+                       LOGGER.debug(" -- error in RETRY block. ErrorMsg = [" +e.getMessage() + "]" );
+                       e.printStackTrace();
+                       throw e;        
+        }
+                       
+        // This would need to be properly logged...            
+               LOGGER.debug(">>> After Processing in PartialVertexLoader():  " + 
+                               entryCount + " records processed.  " + failureCount + " records failed. " +
+                               retryCount + " RETRYs processed.  " + retryFailureCount + " RETRYs failed. ");
+                       
+        return old2NewVtxIdHash;
+               
+       }// end of call()  
+       
+       
+               
+}           
+       
+
diff --git a/src/main/java/org/onap/aai/datasnapshot/PrintVertexDetails.java b/src/main/java/org/onap/aai/datasnapshot/PrintVertexDetails.java
new file mode 100644 (file)
index 0000000..791ae15
--- /dev/null
@@ -0,0 +1,107 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.datasnapshot;
+
+import java.io.FileOutputStream;
+import java.util.ArrayList;
+import java.util.Iterator;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.io.IoCore;
+import org.janusgraph.core.JanusGraph;
+
+
+public class PrintVertexDetails implements Runnable{
+       
+       //private static EELFLogger LOGGER;
+
+       private JanusGraph jg;
+       private String fname;
+       private ArrayList<Vertex> vtxList;
+       private Boolean debugOn;
+       private int debugDelayMs;
+               
+       public PrintVertexDetails (JanusGraph graph, String fn, ArrayList<Vertex> vL, Boolean debugFlag, int debugDelay){
+               jg = graph;
+               fname = fn;
+               vtxList = vL;
+               debugOn = debugFlag;
+               debugDelayMs = debugDelay;
+       }
+               
+       public void run(){  
+               if( debugOn ){
+                       // This is much slower, but sometimes we need to find out which single line is causing a failure
+                       try{
+                               int okCount = 0;
+                               int failCount = 0;
+                               Long debugDelayMsL = new Long(debugDelayMs);
+                               FileOutputStream subFileStr = new FileOutputStream(fname);
+                               Iterator <Vertex> vSubItr = vtxList.iterator();
+                               while( vSubItr.hasNext() ){
+                                       Long vertexIdL = 0L;
+                                       String aaiNodeType = "";
+                                       String aaiUri = "";
+                                       String aaiUuid = "";
+                                       try {
+                                               Vertex tmpV = vSubItr.next();
+                                               vertexIdL = (Long) tmpV.id();
+                                               aaiNodeType = (String) tmpV.property("aai-node-type").orElse(null);
+                                               aaiUri = (String) tmpV.property("aai-uri").orElse(null);
+                                               aaiUuid = (String) tmpV.property("aai-uuid").orElse(null);
+                                               
+                                               Thread.sleep(debugDelayMsL); // Make sure it doesn't bump into itself
+                                               jg.io(IoCore.graphson()).writer().create().writeVertex(subFileStr, tmpV, Direction.BOTH); 
+                                               okCount++;
+                                       }
+                                       catch(Exception e) {
+                                               failCount++;
+                                               System.out.println(" >> DEBUG MODE >> Failed at:  VertexId = [" + vertexIdL + 
+                                                               "], aai-node-type = [" + aaiNodeType + 
+                                                               "], aai-uuid = [" + aaiUuid + 
+                                                               "], aai-uri = [" + aaiUri + "]. " );
+                                               e.printStackTrace();
+                                       }
+                               }
+                               System.out.println(" -- Printed " + okCount + " vertexes out to " + fname +
+                                               ", with " + failCount + " failed.");
+                               subFileStr.close();
+                       }
+                       catch(Exception e){
+                               e.printStackTrace();
+                       }       
+               }
+               else {
+                       // Not in DEBUG mode, so we'll do all the nodes in one group
+                       try{
+                               int count = vtxList.size();
+                               Iterator <Vertex> vSubItr = vtxList.iterator();
+                               FileOutputStream subFileStr = new FileOutputStream(fname);
+                               jg.io(IoCore.graphson()).writer().create().writeVertices(subFileStr, vSubItr, Direction.BOTH);
+                               subFileStr.close();
+                               System.out.println(" -- Printed " + count + " vertexes out to " + fname);
+                       }
+                       catch(Exception e){
+                               e.printStackTrace();
+                       }       
+               }
+       }  
+       
+}       
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/db/schema/AuditDoc.java b/src/main/java/org/onap/aai/db/schema/AuditDoc.java
new file mode 100644 (file)
index 0000000..2beec12
--- /dev/null
@@ -0,0 +1,88 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+import org.codehaus.jackson.annotate.JsonProperty;
+
+import java.util.List;
+
+public class AuditDoc {
+
+       private List<DBProperty> properties;
+       private List<DBIndex> indexes;
+       private List<EdgeProperty> edgeLabels;
+       
+       /**
+        * Gets the properties.
+        *
+        * @return the properties
+        */
+       public List<DBProperty> getProperties() {
+               return properties;
+       }
+       
+       /**
+        * Sets the properties.
+        *
+        * @param properties the new properties
+        */
+       public void setProperties(List<DBProperty> properties) {
+               this.properties = properties;
+       }
+       
+       /**
+        * Gets the indexes.
+        *
+        * @return the indexes
+        */
+       public List<DBIndex> getIndexes() {
+               return indexes;
+       }
+       
+       /**
+        * Sets the indexes.
+        *
+        * @param indexes the new indexes
+        */
+       public void setIndexes(List<DBIndex> indexes) {
+               this.indexes = indexes;
+       }
+    
+    /**
+     * Gets the edge labels.
+     *
+     * @return the edge labels
+     */
+    @JsonProperty("edge-labels")
+       public List<EdgeProperty> getEdgeLabels() {
+               return edgeLabels;
+       }
+       
+       /**
+        * Sets the edge labels.
+        *
+        * @param edgeLabels the new edge labels
+        */
+       public void setEdgeLabels(List<EdgeProperty> edgeLabels) {
+               this.edgeLabels = edgeLabels;
+       }
+       
+       
+}
diff --git a/src/main/java/org/onap/aai/db/schema/AuditJanusGraph.java b/src/main/java/org/onap/aai/db/schema/AuditJanusGraph.java
new file mode 100644 (file)
index 0000000..e49aa7f
--- /dev/null
@@ -0,0 +1,121 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.EdgeLabel;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.PropertyKey;
+import org.janusgraph.core.schema.JanusGraphIndex;
+import org.janusgraph.core.schema.JanusGraphManagement;
+
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+
+public class AuditJanusGraph extends Auditor {
+
+       private final JanusGraph graph;
+       
+       /**
+        * Instantiates a new audit JanusGraph.
+        *
+        * @param g the g
+        */
+       public AuditJanusGraph (JanusGraph g) {
+               this.graph = g;
+               buildSchema();
+       }
+       
+       /**
+        * Builds the schema.
+        */
+       private void buildSchema() {
+               populateProperties();
+               populateIndexes();
+               populateEdgeLabels();
+       }
+       
+       /**
+        * Populate properties.
+        */
+       private void populateProperties() {
+               JanusGraphManagement mgmt = graph.openManagement();
+               Iterable<PropertyKey> iterable = mgmt.getRelationTypes(PropertyKey.class);
+               Iterator<PropertyKey> JanusGraphProperties = iterable.iterator();
+               PropertyKey propKey;
+               while (JanusGraphProperties.hasNext()) {
+                       propKey = JanusGraphProperties.next();
+                       DBProperty prop = new DBProperty();
+
+                       prop.setName(propKey.name());
+                       prop.setCardinality(propKey.cardinality());
+                       prop.setTypeClass(propKey.dataType());
+
+                       this.properties.put(prop.getName(), prop);
+               }
+       }
+
+       /**
+        * Populate indexes.
+        */
+       private void populateIndexes() {
+               JanusGraphManagement mgmt = graph.openManagement();
+               Iterable<JanusGraphIndex> iterable = mgmt.getGraphIndexes(Vertex.class);
+               Iterator<JanusGraphIndex> JanusGraphIndexes = iterable.iterator();
+               JanusGraphIndex JanusGraphIndex;
+               while (JanusGraphIndexes.hasNext()) {
+                       JanusGraphIndex = JanusGraphIndexes.next();
+                       if (JanusGraphIndex.isCompositeIndex()) {
+                               DBIndex index = new DBIndex();
+                               LinkedHashSet<DBProperty> dbProperties = new LinkedHashSet<>();
+                               index.setName(JanusGraphIndex.name());
+                               index.setUnique(JanusGraphIndex.isUnique());
+                               PropertyKey[] keys = JanusGraphIndex.getFieldKeys();
+                               for (PropertyKey key : keys) {
+                                       dbProperties.add(this.properties.get(key.name()));
+                               }
+                               index.setProperties(dbProperties);
+                               index.setStatus(JanusGraphIndex.getIndexStatus(keys[0]));
+                               this.indexes.put(index.getName(), index);
+                       }
+               }
+       }
+
+       /**
+        * Populate edge labels.
+        */
+       private void populateEdgeLabels() {
+               JanusGraphManagement mgmt = graph.openManagement();
+               Iterable<EdgeLabel> iterable = mgmt.getRelationTypes(EdgeLabel.class);
+               Iterator<EdgeLabel> JanusGraphEdgeLabels = iterable.iterator();
+               EdgeLabel edgeLabel;
+               while (JanusGraphEdgeLabels.hasNext()) {
+                       edgeLabel = JanusGraphEdgeLabels.next();
+                       EdgeProperty edgeProperty = new EdgeProperty();
+                       
+                       edgeProperty.setName(edgeLabel.name());
+                       edgeProperty.setMultiplicity(edgeLabel.multiplicity());
+                       
+                       this.edgeLabels.put(edgeProperty.getName(), edgeProperty);
+               }       
+       }
+       
+}
diff --git a/src/main/java/org/onap/aai/db/schema/AuditOXM.java b/src/main/java/org/onap/aai/db/schema/AuditOXM.java
new file mode 100644 (file)
index 0000000..417824c
--- /dev/null
@@ -0,0 +1,227 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.google.common.collect.Multimap;
+import org.janusgraph.core.Cardinality;
+import org.janusgraph.core.Multiplicity;
+import org.janusgraph.core.schema.SchemaStatus;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.EdgeRule;
+import org.onap.aai.edges.exceptions.EdgeRuleNotFoundException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.logging.LogFormatTools;
+import org.onap.aai.schema.enums.ObjectMetadata;
+import org.onap.aai.setup.SchemaVersion;
+
+import java.util.*;
+import java.util.stream.Collectors;
+
+public class AuditOXM extends Auditor {
+
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(AuditOXM.class);
+
+       private Set<Introspector> allObjects;
+       private EdgeIngestor ingestor;
+
+       /**
+        * Instantiates a new audit OXM.
+        *
+        * @param version the version
+        */
+       public AuditOXM(LoaderFactory loaderFactory, SchemaVersion version) {
+               
+               Loader loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, version);
+               Set<String> objectNames = getAllObjects(loader);
+               allObjects = new HashSet<>();
+               for (String key : objectNames) {
+                       try {
+                               final Introspector temp = loader.introspectorFromName(key);
+                               allObjects.add(temp);
+                               this.createDBProperties(temp);
+                       } catch (AAIUnknownObjectException e) {
+                               LOGGER.warn("Skipping audit for object " + key + " (Unknown Object) " + LogFormatTools.getStackTop(e));
+                       }
+               }
+               for (Introspector temp : allObjects) {
+                       this.createDBIndexes(temp);
+               }
+               try {
+                       createEdgeLabels();
+               } catch (EdgeRuleNotFoundException e) {
+                       LOGGER.warn("Skipping audit for version " + version + " due to " + LogFormatTools.getStackTop(e));
+               }
+
+       }
+
+       /**
+        * Gets the all objects.
+        *
+        * @param version the version
+        * @return the all objects
+        */
+       private Set<String> getAllObjects(Loader loader) {
+
+               Set<String> result = loader.getAllObjects().entrySet()
+                       .stream()
+                       .map(Map.Entry::getKey)
+                       .collect(Collectors.toSet());
+
+               result.remove("EdgePropNames");
+               return result;
+       }
+       
+       /**
+        * Creates the DB properties.
+        *
+        * @param temp the temp
+        */
+       private void createDBProperties(Introspector temp) {
+               Set<String> objectProperties = temp.getProperties();
+               
+               for (String prop : objectProperties) {
+                       if (!properties.containsKey(prop)) {
+                               DBProperty dbProperty = new DBProperty();
+                               dbProperty.setName(prop);
+                               if (temp.isListType(prop)) {
+                                       dbProperty.setCardinality(Cardinality.SET);
+                                       if (temp.isSimpleGenericType(prop)) {
+                                               Class<?> clazz = null;
+                                               try {
+                                                       clazz = Class.forName(temp.getGenericType(prop));
+                                               } catch (ClassNotFoundException e) {
+                                                       clazz = Object.class;
+                                               }
+                                               dbProperty.setTypeClass(clazz);
+                                               properties.put(prop, dbProperty);
+                                       }
+                               } else {
+                                       dbProperty.setCardinality(Cardinality.SINGLE);
+                                       if (temp.isSimpleType(prop)) {
+                                               Class<?> clazz = null;
+                                               try {
+                                                       clazz = Class.forName(temp.getType(prop));
+                                               } catch (ClassNotFoundException e) {
+                                                       clazz = Object.class;
+                                               }
+                                               dbProperty.setTypeClass(clazz);
+                                               properties.put(prop, dbProperty);
+                                       }
+                               }
+                       }
+               }
+               
+       }
+       
+	/**
+	 * Creates the DB index descriptors for the given object: one
+	 * single-property index per indexed property, plus a composite
+	 * "key-for-&lt;db-name&gt;" index when the object is top level (has a
+	 * namespace) or has more than one key property.
+	 *
+	 * @param temp the introspector for the object being audited
+	 */
+	private void createDBIndexes(Introspector temp) {
+		String uniqueProps = temp.getMetadata(ObjectMetadata.UNIQUE_PROPS);
+		String namespace = temp.getMetadata(ObjectMetadata.NAMESPACE);
+		if (uniqueProps == null) {
+			uniqueProps = "";
+		}
+		if (namespace == null) {
+			namespace = "";
+		}
+		// BUG FIX: was 'namespace != ""', which compares String identity and
+		// can misclassify a non-interned empty string; compare content instead.
+		boolean isTopLevel = !namespace.isEmpty();
+		List<String> unique = Arrays.asList(uniqueProps.split(","));
+		Set<String> indexed = temp.getIndexedProperties();
+		Set<String> keys = temp.getKeys();
+
+		for (String prop : indexed) {
+			if (!this.indexes.containsKey(prop)) {
+				DBIndex dbIndex = new DBIndex();
+				LinkedHashSet<DBProperty> properties = new LinkedHashSet<>();
+				dbIndex.setName(prop);
+				// A property index is unique when the property is listed in
+				// the object's unique-props metadata.
+				dbIndex.setUnique(unique.contains(prop));
+				properties.add(this.properties.get(prop));
+				dbIndex.setProperties(properties);
+				dbIndex.setStatus(SchemaStatus.ENABLED);
+				this.indexes.put(prop, dbIndex);
+			}
+		}
+		if (keys.size() > 1 || isTopLevel) {
+			DBIndex dbIndex = new DBIndex();
+			LinkedHashSet<DBProperty> properties = new LinkedHashSet<>();
+			dbIndex.setName("key-for-" + temp.getDbName());
+			if (!this.indexes.containsKey(dbIndex.getName())) {
+				boolean isUnique = false;
+				if (isTopLevel) {
+					// Top-level composite keys always lead with the node type.
+					properties.add(this.properties.get(AAIProperties.NODE_TYPE));
+				}
+				for (String key : keys) {
+					properties.add(this.properties.get(key));
+					// The composite index is unique if any key property is.
+					if (unique.contains(key)) {
+						isUnique = true;
+					}
+				}
+				dbIndex.setUnique(isUnique);
+				dbIndex.setProperties(properties);
+				dbIndex.setStatus(SchemaStatus.ENABLED);
+				this.indexes.put(dbIndex.getName(), dbIndex);
+			}
+		}
+
+	}
+       
+	/**
+	 * Builds an EdgeProperty entry (always MULTI multiplicity) for each
+	 * edge-rule label reported by the ingestor.
+	 *
+	 * @throws EdgeRuleNotFoundException if the current rules cannot be read
+	 */
+	private void createEdgeLabels() throws EdgeRuleNotFoundException {
+		Multimap<String, EdgeRule> edgeRules = ingestor.getAllCurrentRules();
+		for (String ruleKey : edgeRules.keySet()) {
+			// Each key only ever maps to a single rule (the multimap type is
+			// wider than needed), so the loop just keeps the last label seen.
+			String label = "";
+			for (EdgeRule rule : edgeRules.get(ruleKey)) {
+				label = rule.getLabel();
+			}
+			EdgeProperty edgeProperty = new EdgeProperty();
+			edgeProperty.setName(label);
+			edgeProperty.setMultiplicity(Multiplicity.MULTI);
+			this.edgeLabels.put(label, edgeProperty);
+		}
+	}
+       
+       /**
+        * Gets the all introspectors.
+        *
+        * @return the all introspectors
+        */
+       public Set<Introspector> getAllIntrospectors() {
+               return this.allObjects;
+       }
+
+       public void setEdgeIngestor(EdgeIngestor ingestor){
+               this.ingestor = ingestor;
+       }
+}
diff --git a/src/main/java/org/onap/aai/db/schema/Auditor.java b/src/main/java/org/onap/aai/db/schema/Auditor.java
new file mode 100644 (file)
index 0000000..5dc8c6c
--- /dev/null
@@ -0,0 +1,53 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+import java.util.*;
+
+public abstract class Auditor {
+
+       protected Map<String, DBProperty> properties = new HashMap<>();
+       protected Map<String, DBIndex> indexes = new HashMap<>();
+       protected Map<String, EdgeProperty> edgeLabels = new HashMap<>();
+       
+       /**
+        * Gets the audit doc.
+        *
+        * @return the audit doc
+        */
+       public AuditDoc getAuditDoc() {
+               AuditDoc doc = new AuditDoc();
+               List<DBProperty> propertyList = new ArrayList<>();
+               List<DBIndex> indexList = new ArrayList<>();
+               List<EdgeProperty> edgeLabelList = new ArrayList<>();
+               propertyList.addAll(this.properties.values());
+               indexList.addAll(this.indexes.values());
+               edgeLabelList.addAll(this.edgeLabels.values());
+               Collections.sort(propertyList, new CompareByName());
+               Collections.sort(indexList, new CompareByName());
+               Collections.sort(edgeLabelList, new CompareByName());
+               
+               doc.setProperties(propertyList);
+               doc.setIndexes(indexList);
+               doc.setEdgeLabels(edgeLabelList);
+               
+               return doc;
+       }
+}
diff --git a/src/main/java/org/onap/aai/db/schema/AuditorFactory.java b/src/main/java/org/onap/aai/db/schema/AuditorFactory.java
new file mode 100644 (file)
index 0000000..6d96f29
--- /dev/null
@@ -0,0 +1,53 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+import org.janusgraph.core.JanusGraph;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+
+public class AuditorFactory {
+
+       private LoaderFactory loaderFactory;
+
+    public AuditorFactory(LoaderFactory loaderFactory){
+        this.loaderFactory = loaderFactory;
+       }
+       /**
+        * Gets the OXM auditor.
+        *
+        * @param v the v
+        * @return the OXM auditor
+        */
+       public Auditor getOXMAuditor (SchemaVersion v) {
+               return new AuditOXM(loaderFactory, v);
+       }
+       
+       /**
+        * Gets the graph auditor.
+        *
+        * @param g the g
+        * @return the graph auditor
+        */
+       public Auditor getGraphAuditor (JanusGraph g) {
+               return new AuditJanusGraph(g);
+       }
+}
diff --git a/src/main/java/org/onap/aai/db/schema/CompareByName.java b/src/main/java/org/onap/aai/db/schema/CompareByName.java
new file mode 100644 (file)
index 0000000..829239d
--- /dev/null
@@ -0,0 +1,35 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+import java.util.Comparator;
+
+public class CompareByName implements Comparator<Named>{
+
+       /**
+        * {@inheritDoc}
+        */
+       @Override
+       public int compare(Named o1, Named o2) {
+               return o1.getName().compareTo(o2.getName());
+       }
+
+
+}
diff --git a/src/main/java/org/onap/aai/db/schema/DBIndex.java b/src/main/java/org/onap/aai/db/schema/DBIndex.java
new file mode 100644 (file)
index 0000000..754999c
--- /dev/null
@@ -0,0 +1,104 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+import org.janusgraph.core.schema.SchemaStatus;
+
+import java.util.LinkedHashSet;
+import java.util.Set;
+
+public class DBIndex implements Named {
+
+       private String name = null;
+       private boolean unique = false;
+       private LinkedHashSet<DBProperty> properties = new LinkedHashSet<>();
+       private SchemaStatus status = null;
+
+       /**
+        * Gets the name
+        */
+       public String getName() {
+               return name;
+       }
+
+       /**
+        * Sets the name.
+        *
+        * @param name the new name
+        */
+       public void setName(String name) {
+               this.name = name;
+       }
+
+       /**
+        * Checks if is unique.
+        *
+        * @return true, if is unique
+        */
+       public boolean isUnique() {
+               return unique;
+       }
+
+       /**
+        * Sets the unique.
+        *
+        * @param unique the new unique
+        */
+       public void setUnique(boolean unique) {
+               this.unique = unique;
+       }
+
+       /**
+        * Gets the properties.
+        *
+        * @return the properties
+        */
+       public Set<DBProperty> getProperties() {
+               return properties;
+       }
+       
+       /**
+        * Sets the properties.
+        *
+        * @param properties the new properties
+        */
+       public void setProperties(LinkedHashSet<DBProperty> properties) {
+               this.properties = properties;
+       }
+       
+       /**
+        * Gets the status.
+        *
+        * @return the status
+        */
+       public SchemaStatus getStatus() {
+               return status;
+       }
+       
+       /**
+        * Sets the status.
+        *
+        * @param status the new status
+        */
+       public void setStatus(SchemaStatus status) {
+               this.status = status;
+       }
+       
+}
diff --git a/src/main/java/org/onap/aai/db/schema/DBProperty.java b/src/main/java/org/onap/aai/db/schema/DBProperty.java
new file mode 100644 (file)
index 0000000..491331d
--- /dev/null
@@ -0,0 +1,83 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+import org.janusgraph.core.Cardinality;
+
+public class DBProperty implements Named {
+
+       
+       private String name = null;
+       private Cardinality cardinality = null;
+       private Class<?> typeClass = null;
+       
+       /**
+        * Gets the name
+        */
+       public String getName() {
+               return name;
+       }
+       
+       /**
+        * Sets the name.
+        *
+        * @param name the new name
+        */
+       public void setName(String name) {
+               this.name = name;
+       }
+       
+       /**
+        * Gets the cardinality.
+        *
+        * @return the cardinality
+        */
+       public Cardinality getCardinality() {
+               return cardinality;
+       }
+       
+       /**
+        * Sets the cardinality.
+        *
+        * @param cardinality the new cardinality
+        */
+       public void setCardinality(Cardinality cardinality) {
+               this.cardinality = cardinality;
+       }
+       
+       /**
+        * Gets the type class.
+        *
+        * @return the type class
+        */
+       public Class<?> getTypeClass() {
+               return typeClass;
+       }
+       
+       /**
+        * Sets the type class.
+        *
+        * @param type the new type class
+        */
+       public void setTypeClass(Class<?> type) {
+               this.typeClass = type;
+       }
+       
+}
diff --git a/src/main/java/org/onap/aai/db/schema/EdgeProperty.java b/src/main/java/org/onap/aai/db/schema/EdgeProperty.java
new file mode 100644 (file)
index 0000000..f89bc8f
--- /dev/null
@@ -0,0 +1,68 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+import org.codehaus.jackson.annotate.JsonProperty;
+import org.codehaus.jackson.annotate.JsonPropertyOrder;
+import org.janusgraph.core.Multiplicity;
+
+@JsonPropertyOrder({ "label", "multiplicity" })
+public class EdgeProperty implements Named {
+
+       private String name = null;
+       private Multiplicity multiplicity = null;
+       
+    /**
+     * Gets the name
+     */
+    @JsonProperty("label")
+       public String getName() {
+               return name;
+       }
+    
+    /**
+     * Sets the name.
+     *
+     * @param name the new name
+     */
+    @JsonProperty("label")
+       public void setName(String name) {
+               this.name = name;
+       }
+       
+       /**
+        * Gets the multiplicity.
+        *
+        * @return the multiplicity
+        */
+       public Multiplicity getMultiplicity() {
+               return multiplicity;
+       }
+       
+       /**
+        * Sets the multiplicity.
+        *
+        * @param multiplicity the new multiplicity
+        */
+       public void setMultiplicity(Multiplicity multiplicity) {
+               this.multiplicity = multiplicity;
+       }
+       
+}
diff --git a/src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java b/src/main/java/org/onap/aai/db/schema/ManageJanusGraphSchema.java
new file mode 100644 (file)
index 0000000..dccc141
--- /dev/null
@@ -0,0 +1,328 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.PropertyKey;
+import org.janusgraph.core.schema.JanusGraphIndex;
+import org.janusgraph.core.schema.JanusGraphManagement;
+import org.janusgraph.core.schema.JanusGraphManagement.IndexBuilder;
+import org.janusgraph.core.schema.SchemaStatus;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Applies the schema described by the OXM model (property keys, composite
+ * indexes and edge labels) to a JanusGraph instance via its management API.
+ */
+public class ManageJanusGraphSchema {
+
+	
+	// Management transaction shared by the create* helpers within one
+	// buildSchema() call; opened there and committed/rolled back there.
+	private JanusGraphManagement graphMgmt;
+	private JanusGraph graph;
+	private List<DBProperty> aaiProperties;
+	private List<DBIndex> aaiIndexes;
+	private List<EdgeProperty> aaiEdgeProperties;
+	// OXM-backed auditor for the default schema version.
+	private Auditor oxmInfo = null;
+	// NOTE(review): graphInfo is assigned in the constructor but never read
+	// anywhere in this class -- confirm whether it is still needed.
+	private Auditor graphInfo = null;
+
+	/**
+	 * Instantiates a new manage JanusGraph schema.
+	 *
+	 * @param graph the graph whose schema is to be managed
+	 * @param auditorFactory factory supplying OXM- and graph-backed auditors
+	 * @param schemaVersions source of the default schema version to audit
+	 */
+	public ManageJanusGraphSchema(final JanusGraph graph, AuditorFactory auditorFactory, SchemaVersions schemaVersions) {
+		this.graph = graph;
+		oxmInfo = auditorFactory.getOXMAuditor(schemaVersions.getDefaultVersion());
+		graphInfo = auditorFactory.getGraphAuditor(graph);
+	}
+
+
+	/**
+	 * Builds the schema: opens a management transaction, loads the OXM audit
+	 * document, then creates property keys, indexes and edge labels.
+	 */
+	public void buildSchema() {
+
+		this.graphMgmt = graph.openManagement();
+		aaiProperties = new ArrayList<>();
+		aaiEdgeProperties = new ArrayList<>();
+		aaiIndexes = new ArrayList<>();
+		aaiProperties.addAll(oxmInfo.getAuditDoc().getProperties());
+		aaiIndexes.addAll(oxmInfo.getAuditDoc().getIndexes());
+		aaiEdgeProperties.addAll(oxmInfo.getAuditDoc().getEdgeLabels());
+		try {
+			createPropertyKeys();
+			createIndexes();
+			createEdgeLabels();
+		} catch (Exception e) {
+			// NOTE(review): the failure is only printed, and commit() below
+			// still executes after rollback() -- verify that committing a
+			// rolled-back management transaction is intended here.
+			e.printStackTrace();
+			graphMgmt.rollback();
+		}
+		graphMgmt.commit();
+	}
+
+	/**
+	 * Creates the property keys: one per OXM-defined property that is missing
+	 * from the graph, or flags for replacement one whose cardinality or data
+	 * type differs from the OXM definition.
+	 */
+	private void createPropertyKeys() {
+
+
+		for (DBProperty prop : aaiProperties) {
+
+			if (graphMgmt.containsPropertyKey(prop.getName())) {
+				PropertyKey key = graphMgmt.getPropertyKey(prop.getName());
+				boolean isChanged = false;
+				if (!prop.getCardinality().equals(key.cardinality())) {
+					isChanged = true;
+				}
+				if (!prop.getTypeClass().equals(key.dataType())) {
+					isChanged = true;
+				}
+				if (isChanged) {
+					//must modify!
+					// NOTE(review): replaceProperty is currently an empty
+					// stub, so a changed property is effectively ignored.
+					this.replaceProperty(prop);
+				}
+			} else {
+				//create a new property key
+				System.out.println("Key: " + prop.getName() + " not found - adding");
+				graphMgmt.makePropertyKey(prop.getName()).dataType(prop.getTypeClass()).cardinality(prop.getCardinality()).make();
+			}
+		}
+
+	}
+
+	/**
+	 * Creates the indexes: for each OXM-defined index, resolves its property
+	 * keys, marks the index changed when the key list in the graph differs
+	 * (count or order), and delegates to createIndex.
+	 */
+	private void createIndexes() {
+
+		for (DBIndex index : aaiIndexes) {
+			Set<DBProperty> props = index.getProperties();
+			boolean isChanged = false;
+			boolean isNew = false;
+			List<PropertyKey> keyList = new ArrayList<>();
+			for (DBProperty prop : props) {
+				keyList.add(graphMgmt.getPropertyKey(prop.getName()));
+			}
+			if (graphMgmt.containsGraphIndex(index.getName())) {
+				JanusGraphIndex JanusGraphIndex = graphMgmt.getGraphIndex(index.getName());
+				PropertyKey[] dbKeys = JanusGraphIndex.getFieldKeys();
+				// Changed when the key count differs or any key differs at
+				// the same position.
+				if (dbKeys.length != keyList.size()) {
+					isChanged = true;
+				} else {
+					int i = 0;
+					for (PropertyKey key : keyList) {
+						if (!dbKeys[i].equals(key)) {
+							isChanged = true;
+							break;
+						}
+						i++;
+					}
+				}
+			} else {
+				isNew = true;
+			}
+			if (keyList.size() > 0) {
+				this.createIndex(graphMgmt, index.getName(), keyList, index.isUnique(), isNew, isChanged);
+			}
+		}
+	}
+
+	// Use EdgeRules to make sure edgeLabels are defined in the db.  NOTE: the multiplicty used here is
+	// always "MULTI".  This is not the same as our internal "Many2Many", "One2One", "One2Many" or "Many2One"
+	// We use the same edge-label for edges between many different types of nodes and our internal
+	// multiplicty definitions depends on which two types of nodes are being connected.
+	/**
+	 * Creates the edge labels. Existing labels are left untouched; only
+	 * missing labels are created.
+	 */
+	private void createEdgeLabels() {
+
+
+		for (EdgeProperty prop : aaiEdgeProperties) {
+
+			if (graphMgmt.containsEdgeLabel(prop.getName())) {
+				// see what changed
+			} else {
+				graphMgmt.makeEdgeLabel(prop.getName()).multiplicity(prop.getMultiplicity()).make();
+			}
+
+		}
+
+
+	}
+
+	/**
+	 * Creates the property.
+	 *
+	 * NOTE(review): duplicates the per-property logic of createPropertyKeys()
+	 * but against a caller-supplied management transaction -- consider
+	 * merging the two.
+	 *
+	 * @param mgmt the management transaction to create the key in
+	 * @param prop the OXM property definition
+	 */
+	private void createProperty(JanusGraphManagement mgmt, DBProperty prop) {
+		if (mgmt.containsPropertyKey(prop.getName())) {
+			PropertyKey key = mgmt.getPropertyKey(prop.getName());
+			boolean isChanged = false;
+			if (!prop.getCardinality().equals(key.cardinality())) {
+				isChanged = true;
+			}
+			if (!prop.getTypeClass().equals(key.dataType())) {
+				isChanged = true;
+			}
+			if (isChanged) {
+				//must modify!
+				// NOTE(review): replaceProperty is an empty stub (see below).
+				this.replaceProperty(prop);
+			}
+		} else {
+			//create a new property key
+			System.out.println("Key: " + prop.getName() + " not found - adding");
+			mgmt.makePropertyKey(prop.getName()).dataType(prop.getTypeClass()).cardinality(prop.getCardinality()).make();
+		}
+	}
+
+	/**
+	 * Creates the index.
+	 *
+	 * NOTE(review): only the isNew path actually builds anything; the
+	 * isChanged handling is disabled (commented out below), so a changed
+	 * index is currently left as-is.
+	 *
+	 * @param mgmt the management transaction to build the index in
+	 * @param indexName the index name
+	 * @param keys the property keys the index covers, in order
+	 * @param isUnique whether the index enforces uniqueness
+	 * @param isNew whether the index does not yet exist in the graph
+	 * @param isChanged whether the existing index differs from the OXM definition
+	 */
+	private void createIndex(JanusGraphManagement mgmt, String indexName, List<PropertyKey> keys, boolean isUnique, boolean isNew, boolean isChanged) {
+
+		/*if (isChanged) {
+			System.out.println("Changing index: " + indexName);
+			JanusGraphIndex oldIndex = mgmt.getGraphIndex(indexName);
+			mgmt.updateIndex(oldIndex, SchemaAction.DISABLE_INDEX);
+			mgmt.commit();
+			//cannot remove indexes
+			//graphMgmt.updateIndex(oldIndex, SchemaAction.REMOVE_INDEX);
+		}*/
+		if (isNew || isChanged) {
+
+			if (isNew) {
+				IndexBuilder builder = mgmt.buildIndex(indexName,Vertex.class);
+				for (PropertyKey k : keys) {
+					builder.addKey(k);
+				}
+				if (isUnique) {
+					builder.unique();
+				}
+				builder.buildCompositeIndex();
+				System.out.println("Built index for " + indexName + " with keys: " + keys);
+
+				//mgmt.commit();
+			}
+
+			//mgmt = graph.asAdmin().getManagementSystem();
+			//mgmt.updateIndex(mgmt.getGraphIndex(indexName), SchemaAction.REGISTER_INDEX);
+			//mgmt.commit();
+
+			try {
+				// NOTE(review): the try body is fully commented out, so the
+				// catch below is unreachable in practice.
+				//waitForCompletion(indexName);
+				//JanusGraphIndexRepair.hbaseRepair(AAIConstants.AAI_CONFIG_FILENAME, indexName, "");
+			} catch (Exception e) {
+				// TODO Auto-generated catch block
+				graph.tx().rollback();
+				graph.close();
+				e.printStackTrace();
+			}
+
+			//mgmt = graph.asAdmin().getManagementSystem();
+			//mgmt.updateIndex(mgmt.getGraphIndex(indexName), SchemaAction.REINDEX);
+
+			//mgmt.updateIndex(mgmt.getGraphIndex(indexName), SchemaAction.ENABLE_INDEX);
+
+			//mgmt.commit();
+
+		}
+	}
+
+	/**
+	 * Wait for completion.
+	 *
+	 * Polls every 500 ms until every field key of the named index reports
+	 * SchemaStatus.REGISTERED. NOTE(review): there is no timeout, so this
+	 * can loop forever if the index never reaches REGISTERED.
+	 *
+	 * @param name the name of the index to wait on
+	 * @throws InterruptedException the interrupted exception
+	 */
+	private void waitForCompletion(String name) throws InterruptedException {
+
+		boolean registered = false;
+		long before = System.currentTimeMillis();
+		while (!registered) {
+		    Thread.sleep(500L);
+		    JanusGraphManagement mgmt = graph.openManagement();
+		    JanusGraphIndex idx  = mgmt.getGraphIndex(name);
+		    registered = true;
+		    for (PropertyKey k : idx.getFieldKeys()) {
+		        SchemaStatus s = idx.getIndexStatus(k);
+		        registered &= s.equals(SchemaStatus.REGISTERED);
+		    }
+		    // Read-only inspection; discard the management transaction.
+		    mgmt.rollback();
+		}
+		System.out.println("Index REGISTERED in " + (System.currentTimeMillis() - before) + " ms");
+	}
+
+	/**
+	 * Replace property.
+	 *
+	 * NOTE(review): empty stub -- property replacement is not implemented,
+	 * so callers that detect a changed property key do nothing. Confirm
+	 * whether this is intentional.
+	 *
+	 * @param key the property definition to replace
+	 */
+	private void replaceProperty(DBProperty key) {
+		
+		
+		
+		
+	}
+
+	/**
+	 * Update index.
+	 *
+	 * Ensures the index's property keys exist, then creates or rebuilds the
+	 * index inside its own management transaction.
+	 *
+	 * @param index the index definition to apply
+	 */
+	public void updateIndex(DBIndex index) {
+
+		JanusGraphManagement mgmt = graph.openManagement();
+		List<PropertyKey> keys = new ArrayList<>();
+		boolean isNew = false;
+		boolean isChanged = false;
+		for (DBProperty prop : index.getProperties()) {
+			createProperty(mgmt, prop);
+			keys.add(mgmt.getPropertyKey(prop.getName()));
+		}
+		if (mgmt.containsGraphIndex(index.getName())) {
+			System.out.println("index already exists");
+			isNew = false;
+			isChanged = true;
+		} else {
+			isNew = true;
+			isChanged = false;
+		}
+		this.createIndex(mgmt, index.getName(), keys, index.isUnique(), isNew, isChanged);
+
+		mgmt.commit();
+		
+	}
+	
+	
+	
+	
+	
+}
diff --git a/src/main/java/org/onap/aai/db/schema/Named.java b/src/main/java/org/onap/aai/db/schema/Named.java
new file mode 100644 (file)
index 0000000..f12699b
--- /dev/null
@@ -0,0 +1,30 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.db.schema;
+
/**
 * A schema element that exposes a name (e.g. a property or index definition).
 */
public interface Named {

	/**
	 * Gets the name of this element.
	 *
	 * @return the name
	 */
	// Idiom fix: interface members are implicitly public; the redundant
	// "public" modifier is dropped.
	String getName();
}
diff --git a/src/main/java/org/onap/aai/db/schema/ScriptDriver.java b/src/main/java/org/onap/aai/db/schema/ScriptDriver.java
new file mode 100644 (file)
index 0000000..dca8e83
--- /dev/null
@@ -0,0 +1,123 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.db.schema;\r
+\r
+import java.io.IOException;\r
+import java.util.UUID;\r
+\r
+import com.fasterxml.jackson.databind.ObjectMapper;\r
+import org.apache.commons.configuration.ConfigurationException;\r
+import org.apache.commons.configuration.PropertiesConfiguration;\r
+import org.codehaus.jackson.JsonGenerationException;\r
+import org.onap.aai.dbmap.AAIGraphConfig;\r
+import org.onap.aai.exceptions.AAIException;\r
+import org.onap.aai.setup.SchemaVersions;\r
+import org.onap.aai.setup.SchemaVersion;\r
+import org.onap.aai.logging.LoggingContext;\r
+import org.onap.aai.logging.LoggingContext.StatusCode;\r
+import org.onap.aai.util.AAIConfig;\r
+import com.beust.jcommander.JCommander;\r
+import com.beust.jcommander.Parameter;\r
+import org.janusgraph.core.JanusGraphFactory;\r
+import org.janusgraph.core.JanusGraph;\r
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;\r
+\r
+public class ScriptDriver {\r
+\r
+       /**\r
+        * The main method.\r
+        *\r
+        * @param args the arguments\r
+        * @throws AAIException the AAI exception\r
+        * @throws JsonGenerationException the json generation exception\r
+        * @throws JsonMappingException the json mapping exception\r
+        * @throws IOException Signals that an I/O exception has occurred.\r
+        */\r
+       public static void main (String[] args) throws AAIException, IOException, ConfigurationException {\r
+               CommandLineArgs cArgs = new CommandLineArgs();\r
+               \r
+               LoggingContext.init();\r
+               LoggingContext.component("DBSchemaScriptDriver");\r
+               LoggingContext.partnerName("NA");\r
+               LoggingContext.targetEntity("AAI");\r
+               LoggingContext.requestId(UUID.randomUUID().toString());\r
+               LoggingContext.serviceName("AAI");\r
+               LoggingContext.targetServiceName("main");\r
+               LoggingContext.statusCode(StatusCode.COMPLETE);\r
+               LoggingContext.responseCode(LoggingContext.SUCCESS);\r
+               \r
+               new JCommander(cArgs, args);\r
+               \r
+               if (cArgs.help) {\r
+                       System.out.println("-c [path to graph configuration] -type [what you want to audit - oxm or graph]");\r
+               }\r
+\r
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(\r
+                               "org.onap.aai.config",\r
+                               "org.onap.aai.setup"\r
+               );\r
+\r
+               AuditorFactory auditorFactory = ctx.getBean(AuditorFactory.class);\r
+               SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);\r
+\r
+               String config = cArgs.config;\r
+               AAIConfig.init();\r
+\r
+               PropertiesConfiguration graphConfiguration = new AAIGraphConfig\r
+                       .Builder(config)\r
+                       .forService(ScriptDriver.class.getSimpleName())\r
+                       .withGraphType("NA")\r
+                       .buildConfiguration();\r
+\r
+               try (JanusGraph graph = JanusGraphFactory.open(graphConfiguration)) {\r
+                       if (!("oxm".equals(cArgs.type) || "graph".equals(cArgs.type))) {\r
+                               System.out.println("type: " + cArgs.type + " not recognized.");\r
+                               System.exit(1);\r
+                       }\r
+\r
+                       AuditDoc doc = null;\r
+                       if ("oxm".equals(cArgs.type)) {\r
+                               doc = auditorFactory.getOXMAuditor(schemaVersions.getDefaultVersion()).getAuditDoc();\r
+                       } else if ("graph".equals(cArgs.type)) {\r
+                               doc = auditorFactory.getGraphAuditor(graph).getAuditDoc();\r
+                       }\r
+\r
+                       ObjectMapper mapper = new ObjectMapper();\r
+\r
+                       String json = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(doc);\r
+                       System.out.println(json);\r
+               }\r
+       }\r
+       \r
+}\r
+\r
/**
 * Command-line arguments for {@link ScriptDriver}, parsed by JCommander.
 */
class CommandLineArgs {
	
	// Prints a short usage message when supplied.
	@Parameter(names = "--help", description = "Help")
	public boolean help = false;
	
	// Path to the JanusGraph properties configuration file.
	@Parameter(names = "-c", description = "Configuration", required=true)
	public String config;
	
	// What to audit: "oxm" or "graph".
	// NOTE(review): the "graph" default is unreachable — the parameter is
	// required=true, so JCommander rejects a missing -type. Confirm intent.
	@Parameter(names = "-type", description = "Type", required=true)
	public String type = "graph";
	

}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/dbgen/DupeTool.java b/src/main/java/org/onap/aai/dbgen/DupeTool.java
new file mode 100644 (file)
index 0000000..7b7ef99
--- /dev/null
@@ -0,0 +1,1854 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dbgen;
+
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.util.*;
+import java.util.Map.Entry;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.AAIGraphConfig;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.edges.enums.EdgeProperty;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LogFormatTools;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+import org.onap.aai.edges.enums.AAIDirection;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.slf4j.MDC;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+public class DupeTool {
+
    // Logger named after the tool so messages route to the dupeTool appender.
    private static final EELFLogger logger = EELFManager.getInstance().getLogger(DupeTool.class.getSimpleName());
    // Identifiers stamped on graph reads done by this tool.
    private static final String FROMAPPID = "AAI-DB";
    // One transaction id generated per JVM run.
    private static final String TRANSID = UUID.randomUUID().toString();

    // NOTE(review): not referenced in this part of the file — presumably
    // selects the "real" database when opening the graph; confirm usage.
    private static String graphType = "realdb";
    private final SchemaVersions schemaVersions;

    // When false, exit(...) is a no-op so tests can call execute() safely.
    private boolean shouldExitVm = true;
+
    /**
     * Conditionally terminates the JVM.
     *
     * <p>NOTE(review): the {@code statusCode} parameter is ignored — when
     * {@code shouldExitVm} is true this always calls {@code System.exit(1)},
     * even for callers passing 0 (and several call sites pass 0 on error
     * paths). Confirm whether the constant exit code is intentional before
     * changing it.
     *
     * @param statusCode requested exit status (currently ignored)
     */
    public void exit(int statusCode) {
        if (this.shouldExitVm) {
            System.exit(1);
        }
    }
+
    // Creates MOXY loaders used to introspect node types.
    private LoaderFactory loaderFactory;

    /**
     * Creates a DupeTool that exits the JVM on completion or error.
     *
     * @param loaderFactory factory for schema loaders
     * @param schemaVersions configured schema versions (default version is used)
     */
    public DupeTool(LoaderFactory loaderFactory, SchemaVersions schemaVersions){
        this(loaderFactory, schemaVersions, true);
    }

    /**
     * Creates a DupeTool.
     *
     * @param loaderFactory factory for schema loaders
     * @param schemaVersions configured schema versions
     * @param shouldExitVm when false, {@link #exit(int)} becomes a no-op
     *                     (useful for invoking the tool from tests)
     */
    public DupeTool(LoaderFactory loaderFactory, SchemaVersions schemaVersions, boolean shouldExitVm){
        this.loaderFactory = loaderFactory;
        this.schemaVersions = schemaVersions;
        this.shouldExitVm = shouldExitVm;
    }
+
    /**
     * Main work routine: finds sets of duplicate vertices of the requested
     * nodeType and, when {@code -autoFix} is passed, sleeps, re-checks the
     * duplicates in a second transaction, and deletes the non-keeper nodes
     * that were confirmed duplicates on both passes.
     *
     * <p>Calls {@link #exit(int)} (which may terminate the JVM) on both error
     * paths and normal completion.
     *
     * @param args command-line arguments: -nodeType (required), -userId
     *             (required, not "aaiadmin"), and optional -sleepMinutes,
     *             -maxFix, -timeWindowMinutes, -skipHostCheck,
     *             -specialTenantRule, -autoFix, -params4Collect
     */
    public void execute(String[] args){

        String defVersion = "v12";
        try {
            defVersion = AAIConfig.get(AAIConstants.AAI_DEFAULT_API_VERSION_PROP);
        } catch (AAIException ae) {
            String emsg = "Error trying to get default API Version property \n";
            System.out.println(emsg);
            LoggingContext.statusCode(StatusCode.ERROR);
            LoggingContext.responseCode(LoggingContext.DATA_ERROR);
            logger.error(emsg);
            exit(0);
        }


        Loader loader = null;
        try {
            loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion());
        } catch (Exception ex) {
            LoggingContext.statusCode(StatusCode.ERROR);
            LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
            logger.error("ERROR - Could not do the moxyMod.init() " + LogFormatTools.getStackTop(ex));
            exit(1);
        }
        JanusGraph graph1 = null;
        JanusGraph graph2 = null;
        Graph gt1 = null;
        Graph gt2 = null;

        boolean specialTenantRule = false;

        try {
            AAIConfig.init();
            int maxRecordsToFix = AAIConstants.AAI_DUPETOOL_DEFAULT_MAX_FIX;
            int sleepMinutes = AAIConstants.AAI_DUPETOOL_DEFAULT_SLEEP_MINUTES;
            int timeWindowMinutes = 0;   // A value of 0 means that we will not have a time-window -- we will look
            // at all nodes of the passed-in nodeType.
            long windowStartTime = 0;  // Translation of the window into a starting timestamp

            // Optional overrides of the defaults from aaiconfig.properties.
            try {
                String maxFixStr = AAIConfig.get("aai.dupeTool.default.max.fix");
                if (maxFixStr != null && !maxFixStr.equals("")) {
                    maxRecordsToFix = Integer.parseInt(maxFixStr);
                }
                String sleepStr = AAIConfig.get("aai.dupeTool.default.sleep.minutes");
                if (sleepStr != null && !sleepStr.equals("")) {
                    sleepMinutes = Integer.parseInt(sleepStr);
                }
            } catch (Exception e) {
                // Don't worry, we'll just use the defaults that we got from AAIConstants
                logger.warn("WARNING - could not pick up aai.dupeTool values from aaiconfig.properties file.  Will use defaults. ");
            }

            String nodeTypeVal = "";
            String userIdVal = "";
            String filterParams = "";
            Boolean skipHostCheck = false;
            Boolean autoFix = false;
            String argStr4Msg = "";
            Introspector obj = null;

            // ---- Parse command-line arguments ----
            if (args != null && args.length > 0) {
                // They passed some arguments in that will affect processing
                for (int i = 0; i < args.length; i++) {
                    String thisArg = args[i];
                    argStr4Msg = argStr4Msg + " " + thisArg;

                    if (thisArg.equals("-nodeType")) {
                        i++;
                        if (i >= args.length) {
                            LoggingContext.statusCode(StatusCode.ERROR);
                            LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                            logger.error(" No value passed with -nodeType option.  ");
                            exit(0);
                        }
                        nodeTypeVal = args[i];
                        argStr4Msg = argStr4Msg + " " + nodeTypeVal;
                    } else if (thisArg.equals("-sleepMinutes")) {
                        i++;
                        if (i >= args.length) {
                            LoggingContext.statusCode(StatusCode.ERROR);
                            LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                            logger.error("No value passed with -sleepMinutes option.");
                            exit(0);
                        }
                        String nextArg = args[i];
                        try {
                            sleepMinutes = Integer.parseInt(nextArg);
                        } catch (Exception e) {
                            LoggingContext.statusCode(StatusCode.ERROR);
                            LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                            logger.error("Bad value passed with -sleepMinutes option: ["
                                    + nextArg + "]");
                            exit(0);
                        }
                        argStr4Msg = argStr4Msg + " " + sleepMinutes;
                    } else if (thisArg.equals("-maxFix")) {
                        i++;
                        if (i >= args.length) {
                            LoggingContext.statusCode(StatusCode.ERROR);
                            LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                            logger.error("No value passed with -maxFix option.");
                            exit(0);
                        }
                        String nextArg = args[i];
                        try {
                            maxRecordsToFix = Integer.parseInt(nextArg);
                        } catch (Exception e) {
                            LoggingContext.statusCode(StatusCode.ERROR);
                            LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                            logger.error("Bad value passed with -maxFix option: ["
                                    + nextArg + "]");
                            exit(0);
                        }
                        argStr4Msg = argStr4Msg + " " + maxRecordsToFix;
                    } else if (thisArg.equals("-timeWindowMinutes")) {
                        i++;
                        if (i >= args.length) {
                            LoggingContext.statusCode(StatusCode.ERROR);
                            LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                            logger.error("No value passed with -timeWindowMinutes option.");
                            exit(0);
                        }
                        String nextArg = args[i];
                        try {
                            timeWindowMinutes = Integer.parseInt(nextArg);
                        } catch (Exception e) {
                            LoggingContext.statusCode(StatusCode.ERROR);
                            LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                            logger.error("Bad value passed with -timeWindowMinutes option: ["
                                    + nextArg + "]");
                            exit(0);
                        }
                        argStr4Msg = argStr4Msg + " " + timeWindowMinutes;
                    } else if (thisArg.equals("-skipHostCheck")) {
                        skipHostCheck = true;
                    } else if (thisArg.equals("-specialTenantRule")) {
                        specialTenantRule = true;
                    } else if (thisArg.equals("-autoFix")) {
                        autoFix = true;
                    } else if (thisArg.equals("-userId")) {
                        i++;
                        if (i >= args.length) {
                            LoggingContext.statusCode(StatusCode.ERROR);
                            LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                            logger.error(" No value passed with -userId option.  ");
                            exit(0);
                        }
                        userIdVal = args[i];
                        argStr4Msg = argStr4Msg + " " + userIdVal;
                    } else if (thisArg.equals("-params4Collect")) {
                        i++;
                        if (i >= args.length) {
                            LoggingContext.statusCode(StatusCode.ERROR);
                            LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                            logger.error(" No value passed with -params4Collect option.  ");
                            exit(0);
                        }
                        filterParams = args[i];
                        argStr4Msg = argStr4Msg + " " + filterParams;
                    } else {
                        LoggingContext.statusCode(StatusCode.ERROR);
                        LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                        logger.error(" Unrecognized argument passed to DupeTool: ["
                                + thisArg + "]. ");
                        logger.error(" Valid values are: -action -userId -vertexId -edgeId -overRideProtection ");
                        exit(0);
                    }
                }
            }

            // ---- Validate required parameters ----
            userIdVal = userIdVal.trim();
            if ((userIdVal.length() < 6) || userIdVal.toUpperCase().equals("AAIADMIN")) {
                String emsg = "userId parameter is required.  [" + userIdVal + "] passed to DupeTool(). userId must be not empty and not aaiadmin \n";
                System.out.println(emsg);
                LoggingContext.statusCode(StatusCode.ERROR);
                LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                logger.error(emsg);
                exit(0);
            }

            nodeTypeVal = nodeTypeVal.trim();
            if (nodeTypeVal.equals("")) {
                String emsg = " nodeType is a required parameter for DupeTool().\n";
                System.out.println(emsg);
                LoggingContext.statusCode(StatusCode.ERROR);
                LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
                logger.error(emsg);
                exit(0);
            } else {
                obj = loader.introspectorFromName(nodeTypeVal);
            }

            if (timeWindowMinutes > 0) {
                // Translate the window value (ie. 30 minutes) into a unix timestamp like
                //    we use in the db - so we can select data created after that time.
                windowStartTime = figureWindowStartTime(timeWindowMinutes);
            }

            String msg = "";
            msg = "DupeTool called with these params: [" + argStr4Msg + "]";
            System.out.println(msg);
            logger.info(msg);

            // Determine what the key fields are for this nodeType (and we want them ordered)
            ArrayList<String> keyPropNamesArr = new ArrayList<String>(obj.getKeys());

            // Determine what kinds of nodes (if any) this nodeType is dependent on for uniqueness
            ArrayList<String> depNodeTypeList = new ArrayList<String>();
            Collection<String> depNTColl = obj.getDependentOn();
            Iterator<String> ntItr = depNTColl.iterator();
            while (ntItr.hasNext()) {
                depNodeTypeList.add(ntItr.next());
            }

            // Based on the nodeType, window and filterData, figure out the vertices that we will be checking
            System.out.println("    ---- NOTE --- about to open graph (takes a little while)--------\n");
            graph1 = setupGraph(logger);
            gt1 = getGraphTransaction(graph1, logger);
            ArrayList<Vertex> verts2Check = new ArrayList<Vertex>();
            try {
                verts2Check = figureOutNodes2Check(TRANSID, FROMAPPID, gt1,
                        nodeTypeVal, windowStartTime, filterParams, logger);
            } catch (AAIException ae) {
                String emsg = "Error trying to get initial set of nodes to check. \n";
                System.out.println(emsg);
                LoggingContext.statusCode(StatusCode.ERROR);
                LoggingContext.responseCode(LoggingContext.DATA_ERROR);
                logger.error(emsg);
                exit(0);
            }

            if (verts2Check == null || verts2Check.size() == 0) {
                msg = " No vertices found to check.  Used nodeType = [" + nodeTypeVal
                        + "], windowMinutes = " + timeWindowMinutes
                        + ", filterData = [" + filterParams + "].";
                logger.info(msg);
                System.out.println(msg);
                exit(0);
            } else {
                msg = " Found " + verts2Check.size() + " nodes of type " + nodeTypeVal
                        + " to check using passed filterParams and windowStartTime. ";
                logger.info(msg);
                System.out.println(msg);
            }

            // ---- First pass: collect duplicate sets ----
            ArrayList<String> firstPassDupeSets = new ArrayList<String>();
            ArrayList<String> secondPassDupeSets = new ArrayList<String>();
            Boolean isDependentOnParent = false;
            if (!obj.getDependentOn().isEmpty()) {
                isDependentOnParent = true;
            }

            if (isDependentOnParent) {
                firstPassDupeSets = getDupeSets4DependentNodes(TRANSID, FROMAPPID, gt1,
                        defVersion, nodeTypeVal, verts2Check, keyPropNamesArr, loader,
                        specialTenantRule, logger);
            } else {
                firstPassDupeSets = getDupeSets4NonDepNodes(TRANSID, FROMAPPID, gt1,
                        defVersion, nodeTypeVal, verts2Check, keyPropNamesArr,
                        specialTenantRule, loader, logger);
            }

            msg = " Found " + firstPassDupeSets.size() + " sets of duplicates for this request. ";
            logger.info(msg);
            System.out.println(msg);
            if (firstPassDupeSets.size() > 0) {
                msg = " Here is what they look like: ";
                logger.info(msg);
                System.out.println(msg);
                for (int x = 0; x < firstPassDupeSets.size(); x++) {
                    msg = " Set " + x + ": [" + firstPassDupeSets.get(x) + "] ";
                    logger.info(msg);
                    System.out.println(msg);
                    showNodeDetailsForADupeSet(gt1, firstPassDupeSets.get(x), logger);
                }
            }

            // ---- Optional auto-fix: sleep, re-check in a new transaction, then delete ----
            boolean didSomeDeletesFlag = false;
            ArrayList<String> dupeSetsToFix = new ArrayList<String>();
            if (autoFix && firstPassDupeSets.size() == 0) {
                msg = "AutoFix option is on, but no dupes were found on the first pass.  Nothing to fix.";
                logger.info(msg);
                System.out.println(msg);
            } else if (autoFix) {
                // We will try to fix any dupes that we can - but only after sleeping for a
                // time and re-checking the list of duplicates using a seperate transaction.
                try {
                    msg = "\n\n-----------  About to sleep for " + sleepMinutes + " minutes."
                            + "  -----------\n\n";
                    logger.info(msg);
                    System.out.println(msg);
                    int sleepMsec = sleepMinutes * 60 * 1000;
                    Thread.sleep(sleepMsec);
                } catch (InterruptedException ie) {
                    msg = "\n >>> Sleep Thread has been Interrupted <<< ";
                    logger.info(msg);
                    System.out.println(msg);
                    exit(0);
                }

                graph2 = setupGraph(logger);
                gt2 = getGraphTransaction(graph2, logger);
                if (isDependentOnParent) {
                    secondPassDupeSets = getDupeSets4DependentNodes(TRANSID, FROMAPPID, gt2,
                            defVersion, nodeTypeVal, verts2Check, keyPropNamesArr, loader,
                            specialTenantRule, logger);
                } else {
                    secondPassDupeSets = getDupeSets4NonDepNodes(TRANSID, FROMAPPID, gt2,
                            defVersion, nodeTypeVal, verts2Check, keyPropNamesArr,
                            specialTenantRule, loader, logger);
                }

                // Only delete dupes that were present on BOTH passes.
                dupeSetsToFix = figureWhichDupesStillNeedFixing(firstPassDupeSets, secondPassDupeSets, logger);
                msg = "\nAfter running a second pass, there were " + dupeSetsToFix.size()
                        + " sets of duplicates that we think can be deleted. ";
                logger.info(msg);
                System.out.println(msg);
                if (dupeSetsToFix.size() > 0) {
                    msg = " Here is what the sets look like: ";
                    logger.info(msg);
                    System.out.println(msg);
                    for (int x = 0; x < dupeSetsToFix.size(); x++) {
                        msg = " Set " + x + ": [" + dupeSetsToFix.get(x) + "] ";
                        logger.info(msg);
                        System.out.println(msg);
                        showNodeDetailsForADupeSet(gt2, dupeSetsToFix.get(x), logger);
                    }
                }

                if (dupeSetsToFix.size() > 0) {
                    if (dupeSetsToFix.size() > maxRecordsToFix) {
                        String infMsg = " >> WARNING >>  Dupe list size ("
                                + dupeSetsToFix.size()
                                + ") is too big.  The maxFix we are using is: "
                                + maxRecordsToFix
                                + ".  No nodes will be deleted. (use the"
                                + " -maxFix option to override this limit.)";
                        System.out.println(infMsg);
                        logger.info(infMsg);
                    } else {
                        // Call the routine that fixes known dupes
                        didSomeDeletesFlag = deleteNonKeepers(gt2, dupeSetsToFix, logger);
                    }
                }
                if (didSomeDeletesFlag) {
                    gt2.tx().commit();
                }
            }

        } catch (AAIException e) {
            logger.error("Caught AAIException while running the dupeTool: " + LogFormatTools.getStackTop(e));
            ErrorLogHelper.logException(e);
        } catch (Exception ex) {
            logger.error("Caught exception while running the dupeTool: " + LogFormatTools.getStackTop(ex));
            ErrorLogHelper.logError("AAI_6128", ex.getMessage() + ", resolve and rerun the dupeTool. ");
        } finally {
            // ---- Cleanup: roll back read-only transactions and close both graphs ----
            if (gt1 != null && gt1.tx().isOpen()) {
                // We don't change any data with gt1 - so just roll it back so it knows we're done.
                try {
                    gt1.tx().rollback();
                } catch (Exception ex) {
                    // Don't throw anything because JanusGraph sometimes is just saying that the graph is already closed
                    logger.warn("WARNING from final gt1.rollback() " + LogFormatTools.getStackTop(ex));
                }
            }

            if (gt2 != null && gt2.tx().isOpen()) {
                // Any changes that worked correctly should have already done
                // their commits.
                try {
                    gt2.tx().rollback();
                } catch (Exception ex) {
                    // Don't throw anything because JanusGraph sometimes is just saying that the graph is already closed
                    logger.warn("WARNING from final gt2.rollback() " + LogFormatTools.getStackTop(ex));
                }
            }

            try {
                if (graph1 != null && graph1.isOpen()) {
                    closeGraph(graph1, logger);
                }
            } catch (Exception ex) {
                // Don't throw anything because JanusGraph sometimes is just saying that the graph is already closed{
                logger.warn("WARNING from final graph1.shutdown() " + LogFormatTools.getStackTop(ex));
            }

            try {
                if (graph2 != null && graph2.isOpen()) {
                    closeGraph(graph2, logger);
                }
            } catch (Exception ex) {
                // Don't throw anything because JanusGraph sometimes is just saying that the graph is already closed{
                logger.warn("WARNING from final graph2.shutdown() " + LogFormatTools.getStackTop(ex));
            }
        }

        exit(0);
    }
+
+    /**
+     * The main method.
+     *
+     * @param args the arguments
+     */
+    public static void main(String[] args) {
+
+        System.setProperty("aai.service.name", DupeTool.class.getSimpleName());
+        // Set the logging file properties to be used by EELFManager
+        Properties props = System.getProperties();
+        props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "dupeTool-logback.xml");
+        props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
+        MDC.put("logFilenameAppender", DupeTool.class.getSimpleName());
+
+        LoggingContext.init();
+        LoggingContext.partnerName(FROMAPPID);
+        LoggingContext.serviceName(AAIConstants.AAI_RESOURCES_MS);
+        LoggingContext.component("dupeTool");
+        LoggingContext.targetEntity(AAIConstants.AAI_RESOURCES_MS);
+        LoggingContext.targetServiceName("main");
+        LoggingContext.requestId(TRANSID);
+        LoggingContext.statusCode(StatusCode.COMPLETE);
+        LoggingContext.responseCode(LoggingContext.SUCCESS);
+
+        AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
+                "org.onap.aai.config",
+                "org.onap.aai.setup"
+        );
+
+        LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
+        SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
+        DupeTool dupeTool = new DupeTool(loaderFactory, schemaVersions);
+        dupeTool.execute(args);
+    }// end of main()
+
+
+    /**
+     * Collect Duplicate Sets for nodes that are NOT dependent on parent nodes.
+     *
+     * @param transId        the trans id
+     * @param fromAppId      the from app id
+     * @param g              the g
+     * @param version        the version
+     * @param nType          the n type
+     * @param passedVertList the passed vert list
+     * @param dbMaps         the db maps
+     * @return the array list
+     */
+    private ArrayList<String> getDupeSets4NonDepNodes(String transId,
+                                                             String fromAppId, Graph g, String version, String nType,
+                                                             ArrayList<Vertex> passedVertList,
+                                                             ArrayList<String> keyPropNamesArr,
+                                                             Boolean specialTenantRule, Loader loader, EELFLogger logger) {
+
+        ArrayList<String> returnList = new ArrayList<String>();
+
+        // We've been passed a set of nodes that we want to check.
+        // They are all NON-DEPENDENT nodes meaning that they should be
+        // unique in the DB based on their KEY DATA alone.  So, if
+        // we group them by their key data - if any key has more than one
+        // vertex mapped to it, those vertices are dupes.
+        //
+        // When we find duplicates, we return then as a String (there can be
+        //     more than one duplicate for one set of key data):
+        // Each element in the returned arrayList might look like this:
+        // "1234|5678|keepVid=UNDETERMINED" (if there were 2 dupes, and we
+        // couldn't figure out which one to keep)
+        // or, "100017|200027|30037|keepVid=30037" (if there were 3 dupes and we
+        // thought the third one was the one that should survive)
+
+        HashMap<String, ArrayList<String>> keyVals2VidHash = new HashMap<String, ArrayList<String>>();
+        HashMap<String, Vertex> vtxHash = new HashMap<String, Vertex>();
+        Iterator<Vertex> pItr = passedVertList.iterator();
+        while (pItr.hasNext()) {
+            try {
+                Vertex tvx = pItr.next();
+                String thisVid = tvx.id().toString();
+                vtxHash.put(thisVid, tvx);
+
+                // if there are more than one vertexId mapping to the same keyProps -- they are dupes
+                String hKey = getNodeKeyValString(tvx, keyPropNamesArr, logger);
+                if (keyVals2VidHash.containsKey(hKey)) {
+                    // We've already seen this key
+                    ArrayList<String> tmpVL = (ArrayList<String>) keyVals2VidHash.get(hKey);
+                    tmpVL.add(thisVid);
+                    keyVals2VidHash.put(hKey, tmpVL);
+                } else {
+                    // First time for this key
+                    ArrayList<String> tmpVL = new ArrayList<String>();
+                    tmpVL.add(thisVid);
+                    keyVals2VidHash.put(hKey, tmpVL);
+                }
+            } catch (Exception e) {
+                logger.warn(" >>> Threw an error in getDupeSets4NonDepNodes - just absorb this error and move on. " + LogFormatTools.getStackTop(e));
+            }
+        }
+
+        for (Map.Entry<String, ArrayList<String>> entry : keyVals2VidHash.entrySet()) {
+            ArrayList<String> vidList = entry.getValue();
+            try {
+                if (!vidList.isEmpty() && vidList.size() > 1) {
+                    // There are more than one vertex id's using the same key info
+                    String dupesStr = "";
+                    ArrayList<Vertex> vertList = new ArrayList<Vertex>();
+                    for (int i = 0; i < vidList.size(); i++) {
+                        String tmpVid = vidList.get(i);
+                        dupesStr = dupesStr + tmpVid + "|";
+                        vertList.add(vtxHash.get(tmpVid));
+                    }
+
+                    if (dupesStr != "") {
+                        Vertex prefV = getPreferredDupe(transId, fromAppId,
+                                g, vertList, version, specialTenantRule, loader, logger);
+                        if (prefV == null) {
+                            // We could not determine which duplicate to keep
+                            dupesStr = dupesStr + "KeepVid=UNDETERMINED";
+                            returnList.add(dupesStr);
+                        } else {
+                            dupesStr = dupesStr + "KeepVid=" + prefV.id();
+                            returnList.add(dupesStr);
+                        }
+                    }
+                }
+            } catch (Exception e) {
+                logger.warn(" >>> Threw an error in getDupeSets4NonDepNodes - just absorb this error and move on. " + LogFormatTools.getStackTop(e));
+            }
+
+        }
+        return returnList;
+
+    }// End of getDupeSets4NonDepNodes()
+
+
+    /**
+     * Collect Duplicate Sets for nodes that are dependent on parent nodes.
+     *
+     * @param transId           the trans id
+     * @param fromAppId         the from app id
+     * @param g                 the g
+     * @param version           the version
+     * @param nType             the n type
+     * @param passedVertList    the passed vert list
+     * @param dbMaps            the db maps
+     * @param keyPropNamesArr   Array (ordered) of keyProperty names
+     * @param specialTenantRule flag
+     * @param EELFLogger        the logger
+     * @return the array list
+     */
+    private ArrayList<String> getDupeSets4DependentNodes(String transId,
+                                                                String fromAppId, Graph g, String version, String nType,
+                                                                ArrayList<Vertex> passedVertList,
+                                                                ArrayList<String> keyPropNamesArr, Loader loader,
+                                                                Boolean specialTenantRule, EELFLogger logger) {
+
+        // This is for nodeTypes that DEPEND ON A PARENT NODE FOR UNIQUNESS
+
+        ArrayList<String> returnList = new ArrayList<String>();
+        ArrayList<String> alreadyFoundDupeVidArr = new ArrayList<String>();
+
+        // We've been passed a set of nodes that we want to check.  These are
+        // all nodes that ARE DEPENDENT on a PARENT Node for uniqueness.
+        // The first thing to do is to identify the key properties for the node-type
+        // and pull from the db just using those properties.
+        // Then, we'll check those nodes with their parent nodes to see if there
+        // are any duplicates.
+        //
+        // When we find duplicates, we return then as a String (there can be
+        //     more than one duplicate for one set of key data):
+        // Each element in the returned arrayList might look like this:
+        // "1234|5678|keepVid=UNDETERMINED" (if there were 2 dupes, and we
+        // couldn't figure out which one to keep)
+        // or, "100017|200027|30037|keepVid=30037" (if there were 3 dupes and we
+        // thought the third one was the one that should survive)
+        HashMap<String, Object> checkVertHash = new HashMap<String, Object>();
+        try {
+            Iterator<Vertex> pItr = passedVertList.iterator();
+            while (pItr.hasNext()) {
+                Vertex tvx = pItr.next();
+                String passedId = tvx.id().toString();
+                if (!alreadyFoundDupeVidArr.contains(passedId)) {
+                    // We haven't seen this one before - so we should check it.
+                    HashMap<String, Object> keyPropValsHash = getNodeKeyVals(tvx, keyPropNamesArr, logger);
+                    ArrayList<Vertex> tmpVertList = getNodeJustUsingKeyParams(transId, fromAppId, g,
+                            nType, keyPropValsHash, version, logger);
+
+                    if (tmpVertList.size() <= 1) {
+                        // Even without a parent node, this thing is unique so don't worry about it.
+                    } else {
+                        for (int i = 0; i < tmpVertList.size(); i++) {
+                            Vertex tmpVtx = (tmpVertList.get(i));
+                            String tmpVid = tmpVtx.id().toString();
+                            alreadyFoundDupeVidArr.add(tmpVid);
+
+                            String hKey = getNodeKeyValString(tmpVtx, keyPropNamesArr, logger);
+                            if (checkVertHash.containsKey(hKey)) {
+                                // add it to an existing list
+                                ArrayList<Vertex> tmpVL = (ArrayList<Vertex>) checkVertHash.get(hKey);
+                                tmpVL.add(tmpVtx);
+                                checkVertHash.put(hKey, tmpVL);
+                            } else {
+                                // First time for this key
+                                ArrayList<Vertex> tmpVL = new ArrayList<Vertex>();
+                                tmpVL.add(tmpVtx);
+                                checkVertHash.put(hKey, tmpVL);
+                            }
+                        }
+                    }
+                }
+            }
+
+            // More than one node have the same key fields since they may
+            // depend on a parent node for uniqueness. Since we're finding
+            // more than one, we want to check to see if any of the
+            // vertices that have this set of keys are also pointing at the
+            // same 'parent' node.
+            // Note: for a given set of key data, it is possible that there
+            // could be more than one set of duplicates.
+            for (Entry<String, Object> lentry : checkVertHash.entrySet()) {
+                ArrayList<Vertex> thisIdSetList = (ArrayList<Vertex>) lentry.getValue();
+                if (thisIdSetList == null || thisIdSetList.size() < 2) {
+                    // Nothing to check for this set.
+                    continue;
+                }
+
+                HashMap<String, ArrayList<Vertex>> vertsGroupedByParentHash = groupVertsByDepNodes(
+                        transId, fromAppId, g, version, nType,
+                        thisIdSetList, loader);
+                for (Map.Entry<String, ArrayList<Vertex>> entry : vertsGroupedByParentHash
+                        .entrySet()) {
+                    ArrayList<Vertex> thisParentsVertList = entry
+                            .getValue();
+                    if (thisParentsVertList.size() > 1) {
+                        // More than one vertex found with the same key info
+                        // hanging off the same parent/dependent node
+                        String dupesStr = "";
+                        for (int i = 0; i < thisParentsVertList.size(); i++) {
+                            dupesStr = dupesStr
+                                    + ((thisParentsVertList
+                                    .get(i))).id() + "|";
+                        }
+                        if (dupesStr != "") {
+                            Vertex prefV = getPreferredDupe(transId,
+                                    fromAppId, g, thisParentsVertList,
+                                    version, specialTenantRule, loader, logger);
+
+                            if (prefV == null) {
+                                // We could not determine which duplicate to keep
+                                dupesStr = dupesStr + "KeepVid=UNDETERMINED";
+                                returnList.add(dupesStr);
+                            } else {
+                                dupesStr = dupesStr + "KeepVid="
+                                        + prefV.id().toString();
+                                returnList.add(dupesStr);
+                            }
+                        }
+                    }
+                }
+            }
+
+        } catch (Exception e) {
+            logger.warn(" >>> Threw an error in checkAndProcessDupes - just absorb this error and move on. " + LogFormatTools.getStackTop(e));
+        }
+
+        return returnList;
+
+    }// End of getDupeSets4DependentNodes()
+
+
+    private Graph getGraphTransaction(JanusGraph graph, EELFLogger logger) {
+
+        Graph gt = null;
+        try {
+            if (graph == null) {
+                String emsg = "could not get graph object in DupeTool.  \n";
+                System.out.println(emsg);
+                LoggingContext.statusCode(StatusCode.ERROR);
+                LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+                logger.error(emsg);
+                exit(0);
+            }
+            gt = graph.newTransaction();
+            if (gt == null) {
+                String emsg = "null graphTransaction object in DupeTool. \n";
+                throw new AAIException("AAI_6101", emsg);
+            }
+
+        } catch (AAIException e1) {
+            String msg = e1.getErrorObject().toString();
+            System.out.println(msg);
+            LoggingContext.statusCode(StatusCode.ERROR);
+            LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+            logger.error(msg);
+            exit(0);
+        } catch (Exception e2) {
+            String msg = e2.toString();
+            System.out.println(msg);
+            LoggingContext.statusCode(StatusCode.ERROR);
+            LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
+            logger.error(msg);
+            exit(0);
+        }
+
+        return gt;
+
+    }// End of getGraphTransaction()
+
+
+    public void showNodeInfo(EELFLogger logger, Vertex tVert, Boolean displayAllVidsFlag) {
+
+        try {
+            Iterator<VertexProperty<Object>> pI = tVert.properties();
+            String infStr = ">>> Found Vertex with VertexId = " + tVert.id() + ", properties:    ";
+            System.out.println(infStr);
+            logger.info(infStr);
+            while (pI.hasNext()) {
+                VertexProperty<Object> tp = pI.next();
+                infStr = " [" + tp.key() + "|" + tp.value() + "] ";
+                System.out.println(infStr);
+                logger.info(infStr);
+            }
+
+            ArrayList<String> retArr = collectEdgeInfoForNode(logger, tVert, displayAllVidsFlag);
+            for (String infoStr : retArr) {
+                System.out.println(infoStr);
+                logger.info(infoStr);
+            }
+        } catch (Exception e) {
+            String warnMsg = " -- Error -- trying to display edge info. [" + e.getMessage() + "]";
+            System.out.println(warnMsg);
+            LoggingContext.statusCode(StatusCode.ERROR);
+            LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
+            logger.warn(warnMsg);
+            LoggingContext.statusCode(StatusCode.COMPLETE);
+            LoggingContext.responseCode(LoggingContext.SUCCESS);
+        }
+
+    }// End of showNodeInfo()
+
+
+    public ArrayList<String> collectEdgeInfoForNode(EELFLogger logger, Vertex tVert, boolean displayAllVidsFlag) {
+        ArrayList<String> retArr = new ArrayList<String>();
+        Direction dir = Direction.OUT;
+        for (int i = 0; i <= 1; i++) {
+            if (i == 1) {
+                // Second time through we'll look at the IN edges.
+                dir = Direction.IN;
+            }
+            Iterator<Edge> eI = tVert.edges(dir);
+            if (!eI.hasNext()) {
+                retArr.add("No " + dir + " edges were found for this vertex. ");
+            }
+            while (eI.hasNext()) {
+                Edge ed = eI.next();
+                String lab = ed.label();
+                Vertex vtx = null;
+                if (dir == Direction.OUT) {
+                    // get the vtx on the "other" side
+                    vtx = ed.inVertex();
+                } else {
+                    // get the vtx on the "other" side
+                    vtx = ed.outVertex();
+                }
+                if (vtx == null) {
+                    retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
+                } else {
+                    String nType = vtx.<String>property("aai-node-type").orElse(null);
+                    if (displayAllVidsFlag) {
+                        // This should rarely be needed
+                        String vid = vtx.id().toString();
+                        retArr.add("Found an " + dir + " edge (" + lab + ") between this vertex and a [" + nType + "] node with VtxId = " + vid);
+                    } else {
+                        // This is the normal case
+                        retArr.add("Found an " + dir + " edge (" + lab + ") between this vertex and a [" + nType + "] node. ");
+                    }
+                }
+            }
+        }
+        return retArr;
+
+    }// end of collectEdgeInfoForNode()
+
+
+    private long figureWindowStartTime(int timeWindowMinutes) {
+        // Given a window size, calculate what the start-timestamp would be.
+
+        if (timeWindowMinutes <= 0) {
+            // This just means that there is no window...
+            return 0;
+        }
+        long unixTimeNow = System.currentTimeMillis();
+        long windowInMillis = timeWindowMinutes * 60 * 1000;
+
+        long startTimeStamp = unixTimeNow - windowInMillis;
+
+        return startTimeStamp;
+    } // End of figureWindowStartTime()
+
+
+    /**
+     * Gets the node(s) just using key params.
+     *
+     * @param transId      the trans id
+     * @param fromAppId    the from app id
+     * @param graph        the graph
+     * @param nodeType     the node type
+     * @param keyPropsHash the key props hash
+     * @param apiVersion   the api version
+     * @return the node just using key params
+     * @throws AAIException the AAI exception
+     */
+    public ArrayList<Vertex> getNodeJustUsingKeyParams(String transId, String fromAppId, Graph graph, String nodeType,
+                                                              HashMap<String, Object> keyPropsHash, String apiVersion, EELFLogger logger) throws AAIException {
+
+        ArrayList<Vertex> retVertList = new ArrayList<Vertex>();
+
+        // We assume that all NodeTypes have at least one key-property defined.
+        // Note - instead of key-properties (the primary key properties), a user could pass
+        //        alternate-key values if they are defined for the nodeType.
+        ArrayList<String> kName = new ArrayList<String>();
+        ArrayList<Object> kVal = new ArrayList<Object>();
+        if (keyPropsHash == null || keyPropsHash.isEmpty()) {
+            throw new AAIException("AAI_6120", " NO key properties passed for this getNodeJustUsingKeyParams() request.  NodeType = [" + nodeType + "]. ");
+        }
+
+        int i = -1;
+        for (Map.Entry<String, Object> entry : keyPropsHash.entrySet()) {
+            i++;
+            kName.add(i, entry.getKey());
+            kVal.add(i, entry.getValue());
+        }
+        int topPropIndex = i;
+        Vertex tiV = null;
+        String propsAndValuesForMsg = "";
+        Iterator<Vertex> verts = null;
+        GraphTraversalSource g = graph.traversal();
+        try {
+            if (topPropIndex == 0) {
+                propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ") ";
+                verts = g.V().has(kName.get(0), kVal.get(0)).has("aai-node-type", nodeType);
+            } else if (topPropIndex == 1) {
+                propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", "
+                        + kName.get(1) + " = " + kVal.get(1) + ") ";
+                verts = g.V().has(kName.get(0), kVal.get(0)).has(kName.get(1), kVal.get(1)).has("aai-node-type", nodeType);
+            } else if (topPropIndex == 2) {
+                propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", "
+                        + kName.get(1) + " = " + kVal.get(1) + ", "
+                        + kName.get(2) + " = " + kVal.get(2) + ") ";
+                verts = g.V().has(kName.get(0), kVal.get(0)).has(kName.get(1), kVal.get(1)).has(kName.get(2), kVal.get(2)).has("aai-node-type", nodeType);
+            } else if (topPropIndex == 3) {
+                propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", "
+                        + kName.get(1) + " = " + kVal.get(1) + ", "
+                        + kName.get(2) + " = " + kVal.get(2) + ", "
+                        + kName.get(3) + " = " + kVal.get(3) + ") ";
+                verts = g.V().has(kName.get(0), kVal.get(0)).has(kName.get(1), kVal.get(1)).has(kName.get(2), kVal.get(2)).has(kName.get(3), kVal.get(3)).has("aai-node-type", nodeType);
+            } else {
+                throw new AAIException("AAI_6114", " We only support 4 keys per nodeType for now \n");
+            }
+        } catch (Exception ex) {
+            LoggingContext.statusCode(StatusCode.ERROR);
+            LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+            logger.error(" ERROR trying to get node for: [" + propsAndValuesForMsg + "] " + LogFormatTools.getStackTop(ex));
+            LoggingContext.statusCode(StatusCode.COMPLETE);
+            LoggingContext.responseCode(LoggingContext.SUCCESS);
+        }
+
+        if (verts != null) {
+            while (verts.hasNext()) {
+                tiV = verts.next();
+                retVertList.add(tiV);
+            }
+        }
+
+        if (retVertList.size() == 0) {
+            logger.debug("DEBUG No node found for nodeType = [" + nodeType +
+                    "], propsAndVal = " + propsAndValuesForMsg);
+        }
+
+        return retVertList;
+
+    }// End of getNodeJustUsingKeyParams()
+
+
+    /**
+     * Gets the node(s) just using key params.
+     *
+     * @param transId         the trans id
+     * @param fromAppId       the from app id
+     * @param graph           the graph
+     * @param nodeType        the node type
+     * @param windowStartTime the window start time
+     * @param propsHash       the props hash
+     * @param apiVersion      the api version
+     * @return the nodes
+     * @throws AAIException the AAI exception
+     */
+    public ArrayList<Vertex> figureOutNodes2Check(String transId, String fromAppId,
+                                                         Graph graph, String nodeType, long windowStartTime,
+                                                         String propsString, EELFLogger logger) throws AAIException {
+
+        ArrayList<Vertex> retVertList = new ArrayList<Vertex>();
+        String msg = "";
+        GraphTraversal<Vertex, Vertex> tgQ = graph.traversal().V().has("aai-node-type", nodeType);
+        String qStringForMsg = "graph.traversal().V().has(\"aai-node-type\"," + nodeType + ")";
+
+        if (propsString != null && !propsString.trim().equals("")) {
+            propsString = propsString.trim();
+            int firstPipeLoc = propsString.indexOf("|");
+            if (firstPipeLoc <= 0) {
+                msg = "Bad props4Collect passed: [" + propsString + "].  \n Expecting a format like, 'propName1|propVal1,propName2|propVal2'";
+                System.out.println(msg);
+                LoggingContext.statusCode(StatusCode.ERROR);
+                LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                logger.error(msg);
+                exit(0);
+            }
+
+            // Note - if they're only passing on parameter, there won't be any commas
+            String[] paramArr = propsString.split(",");
+            for (int i = 0; i < paramArr.length; i++) {
+                int pipeLoc = paramArr[i].indexOf("|");
+                if (pipeLoc <= 0) {
+                    msg = "Bad propsString passed: [" + propsString + "].  \n Expecting a format like, 'propName1|propVal1,propName2|propVal2'";
+                    System.out.println(msg);
+                    LoggingContext.statusCode(StatusCode.ERROR);
+                    LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                    logger.error(msg);
+                    exit(0);
+                } else {
+                    String propName = paramArr[i].substring(0, pipeLoc);
+                    String propVal = paramArr[i].substring(pipeLoc + 1);
+                    tgQ = tgQ.has(propName, propVal);
+                    qStringForMsg = qStringForMsg + ".has(" + propName + "," + propVal + ")";
+                }
+            }
+        }
+
+        if (tgQ == null) {
+            msg = "Bad JanusGraphQuery object.  ";
+            System.out.println(msg);
+            LoggingContext.statusCode(StatusCode.ERROR);
+            LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+            logger.error(msg);
+            exit(0);
+        } else {
+            Iterator<Vertex> vertItor = tgQ;
+            while (vertItor.hasNext()) {
+                Vertex tiV = vertItor.next();
+                if (windowStartTime <= 0) {
+                    // We're not applying a time-window
+                    retVertList.add(tiV);
+                } else {
+                    Object objTimeStamp = tiV.property("aai-created-ts").orElse(null);
+                    if (objTimeStamp == null) {
+                        // No timestamp - so just take it
+                        retVertList.add(tiV);
+                    } else {
+                        long thisNodeCreateTime = (long) objTimeStamp;
+                        if (thisNodeCreateTime > windowStartTime) {
+                            // It is in our window, so we can take it
+                            retVertList.add(tiV);
+                        }
+                    }
+                }
+            }
+        }
+
+        if (retVertList.size() == 0) {
+            logger.debug("DEBUG No node found for: [" + qStringForMsg + ", with aai-created-ts > " + windowStartTime);
+        }
+
+        return retVertList;
+
+    }// End of figureOutNodes2Check()
+
+
+    /**
+     * Gets the preferred dupe.
+     *
+     * @param transId        the trans id
+     * @param fromAppId      the from app id
+     * @param g              the g
+     * @param dupeVertexList the dupe vertex list
+     * @param ver            the ver
+     * @param EELFLogger     the logger
+     * @return Vertex
+     * @throws AAIException the AAI exception
+     */
+    public Vertex getPreferredDupe(String transId,
+                                          String fromAppId, Graph g,
+                                          ArrayList<Vertex> dupeVertexList, String ver,
+                                          Boolean specialTenantRule, Loader loader, EELFLogger logger)
+            throws AAIException {
+
+        // This method assumes that it is being passed a List of vertex objects
+        // which violate our uniqueness constraints.
+
+        Vertex nullVtx = null;
+
+        if (dupeVertexList == null) {
+            return nullVtx;
+        }
+        int listSize = dupeVertexList.size();
+        if (listSize == 0) {
+            return nullVtx;
+        }
+        if (listSize == 1) {
+            return (dupeVertexList.get(0));
+        }
+
+        Vertex vtxPreferred = null;
+        Vertex currentFaveVtx = dupeVertexList.get(0);
+        for (int i = 1; i < listSize; i++) {
+            Vertex vtxB = dupeVertexList.get(i);
+            vtxPreferred = pickOneOfTwoDupes(transId, fromAppId, g,
+                    currentFaveVtx, vtxB, ver, specialTenantRule, loader, logger);
+            if (vtxPreferred == null) {
+                // We couldn't choose one
+                return nullVtx;
+            } else {
+                currentFaveVtx = vtxPreferred;
+            }
+        }
+
+        return (currentFaveVtx);
+
+    } // end of getPreferredDupe()
+
+
+    /**
+     * Pick one of two dupes.
+     *
+     * @param transId    the trans id
+     * @param fromAppId  the from app id
+     * @param g          the g
+     * @param vtxA       the vtx A
+     * @param vtxB       the vtx B
+     * @param ver        the ver
+     * @param boolean    specialTenantRuleFlag flag
+     * @param EELFLogger the logger
+     * @return Vertex
+     * @throws AAIException the AAI exception
+     */
+    public Vertex pickOneOfTwoDupes(String transId,
+                                           String fromAppId, Graph g, Vertex vtxA,
+                                           Vertex vtxB, String ver, Boolean specialTenantRule, Loader loader, EELFLogger logger) throws AAIException {
+
+        Vertex nullVtx = null;
+        Vertex preferredVtx = null;
+
+        Long vidA = new Long(vtxA.id().toString());
+        Long vidB = new Long(vtxB.id().toString());
+
+        String vtxANodeType = "";
+        String vtxBNodeType = "";
+        Object obj = vtxA.<Object>property("aai-node-type").orElse(null);
+        if (obj != null) {
+            vtxANodeType = obj.toString();
+        }
+        obj = vtxB.<Object>property("aai-node-type").orElse(null);
+        if (obj != null) {
+            vtxBNodeType = obj.toString();
+        }
+
+        if (vtxANodeType.equals("") || (!vtxANodeType.equals(vtxBNodeType))) {
+            // Either they're not really dupes or there's some bad data - so
+            // don't pick one
+            return nullVtx;
+        }
+
+        // Check that node A and B both have the same key values (or else they
+        // are not dupes)
+        // (We'll check dep-node later)
+        Collection<String> keyProps = loader.introspectorFromName(vtxANodeType).getKeys();
+        Iterator<String> keyPropI = keyProps.iterator();
+        while (keyPropI.hasNext()) {
+            String propName = keyPropI.next();
+            String vtxAKeyPropVal = "";
+            obj = vtxA.<Object>property(propName).orElse(null);
+            if (obj != null) {
+                vtxAKeyPropVal = obj.toString();
+            }
+            String vtxBKeyPropVal = "";
+            obj = vtxB.<Object>property(propName).orElse(null);
+            if (obj != null) {
+                vtxBKeyPropVal = obj.toString();
+            }
+
+            if (vtxAKeyPropVal.equals("")
+                    || (!vtxAKeyPropVal.equals(vtxBKeyPropVal))) {
+                // Either they're not really dupes or they are missing some key
+                // data - so don't pick one
+                return nullVtx;
+            }
+        }
+
+        // Collect the vid's and aai-node-types of the vertices that each vertex
+        // (A and B) is connected to.
+        ArrayList<String> vtxIdsConn2A = new ArrayList<String>();
+        ArrayList<String> vtxIdsConn2B = new ArrayList<String>();
+        HashMap<String, String> nodeTypesConn2A = new HashMap<String, String>();
+        HashMap<String, String> nodeTypesConn2B = new HashMap<String, String>();
+
+        ArrayList<String> retArr = new ArrayList<String>();
+        Iterator<Edge> eAI = vtxA.edges(Direction.BOTH);
+        while (eAI.hasNext()) {
+            Edge ed = eAI.next();
+            Vertex tmpVtx;
+            if (vtxA.equals(ed.inVertex())) {
+                tmpVtx = ed.outVertex();
+            } else {
+                tmpVtx = ed.inVertex();
+            }
+            if (tmpVtx == null) {
+                retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
+            } else {
+                String conVid = tmpVtx.id().toString();
+                String nt = "";
+                obj = tmpVtx.<Object>property("aai-node-type").orElse(null);
+                if (obj != null) {
+                    nt = obj.toString();
+                }
+                nodeTypesConn2A.put(nt, conVid);
+                vtxIdsConn2A.add(conVid);
+            }
+        }
+
+        Iterator<Edge> eBI = vtxB.edges(Direction.BOTH);
+        while (eBI.hasNext()) {
+            Edge ed = eBI.next();
+            Vertex tmpVtx;
+
+            if (vtxB.equals(ed.inVertex())) {
+                tmpVtx = ed.outVertex();
+            } else {
+                tmpVtx = ed.inVertex();
+            }
+            if (tmpVtx == null) {
+                retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
+            } else {
+                String conVid = tmpVtx.id().toString();
+                String nt = "";
+                obj = tmpVtx.<Object>property("aai-node-type").orElse(null);
+                if (obj != null) {
+                    nt = obj.toString();
+                }
+                nodeTypesConn2B.put(nt, conVid);
+                vtxIdsConn2B.add(conVid);
+            }
+        }
+
+        // 1 - If this kind of node needs a dependent node for uniqueness, then
+        //    verify that they both nodes point to the same dependent
+        //    node (otherwise they're not really duplicates)
+        // Note - there are sometimes more than one dependent node type since
+        //    one nodeType can be used in different ways. But for a
+        //    particular node, it will only have one dependent node that
+        //    it's connected to.
+        Collection<String> depNodeTypes = loader.introspectorFromName(vtxANodeType).getDependentOn();
+        if (depNodeTypes.isEmpty()) {
+            // This kind of node is not dependent on any other. That is ok.
+        } else {
+            String depNodeVtxId4A = "";
+            String depNodeVtxId4B = "";
+            Iterator<String> iter = depNodeTypes.iterator();
+            while (iter.hasNext()) {
+                String depNodeType = iter.next();
+                if (nodeTypesConn2A.containsKey(depNodeType)) {
+                    // This is the dependent node type that vertex A is using
+                    depNodeVtxId4A = nodeTypesConn2A.get(depNodeType);
+                }
+                if (nodeTypesConn2B.containsKey(depNodeType)) {
+                    // This is the dependent node type that vertex B is using
+                    depNodeVtxId4B = nodeTypesConn2B.get(depNodeType);
+                }
+            }
+            if (depNodeVtxId4A.equals("")
+                    || (!depNodeVtxId4A.equals(depNodeVtxId4B))) {
+                // Either they're not really dupes or there's some bad data - so
+                // don't pick either one
+                return nullVtx;
+            }
+        }
+
+        if (vtxIdsConn2A.size() == vtxIdsConn2B.size()) {
+            // 2 - If they both have edges to all the same vertices, then return
+            // the one with the lower vertexId.
+
+            // OR (2b)-- if this is the SPECIAL case -- of
+            //  "tenant|vserver vs. tenant|service-subscription"
+            //   then we pick/prefer the one that's connected to
+            //   the service-subscription.  AAI-8172
+            boolean allTheSame = true;
+            Iterator<String> iter = vtxIdsConn2A.iterator();
+            while (iter.hasNext()) {
+                String vtxIdConn2A = iter.next();
+                if (!vtxIdsConn2B.contains(vtxIdConn2A)) {
+                    allTheSame = false;
+                    break;
+                }
+            }
+
+            if (allTheSame) {
+                if (vidA < vidB) {
+                    preferredVtx = vtxA;
+                } else {
+                    preferredVtx = vtxB;
+                }
+            } else if (specialTenantRule) {
+                // They asked us to apply a special rule if it applies
+                if (vtxIdsConn2A.size() == 2 && vtxANodeType.equals("tenant")) {
+                    // We're dealing with two tenant nodes which each just have
+                    // two connections.  One must be the parent (cloud-region)
+                    // which we check in step 1 above.   If one connects to
+                    // a vserver and the other connects to a service-subscription,
+                    // our special rule is to keep the one connected
+                    // to the
+                    if (nodeTypesConn2A.containsKey("vserver") && nodeTypesConn2B.containsKey("service-subscription")) {
+                        String infMsg = " WARNING >>> we are using the special tenant rule to choose to " +
+                                " delete tenant vtxId = " + vidA + ", and keep tenant vtxId = " + vidB;
+                        System.out.println(infMsg);
+                        logger.info(infMsg);
+                        preferredVtx = vtxB;
+                    } else if (nodeTypesConn2B.containsKey("vserver") && nodeTypesConn2A.containsKey("service-subscription")) {
+                        String infMsg = " WARNING >>> we are using the special tenant rule to choose to " +
+                                " delete tenant vtxId = " + vidB + ", and keep tenant vtxId = " + vidA;
+                        System.out.println(infMsg);
+                        logger.info(infMsg);
+                        preferredVtx = vtxA;
+                    }
+                }
+            }
+        } else if (vtxIdsConn2A.size() > vtxIdsConn2B.size()) {
+            // 3 - VertexA is connected to more things than vtxB.
+            // We'll pick VtxA if its edges are a superset of vtxB's edges.
+            boolean missingOne = false;
+            Iterator<String> iter = vtxIdsConn2B.iterator();
+            while (iter.hasNext()) {
+                String vtxIdConn2B = iter.next();
+                if (!vtxIdsConn2A.contains(vtxIdConn2B)) {
+                    missingOne = true;
+                    break;
+                }
+            }
+            if (!missingOne) {
+                preferredVtx = vtxA;
+            }
+        } else if (vtxIdsConn2B.size() > vtxIdsConn2A.size()) {
+            // 4 - VertexB is connected to more things than vtxA.
+            // We'll pick VtxB if its edges are a superset of vtxA's edges.
+            boolean missingOne = false;
+            Iterator<String> iter = vtxIdsConn2A.iterator();
+            while (iter.hasNext()) {
+                String vtxIdConn2A = iter.next();
+                if (!vtxIdsConn2B.contains(vtxIdConn2A)) {
+                    missingOne = true;
+                    break;
+                }
+            }
+            if (!missingOne) {
+                preferredVtx = vtxB;
+            }
+        } else {
+            preferredVtx = nullVtx;
+        }
+
+        return (preferredVtx);
+
+    } // end of pickOneOfTwoDupes()
+
+
+    /**
+     * Group vertices by the vertex-id of their dependent (parent) node.
+     *
+     * <p>Given a list of JanusGraph vertices that are assumed to share the
+     * same key info (e.g. duplicate ip-address nodes), bucket them by the
+     * parent vertex they hang under.  Normally a parent has only one child
+     * with a given key, but since we are hunting duplicates we allow more
+     * than one child to land in the same parent's bucket.
+     *
+     * @param transId        the trans id
+     * @param fromAppId      the from app id
+     * @param g              the graph
+     * @param version        the version
+     * @param nType          the node type of the passed vertices
+     * @param passedVertList the vertices to group (may be null)
+     * @param loader         the Loader
+     * @return hash of parent-vertex-id to the list of child vertices found under it
+     * @throws AAIException the AAI exception
+     */
+    private HashMap<String, ArrayList<Vertex>> groupVertsByDepNodes(
+            String transId, String fromAppId, Graph g, String version,
+            String nType, ArrayList<Vertex> passedVertList, Loader loader)
+            throws AAIException {
+
+        HashMap<String, ArrayList<Vertex>> vertsByParentVid = new HashMap<String, ArrayList<Vertex>>();
+        GraphTraversalSource traversalSource = g.traversal();
+        if (passedVertList != null) {
+            for (Vertex childVtx : passedVertList) {
+                Vertex parentVtx = getConnectedParent(traversalSource, childVtx);
+                if (parentVtx == null) {
+                    // No containment parent found - nothing to group this one by
+                    continue;
+                }
+                String parentVid = parentVtx.id().toString();
+                ArrayList<Vertex> bucket = vertsByParentVid.get(parentVid);
+                if (bucket == null) {
+                    // First child found under this parent
+                    bucket = new ArrayList<Vertex>();
+                    vertsByParentVid.put(parentVid, bucket);
+                }
+                bucket.add(childVtx);
+            }
+        }
+        return vertsByParentVid;
+
+    }// end of groupVertsByDepNodes()
+
+
+    /**
+     * Find the containment parent of a vertex, if it has one.
+     *
+     * <p>Looks for an edge carrying the CONTAINS property in either
+     * direction (in-edge marked OUT, or out-edge marked IN) and returns the
+     * vertex on the containing side.
+     *
+     * @param g        the graph traversal source to query with
+     * @param startVtx the vertex whose parent is wanted
+     * @return the parent vertex, or null when no containment edge is found
+     * @throws AAIException the AAI exception
+     */
+    private Vertex getConnectedParent(GraphTraversalSource g,
+                                             Vertex startVtx) throws AAIException {
+
+        Vertex parentVtx = null;
+        // This traversal does not assume a parent/child edge direction
+        Iterator<Vertex> vertI = g.V(startVtx).union(__.inE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.OUT.toString()).outV(), __.outE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.IN.toString()).inV());
+        while (vertI != null && vertI.hasNext()) {
+            // Note - there better only be one!
+            // NOTE(review): if bad data gives several containment edges,
+            // the last parent returned by the traversal wins.
+            parentVtx = vertI.next();
+        }
+        return parentVtx;
+
+    }// End of getConnectedParent()
+
+
+    /**
+     * Delete the non-keeper vertices for every duplicate set in the list.
+     *
+     * <p>Each entry is expected in the format of pipe-delimited vids
+     * followed by either "keepVid=xyz" or "keepVid=UNDETERMINED",
+     * e.g. "3456|9880|keepVid=3456".
+     *
+     * @param g            the graph to delete from
+     * @param dupeInfoList the dupe info strings, one per duplicate set
+     * @param logger       the EELFLogger
+     * @return true if at least one set resulted in a deletion
+     */
+    private Boolean deleteNonKeepers(Graph g,
+                                            ArrayList<String> dupeInfoList, EELFLogger logger) {
+
+        boolean didADelFlag = false;
+        for (String dupeInfoString : dupeInfoList) {
+            // Keep processing every set even after one reports a delete
+            if (deleteNonKeeperForOneSet(g, dupeInfoString, logger)) {
+                didADelFlag = true;
+            }
+        }
+
+        return didADelFlag;
+
+    }// end of deleteNonKeepers()
+
+
+    /**
+     * Delete all but the chosen "keeper" vertex for one duplicate set.
+     *
+     * @param g              the graph to delete from
+     * @param dupeInfoString one dupe set string, e.g. "3456|9880|KeepVid=3456"
+     * @param logger         the EELFLogger
+     * @return true if at least one vertex was actually deleted
+     */
+    private Boolean deleteNonKeeperForOneSet(Graph g,
+                                                    String dupeInfoString, EELFLogger logger) {
+
+        Boolean deletedSomething = false;
+        // This assumes that each dupeInfoString is in the format of
+        // pipe-delimited vid's followed by either "keepVid=xyz" or "keepVid=UNDETERMINED"
+        // ie. "3456|9880|keepVid=3456"
+
+
+        String[] dupeArr = dupeInfoString.split("\\|");
+        ArrayList<String> idArr = new ArrayList<String>();
+        int lastIndex = dupeArr.length - 1;
+        for (int i = 0; i <= lastIndex; i++) {
+            if (i < lastIndex) {
+                // This is not the last entry, it is one of the dupes,
+                String vidString = dupeArr[i];
+                idArr.add(vidString);
+            } else {
+                // This is the last entry which should tell us if we have a
+                // preferred keeper
+                String prefString = dupeArr[i];
+                if (prefString.equals("KeepVid=UNDETERMINED")) {
+                    // They sent us a bad string -- nothing should be deleted if
+                    // no dupe could be tagged as preferred.
+                    return false;
+                } else {
+                    // If we know which to keep, then the prefString should look
+                    // like, "KeepVid=12345"
+                    String[] prefArr = prefString.split("=");
+                    if (prefArr.length != 2 || (!prefArr[0].equals("KeepVid"))) {
+                        String emsg = "Bad format. Expecting KeepVid=999999";
+                        System.out.println(emsg);
+                        LoggingContext.statusCode(StatusCode.ERROR);
+                        LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                        logger.error(emsg);
+                        LoggingContext.statusCode(StatusCode.COMPLETE);
+                        LoggingContext.responseCode(LoggingContext.SUCCESS);
+                        return false;
+                    } else {
+                        String keepVidStr = prefArr[1];
+                        if (idArr.contains(keepVidStr)) {
+                            // Drop the keeper from the list; what remains is
+                            // exactly the set of vids to delete.
+                            idArr.remove(keepVidStr);
+                            // So now, the idArr should just contain the vid's
+                            // that we want to remove.
+                            for (int x = 0; x < idArr.size(); x++) {
+                                boolean okFlag = true;
+                                String thisVid = idArr.get(x);
+                                try {
+                                    long longVertId = Long.parseLong(thisVid);
+                                    Vertex vtx = g.traversal().V(longVertId).next();
+                                    String msg = "--->>>   We will delete node with VID = " + thisVid + " <<<---";
+                                    System.out.println(msg);
+                                    logger.info(msg);
+                                    vtx.remove();
+                                } catch (Exception e) {
+                                    // A failure to delete one vid does not stop
+                                    // the rest of the set from being processed.
+                                    okFlag = false;
+                                    String emsg = "ERROR trying to delete VID = " + thisVid + ", [" + e + "]";
+                                    System.out.println(emsg);
+                                    LoggingContext.statusCode(StatusCode.ERROR);
+                                    LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                    logger.error(emsg);
+                                    LoggingContext.statusCode(StatusCode.COMPLETE);
+                                    LoggingContext.responseCode(LoggingContext.SUCCESS);
+                                }
+                                if (okFlag) {
+                                    String infMsg = " DELETED VID = " + thisVid;
+                                    logger.info(infMsg);
+                                    System.out.println(infMsg);
+                                    deletedSomething = true;
+                                }
+                            }
+                        } else {
+                            String emsg = "ERROR - Vertex Id to keep not found in list of dupes.  dupeInfoString = ["
+                                    + dupeInfoString + "]";
+                            LoggingContext.statusCode(StatusCode.ERROR);
+                            LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                            logger.error(emsg);
+                            LoggingContext.statusCode(StatusCode.COMPLETE);
+                            LoggingContext.responseCode(LoggingContext.SUCCESS);
+                            System.out.println(emsg);
+                            return false;
+                        }
+                    }
+                }// else we know which one to keep
+            }// else last entry
+        }// for each vertex in a group
+
+        return deletedSomething;
+
+    }// end of deleteNonKeeperForOneSet()
+
+
+    /**
+     * Get values of the key properties for a node.
+     *
+     * @param tvx              the vertex to pull the properties from (may be null)
+     * @param keyPropNamesArr  ArrayList (ordered) of key prop names
+     * @param logger           the EELFLogger
+     * @return a hashMap of the propertyNames/values (empty when tvx is null)
+     */
+    private HashMap<String, Object> getNodeKeyVals(Vertex tvx,
+                                                          ArrayList<String> keyPropNamesArr, EELFLogger logger) {
+
+        HashMap<String, Object> keyValHash = new HashMap<String, Object>();
+        if (tvx != null) {
+            for (String keyPropName : keyPropNamesArr) {
+                // Absent properties are recorded as null values
+                keyValHash.put(keyPropName, tvx.property(keyPropName).orElse(null));
+            }
+        }
+        return keyValHash;
+
+    }// End of getNodeKeyVals()
+
+
+    /**
+     * Get values of the key properties for a node as a single string.
+     *
+     * <p>NOTE -- for what we're using this for, we would need to
+     * guarantee that the properties are always in the same order.
+     *
+     * @param tvx              the vertex to pull the properties from (may be null)
+     * @param keyPropNamesArr  ordered list of key prop names
+     * @param logger           the EELFLogger
+     * @return a String of concatenated values
+     */
+    private String getNodeKeyValString(Vertex tvx,
+                                              ArrayList<String> keyPropNamesArr, EELFLogger logger) {
+
+        String retString = "";
+        Iterator<String> propItr = keyPropNamesArr.iterator();
+        while (propItr.hasNext()) {
+            String propName = propItr.next();
+            if (tvx != null) {
+                Object propValObj = tvx.property(propName).orElse(null);
+                // Guard against a missing key property: orElse(null) can hand
+                // back null here, and calling toString() on it would have
+                // thrown a NullPointerException.  Skip absent values instead.
+                if (propValObj != null) {
+                    retString = " " + retString + propValObj.toString();
+                }
+            }
+        }
+        return retString;
+
+    }// End of getNodeKeyValString()
+
+
+    /**
+     * Find duplicate sets from two dupe runs.
+     *
+     * @param firstPassDupeSets  from the first pass
+     * @param secondPassDupeSets from the second pass
+     * @param logger             the EELFLogger
+     * @return commonDupeSets that are common to both passes and have a determined keeper
+     */
+    private ArrayList<String> figureWhichDupesStillNeedFixing(ArrayList<String> firstPassDupeSets,
+                                                                     ArrayList<String> secondPassDupeSets, EELFLogger logger) {
+
+        ArrayList<String> common2BothSet = new ArrayList<String>();
+
+        // We just want to look for entries from the first set which have identical (almost)
+        //    entries in the secondary set.  I say "almost" because the order of the
+        //    vid's to delete may be in a different order, but we only want to use it if
+        //    they have all the same values.   Note also - we're just looking for
+        //    the sets where we have a candidate to delete.
+
+        // The duplicate-set Strings are in this format:
+        // "1234|5678|keepVid=UNDETERMINED" (if there were 2 dupes, and we
+        // couldn't figure out which one to keep)
+        // or, "100017|200027|30037|keepVid=30037" (if there were 3 dupes and we
+        // thought the third one was the one that should survive)
+
+        if (firstPassDupeSets == null || firstPassDupeSets.isEmpty()
+                || secondPassDupeSets == null || secondPassDupeSets.isEmpty()) {
+            // If either set is empty, then our return list has to be empty too
+            return common2BothSet;
+        }
+
+        boolean needToParse = false;
+        for (int x = 0; x < secondPassDupeSets.size(); x++) {
+            String secPassDupeSetStr = secondPassDupeSets.get(x);
+            if (secPassDupeSetStr.endsWith("UNDETERMINED")) {
+                // This is a set of dupes where we could not pick one
+                // to delete - so don't include it on our list for
+                // fixing.
+            } else if (firstPassDupeSets.contains(secPassDupeSetStr)) {
+                // We have lucked out and do not even need to parse this since
+                // it was in the other array with any dupes listed in the same order
+                // This is actually the most common scenario since there is
+                // usually only one dupe, so order doesn't matter.
+                common2BothSet.add(secPassDupeSetStr);
+            } else {
+                // We'll need to do some parsing to check this one
+                needToParse = true;
+            }
+        }
+
+        if (needToParse) {
+            // Make a hash from the first and second Pass data
+            // where the key is the vid to KEEP and the value is an
+            //         array of (String) vids that would get deleted.
+            // NOTE(review): this lookup depends on makeKeeperHashOfDupeStrings()
+            // keying each entry by the keeper's vid -- verify that method.
+            HashMap<String, ArrayList<String>> firstPassHash = makeKeeperHashOfDupeStrings(firstPassDupeSets, common2BothSet, logger);
+
+            HashMap<String, ArrayList<String>> secPassHash = makeKeeperHashOfDupeStrings(secondPassDupeSets, common2BothSet, logger);
+
+            // Loop through the secondPass data and keep the ones
+            //       that check out against the firstPass set.
+            for (Map.Entry<String, ArrayList<String>> entry : secPassHash.entrySet()) {
+                boolean skipThisOne = false;
+                String secKey = entry.getKey();
+                ArrayList<String> secList = entry.getValue();
+                if (!firstPassHash.containsKey(secKey)) {
+                    // The second pass found this delete candidate, but not the first pass
+                    skipThisOne = true;
+                } else {
+                    // They both think they should keep this VID, check the associated deletes for it.
+                    // NOTE(review): this only checks that the second pass's
+                    // delete-vids are a subset of the first pass's -- extra
+                    // vids in the first pass do not disqualify the set.
+                    ArrayList<String> firstList = firstPassHash.get(secKey);
+                    for (int z = 0; z < secList.size(); z++) {
+                        if (!firstList.contains(secList.get(z))) {
+                            // The first pass did not think this needed to be deleted
+                            skipThisOne = true;
+                        }
+                    }
+                }
+                if (!skipThisOne) {
+                    // Put the string back together and pass it back
+                    // Not beautiful, but no time to make it nice right now...
+                    // Put it back in the format: "3456|9880|keepVid=3456"
+                    String thisDelSetStr = "";
+                    for (int z = 0; z < secList.size(); z++) {
+                        if (z == 0) {
+                            thisDelSetStr = secList.get(z);
+                        } else {
+                            thisDelSetStr = thisDelSetStr + "|" + secList.get(z);
+                        }
+                    }
+                    thisDelSetStr = thisDelSetStr + "|keepVid=" + secKey;
+                    common2BothSet.add(thisDelSetStr);
+                }
+            }
+
+        }
+        return common2BothSet;
+
+    }// figureWhichDupesStillNeedFixing
+
+
+    /**
+     * Build a hash of duplicate-set info keyed by the vid chosen as keeper.
+     *
+     * <p>Each entry maps the keeper's vid (as a String) to the list of vids
+     * that would get deleted for that set.  Sets listed in excludeSets, sets
+     * with only a single dupe, sets whose keeper is UNDETERMINED, and badly
+     * formatted sets are skipped.
+     *
+     * @param dupeSets    dupe-set strings, e.g. "1234|5678|KeepVid=5678"
+     * @param excludeSets dupe-set strings that do not need to be parsed
+     * @param logger      the EELFLogger
+     * @return hash of keeper-vid to the list of delete-candidate vids
+     */
+    private HashMap<String, ArrayList<String>> makeKeeperHashOfDupeStrings(ArrayList<String> dupeSets,
+                                                                                  ArrayList<String> excludeSets, EELFLogger logger) {
+
+        HashMap<String, ArrayList<String>> keeperHash = new HashMap<String, ArrayList<String>>();
+
+        for (int x = 0; x < dupeSets.size(); x++) {
+            String tmpSetStr = dupeSets.get(x);
+            if (excludeSets.contains(tmpSetStr)) {
+                // This isn't one of the ones we needed to parse.
+                continue;
+            }
+
+            String[] dupeArr = tmpSetStr.split("\\|");
+            ArrayList<String> delIdArr = new ArrayList<String>();
+            int lastIndex = dupeArr.length - 1;
+            for (int i = 0; i <= lastIndex; i++) {
+                if (i < lastIndex) {
+                    // This is not the last entry, it is one of the dupes
+                    delIdArr.add(dupeArr[i]);
+                } else {
+                    // This is the last entry which should tell us if we
+                    // have a preferred keeper and how many dupes we had
+                    String prefString = dupeArr[i];
+                    if (i == 1) {
+                        // There was only one dupe, so if we were gonna find
+                        // it, we would have found it above with no parsing.
+                    } else if (prefString.equals("KeepVid=UNDETERMINED")) {
+                        // This one had no determined keeper, so we don't
+                        // want it.
+                    } else {
+                        // If we know which to keep, then the prefString
+                        // should look like, "KeepVid=12345"
+                        String[] prefArr = prefString.split("=");
+                        if (prefArr.length != 2
+                                || (!prefArr[0].equals("KeepVid"))) {
+                            String infMsg = "Bad format in figureWhichDupesStillNeedFixing(). Expecting " +
+                                    " KeepVid=999999 but string looks like: [" + tmpSetStr + "]";
+                            System.out.println(infMsg);
+                            logger.info(infMsg);
+                        } else {
+                            // BUG FIX: key by the keeper's vid (prefArr[1]),
+                            // not the literal "KeepVid" (prefArr[0]).  Keying
+                            // on prefArr[0] made every set collide on the same
+                            // map key, so figureWhichDupesStillNeedFixing()
+                            // could never match keeper vids between passes.
+                            keeperHash.put(prefArr[1], delIdArr);
+                        }
+                    }
+                }
+            }
+        }
+
+        return keeperHash;
+
+    }// End makeHashOfDupeStrings()
+
+
+    /**
+     * Print and log the node details for every vertex in one duplicate set,
+     * then report which vertex (if any) would be kept.
+     *
+     * @param g              the graph to read vertices from
+     * @param dupeInfoString one dupe set, e.g. "3456|9880|KeepVid=3456"
+     * @param logger         the EELFLogger
+     */
+    private void showNodeDetailsForADupeSet(Graph g, String dupeInfoString, EELFLogger logger) {
+
+        // dang...   parsing this string once again...
+
+        String[] dupeArr = dupeInfoString.split("\\|");
+        int lastIndex = dupeArr.length - 1;
+        for (int i = 0; i <= lastIndex; i++) {
+            if (i < lastIndex) {
+                // This is not the last entry, it is one of the dupes,
+                String vidString = dupeArr[i];
+                long longVertId = Long.parseLong(vidString);
+                Vertex vtx = g.traversal().V(longVertId).next();
+                showNodeInfo(logger, vtx, false);
+            } else {
+                // This is the last entry which should tell us if we have a
+                // preferred keeper
+                String prefString = dupeArr[i];
+                if (prefString.equals("KeepVid=UNDETERMINED")) {
+                    String msg = " Our algorithm cannot choose from among these, so they will all be kept. -------\n";
+                    System.out.println(msg);
+                    logger.info(msg);
+                } else {
+                    // If we know which to keep, then the prefString should look
+                    // like, "KeepVid=12345"
+                    String[] prefArr = prefString.split("=");
+                    if (prefArr.length != 2 || (!prefArr[0].equals("KeepVid"))) {
+                        String emsg = "Bad format. Expecting KeepVid=999999";
+                        System.out.println(emsg);
+                        LoggingContext.statusCode(StatusCode.ERROR);
+                        LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                        logger.error(emsg);
+                        LoggingContext.statusCode(StatusCode.COMPLETE);
+                        LoggingContext.responseCode(LoggingContext.SUCCESS);
+                    } else {
+                        String keepVidStr = prefArr[1];
+                        String msg = " vid = " + keepVidStr + " is the one that we would KEEP. ------\n";
+                        System.out.println(msg);
+                        logger.info(msg);
+                    }
+                }
+            }
+        }
+
+    }// End of showNodeDetailsForADupeSet()
+
+    private int graphIndex = 1;
+
+    public JanusGraph setupGraph(EELFLogger logger) {
+
+        JanusGraph JanusGraph = null;
+
+
+        try (InputStream inputStream = new FileInputStream(AAIConstants.REALTIME_DB_CONFIG);) {
+
+            Properties properties = new Properties();
+            properties.load(inputStream);
+
+            if ("inmemory".equals(properties.get("storage.backend"))) {
+                JanusGraph = AAIGraph.getInstance().getGraph();
+                graphType = "inmemory";
+            } else {
+                JanusGraph = JanusGraphFactory.open(new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG).forService(DupeTool.class.getSimpleName()).withGraphType("realtime" + graphIndex).buildConfiguration());
+                graphIndex++;
+            }
+        } catch (Exception e) {
+            logger.error("Unable to open the graph", e);
+        }
+
+        return JanusGraph;
+    }
+
+    public void closeGraph(JanusGraph graph, EELFLogger logger) {
+
+        try {
+            if ("inmemory".equals(graphType)) {
+                return;
+            }
+            if (graph != null && graph.isOpen()) {
+                graph.tx().close();
+                graph.close();
+            }
+        } catch (Exception ex) {
+            // Don't throw anything because JanusGraph sometimes is just saying that the graph is already closed{
+            logger.warn("WARNING from final graph.shutdown()", ex);
+        }
+    }
+}
+
diff --git a/src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java b/src/main/java/org/onap/aai/dbgen/ForceDeleteTool.java
new file mode 100644 (file)
index 0000000..790bfa1
--- /dev/null
@@ -0,0 +1,875 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dbgen;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.Properties;
+import java.util.Scanner;
+import java.util.UUID;
+
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+import org.onap.aai.dbmap.AAIGraphConfig;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.LogFormatTools;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+import org.onap.aai.edges.enums.AAIDirection;
+import org.onap.aai.edges.enums.EdgeProperty;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.slf4j.MDC;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+
+
+
+public class ForceDeleteTool {
+       // Partner/app id and per-run transaction id recorded in the LoggingContext.
+       private static  final  String    FROMAPPID = "AAI-DB";
+       private static  final  String    TRANSID   = UUID.randomUUID().toString();
+
+       // Presumably flipped to "inmemory" by setupGraph() when the configured
+       // backend is in-memory (setupGraph not visible here -- verify); closeGraph()
+       // checks it so the shared in-memory graph is never closed.
+       private static String graphType = "realdb";
+
+       // Test hook: when false, exit() records the status code instead of
+       // terminating the JVM.
+       public static boolean SHOULD_EXIT_VM = true;
+
+       // Last status code passed to exit() when SHOULD_EXIT_VM is false.
+       public static int EXIT_VM_STATUS_CODE = -1;
+
+       // Terminates the JVM, or -- with SHOULD_EXIT_VM disabled -- just records
+       // the status code for later inspection.
+       // NOTE(review): System.exit(1) ignores the statusCode argument, so the
+       // process always exits with code 1 even though every caller passes 0;
+       // confirm whether that is intentional before changing it.
+       public static void exit(int statusCode){
+               if(SHOULD_EXIT_VM){
+                       System.exit(1);
+               }
+               EXIT_VM_STATUS_CODE = statusCode;
+       }
+
+       /*
+        * The main method.
+        *
+        * @param args the arguments
+        */
+       public static void main(String[] args) {
+               
+               //SWGK 01/21/2016 - To suppress the warning message when the tool is run from the Terminal.
+
+               System.setProperty("aai.service.name", ForceDelete.class.getSimpleName());
+               // Set the logging file properties to be used by EELFManager
+               Properties props = System.getProperties();
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_FORCE_DELETE_LOGBACK_PROPS);
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
+               EELFLogger logger = EELFManager.getInstance().getLogger(ForceDeleteTool.class.getSimpleName());
+               MDC.put("logFilenameAppender", ForceDeleteTool.class.getSimpleName());
+               
+               LoggingContext.init();
+               LoggingContext.partnerName(FROMAPPID);
+               LoggingContext.serviceName(AAIConstants.AAI_RESOURCES_MS);
+               LoggingContext.component("forceDeleteTool");
+               LoggingContext.targetEntity(AAIConstants.AAI_RESOURCES_MS);
+               LoggingContext.targetServiceName("main");
+               LoggingContext.requestId(TRANSID);
+               LoggingContext.statusCode(StatusCode.COMPLETE);
+               LoggingContext.responseCode(LoggingContext.SUCCESS);
+               
+               String actionVal = "";
+               String userIdVal = "";
+               String dataString = "";
+               Boolean displayAllVidsFlag = false;  // Note - This should rarely be needed
+               Boolean overRideProtection = false;  // This should rarely be used - it overrides all our new checking
+               long vertexIdLong = 0;
+               String edgeIdStr = "";
+               String argStr4Msg = "";
+               
+               if (args != null && args.length > 0) {
+                       // They passed some arguments in that will affect processing
+                       for (int i = 0; i < args.length; i++) {
+                               String thisArg = args[i];
+                               argStr4Msg = argStr4Msg + " " + thisArg;
+                               
+                               if (thisArg.equals("-action")) {
+                                       i++;
+                                       if (i >= args.length) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               logger.error(" No value passed with -action option.  ");
+                                               exit(0);
+                                       }
+                                       actionVal = args[i];
+                                       argStr4Msg = argStr4Msg + " " + actionVal;
+                               }
+                               else if (thisArg.equals("-userId")) {
+                                       i++;
+                                       if (i >= args.length) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               logger.error(" No value passed with -userId option.  ");
+                                               exit(0);
+                                       }
+                                       userIdVal = args[i];
+                                       argStr4Msg = argStr4Msg + " " + userIdVal;
+                               }
+                               else if (thisArg.equals("-overRideProtection")) {
+                                       overRideProtection = true;
+                               }
+                               else if (thisArg.equals("-DISPLAY_ALL_VIDS")) {
+                                       displayAllVidsFlag = true;
+                               }
+                               else if (thisArg.equals("-vertexId")) {
+                                       i++;
+                                       if (i >= args.length) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               logger.error(" No value passed with -vertexId option.  ");
+                                               exit(0);
+                                       }
+                                       String nextArg = args[i];
+                                       argStr4Msg = argStr4Msg + " " + nextArg;
+                                       try {
+                                               vertexIdLong = Long.parseLong(nextArg);
+                                       } catch (Exception e) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               logger.error("Bad value passed with -vertexId option: ["
+                                                                               + nextArg + "]");
+                                               exit(0);
+                                       }
+                               }
+                               else if (thisArg.equals("-params4Collect")) {
+                                       i++;
+                                       if (i >= args.length) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               logger.error(" No value passed with -params4Collect option.  ");
+                                               exit(0);
+                                       }
+                                       dataString = args[i];
+                                       argStr4Msg = argStr4Msg + " " + dataString;
+                               }
+                               else if (thisArg.equals("-edgeId")) {
+                                       i++;
+                                       if (i >= args.length) {
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               logger.error(" No value passed with -edgeId option.  ");
+                                               exit(0);
+                                       }
+                                       String nextArg = args[i];
+                                       argStr4Msg = argStr4Msg + " " + nextArg;
+                                       edgeIdStr = nextArg;
+                               }
+                               else {
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                       logger.error(" Unrecognized argument passed to ForceDeleteTool: ["
+                                                                       + thisArg + "]. ");
+                                       logger.error(" Valid values are: -action -userId -vertexId -edgeId -overRideProtection -params4Collect -DISPLAY_ALL_VIDS");
+                                       exit(0);
+                               }
+                       }
+               }
+               
+               if( !actionVal.equals("COLLECT_DATA") && !actionVal.equals("DELETE_NODE") && !actionVal.equals("DELETE_EDGE")){
+                       String emsg = "Bad action parameter [" + actionVal + "] passed to ForceDeleteTool().  Valid values = COLLECT_DATA or DELETE_NODE or DELETE_EDGE\n";
+                       System.out.println(emsg);
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                       logger.error(emsg);
+                       exit(0);
+               }
+               
+               if( actionVal.equals("DELETE_NODE") && vertexIdLong == 0 ){
+                       String emsg = "ERROR: No vertex ID passed on DELETE_NODE request. \n";
+                       System.out.println(emsg);
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                       logger.error(emsg);
+                       exit(0);
+               }
+               else if( actionVal.equals("DELETE_EDGE") && edgeIdStr.equals("")){
+                       String emsg = "ERROR: No edge ID passed on DELETE_EDGE request. \n";
+                       System.out.println(emsg);
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                       logger.error(emsg);
+                       exit(0);
+               }
+               
+               
+               userIdVal = userIdVal.trim();
+               if( (userIdVal.length() < 6) || userIdVal.toUpperCase().equals("AAIADMIN") ){
+                       String emsg = "Bad userId parameter [" + userIdVal + "] passed to ForceDeleteTool(). must be not empty and not aaiadmin \n";
+                       System.out.println(emsg);
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                       logger.error(emsg);
+                       exit(0);
+               }
+               
+               String msg = "";
+               JanusGraph graph = null;
+               try {   
+               AAIConfig.init();
+               System.out.println("    ---- NOTE --- about to open graph (takes a little while)--------\n");
+                       graph = setupGraph(logger);
+               if( graph == null ){
+                       String emsg = "could not get graph object in ForceDeleteTool() \n";
+                       System.out.println(emsg);
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+                       logger.error(emsg);
+                       exit(0);
+               }
+       }
+           catch (AAIException e1) {
+                       msg =  e1.getErrorObject().toString();
+                       System.out.println(msg);
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
+                       logger.error(msg);
+                       exit(0);
+           }
+        catch (Exception e2) {
+                       msg =  e2.toString();
+                       System.out.println(msg);
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);
+                       logger.error(msg);
+                       exit(0);
+        }
+       
+               msg = "ForceDelete called by: userId [" + userIdVal + "] with these params: [" + argStr4Msg + "]";
+               System.out.println(msg);
+               logger.info(msg);
+       
+               ForceDelete fd = new ForceDelete(graph);
+       if( actionVal.equals("COLLECT_DATA") ){
+                       // When doing COLLECT_DATA, we expect them to either pass the vertexId or
+               // that the dataString string to be comma separated name value pairs like this:
+                       //    "propName1|propVal1,propName2|propVal2" etc.  We will look for a node or nodes
+                       //    that have properties that ALL match what was passed in.
+                       GraphTraversal<Vertex, Vertex> g = null;
+                       String qStringForMsg = "";
+                       int resCount = 0;
+                       if( vertexIdLong > 0 ){
+                               // They know which vertex they want to look at
+                               qStringForMsg = "graph.vertices(" + vertexIdLong + ")";
+                               Iterator <Vertex> vtxItr = graph.vertices( vertexIdLong );
+                               if( vtxItr != null && vtxItr.hasNext() ) {
+                                       Vertex vtx = vtxItr.next();
+                                       fd.showNodeInfo( logger, vtx, displayAllVidsFlag );
+                                       resCount++;
+                               }
+                       }
+                       else {
+                               // we need to find the node or nodes based on the dataString
+                               int firstPipeLoc = dataString.indexOf("|");
+                               if( firstPipeLoc <= 0 ){
+                                       msg =  "Must use the -params4Collect option when collecting data with data string in a format like: 'propName1|propVal1,propName2|propVal2'";
+                                       System.out.println(msg);
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                       logger.error(msg);
+                                       exit(0);
+                               }
+                               g  = graph.traversal().V();
+                               qStringForMsg = " graph.traversal().V()";
+                               // Note - if they're only passing one parameter, there won't be any commas
+                               String [] paramArr = dataString.split(",");
+                               for( int i = 0; i < paramArr.length; i++ ){
+                                       int pipeLoc = paramArr[i].indexOf("|");
+                                       if( pipeLoc <= 0 ){
+                                               msg =  "Must use the -params4Collect option when collecting data with data string in a format like: 'propName1|propVal1,propName2|propVal2'";
+                                               System.out.println(msg);
+                                               LoggingContext.statusCode(StatusCode.ERROR);
+                                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                                               logger.error(msg);
+                                               exit(0);
+                                       }
+                                       else {
+                                               String propName = paramArr[i].substring(0,pipeLoc);
+                                               String propVal = paramArr[i].substring(pipeLoc + 1);
+                                               g = g.has(propName,propVal);
+                                               qStringForMsg = qStringForMsg + ".has(" + propName + "," + propVal + ")";
+                                       }
+                               }
+                       
+                               if( (g != null)){
+                                       Iterator<Vertex> vertItor = g;
+                               while( vertItor.hasNext() ){
+                                       resCount++;
+                                       Vertex v = vertItor.next();
+                                       fd.showNodeInfo( logger, v, displayAllVidsFlag );
+                                       int descendantCount = fd.countDescendants( logger, v, 0 );
+                                       String infMsg = " Found " + descendantCount + " descendant nodes \n";
+                                       System.out.println( infMsg );
+                                       logger.info( infMsg );
+                               }
+                               }
+                               else {
+                                       msg =  "Bad JanusGraphQuery object.  ";
+                                       System.out.println(msg);
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                       logger.error(msg);
+                                       exit(0);
+                               }
+                       }
+                       
+                       String infMsg = "\n\n Found: " + resCount + " nodes for this query: [" + qStringForMsg + "]\n";
+                       System.out.println( infMsg );
+                       logger.info( infMsg );
+               } 
+               else if( actionVal.equals("DELETE_NODE") ){
+                       Iterator <Vertex> vtxItr = graph.vertices( vertexIdLong );
+                       if( vtxItr != null && vtxItr.hasNext() ) {
+                               Vertex vtx = vtxItr.next();
+                               fd.showNodeInfo( logger, vtx, displayAllVidsFlag );
+                       int descendantCount = fd.countDescendants( logger, vtx, 0 );
+                       String infMsg = " Found " + descendantCount + " descendant nodes.  Note - forceDelete does not cascade to " +
+                                       " child nodes, but they may become unreachable after the delete. \n";
+                       System.out.println( infMsg );
+                       logger.info( infMsg );
+                       
+                       int edgeCount = fd.countEdges( logger, vtx );
+                       
+                       infMsg = " Found total of " + edgeCount + " edges incident on this node.  \n";
+                       System.out.println( infMsg );
+                       logger.info( infMsg );
+                       
+                       if( fd.getNodeDelConfirmation(logger, userIdVal, vtx, descendantCount, edgeCount, overRideProtection) ){
+                                       vtx.remove();
+                                       graph.tx().commit();
+                                       infMsg = ">>>>>>>>>> Removed node with vertexId = " + vertexIdLong;
+                                       logger.info( infMsg );
+                                       System.out.println(infMsg);
+                       }
+                       else {
+                               infMsg = " Delete Cancelled. ";
+                               System.out.println(infMsg);
+                               logger.info( infMsg );
+                       }
+                       }
+                       else {
+                               String infMsg = ">>>>>>>>>> Vertex with vertexId = " + vertexIdLong + " not found.";
+                               System.out.println( infMsg );
+                               logger.info( infMsg );
+                       }
+               }
+               else if( actionVal.equals("DELETE_EDGE") ){
+                       Edge thisEdge = null;
+                       Iterator <Edge> edItr = graph.edges( edgeIdStr );
+                       if( edItr != null && edItr.hasNext() ) {
+                               thisEdge = edItr.next();
+                       }
+                       
+                       if( thisEdge == null ){
+                               String infMsg = ">>>>>>>>>> Edge with edgeId = " + edgeIdStr + " not found.";
+                               logger.info( infMsg );
+                               System.out.println(infMsg);
+                               exit(0);
+                       }
+                       
+                       if( fd.getEdgeDelConfirmation(logger, userIdVal, thisEdge, overRideProtection) ){
+                               thisEdge.remove();
+                               graph.tx().commit();
+                               String infMsg = ">>>>>>>>>> Removed edge with edgeId = " + edgeIdStr;
+                               logger.info( infMsg );
+                               System.out.println(infMsg);
+                       } 
+                       else {  
+                               String infMsg = " Delete Cancelled. ";
+                               System.out.println(infMsg);
+                               logger.info( infMsg );
+                       }
+                       exit(0);
+               }
+               else {
+                       String emsg = "Unknown action parameter [" + actionVal + "] passed to ForceDeleteTool().  Valid values = COLLECT_DATA, DELETE_NODE or DELETE_EDGE \n";
+                       System.out.println(emsg);
+                       logger.info( emsg );
+                       exit(0);
+               }
+
+               closeGraph(graph, logger);
+               exit(0);
+    
+       }// end of main()
+       
+       public static class ForceDelete {
+               
+               // Depth limit, presumably bounding countDescendants() recursion (that
+               // method is defined elsewhere in this class -- confirm the usage).
+               private final int MAXDESCENDENTDEPTH = 15;
+               // Graph handle that all lookups and deletes in this helper operate on.
+               private final JanusGraph graph;
+               public ForceDelete(JanusGraph graph) {
+                       this.graph = graph;
+               }
+               public void showNodeInfo(EELFLogger logger, Vertex tVert, Boolean displayAllVidsFlag ){ 
+                       
+                       try {
+                               Iterator<VertexProperty<Object>> pI = tVert.properties();
+                               String infStr = ">>> Found Vertex with VertexId = " + tVert.id() + ", properties:    ";
+                               System.out.println( infStr );
+                               logger.info(infStr);
+                               while( pI.hasNext() ){
+                                       VertexProperty<Object> tp = pI.next();
+                                       infStr = " [" + tp.key() + "|" + tp.value() + "] ";
+                                       System.out.println( infStr ); 
+                                       logger.info(infStr);
+                               }
+                       
+                               ArrayList <String> retArr = collectEdgeInfoForNode( logger, tVert, displayAllVidsFlag );
+                               for( String infoStr : retArr ){ 
+                                       System.out.println( infoStr ); 
+                                       logger.info(infoStr);
+                               }
+                       }
+                       catch (Exception e){
+                               String warnMsg = " -- Error -- trying to display edge info. [" + e.getMessage() + "]";
+                               System.out.println( warnMsg );
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                               logger.warn(warnMsg);
+                               LoggingContext.successStatusFields();
+                       }
+                       
+               }// End of showNodeInfo()
+
+               
+               public void showPropertiesForEdge( EELFLogger logger, Edge tEd ){ 
+                       String infMsg = "";
+                       if( tEd == null ){
+                               infMsg = "null Edge object passed to showPropertiesForEdge()";
+                               System.out.print(infMsg);
+                               logger.info(infMsg);
+                               return;
+                       }
+                       
+                       // Try to show the edge properties
+                       try {
+                               infMsg =" Label for this Edge = [" + tEd.label() + "] ";
+                               System.out.print(infMsg);
+                               logger.info(infMsg);
+                               
+                               infMsg =" EDGE Properties for edgeId = " + tEd.id() + ": ";
+                               System.out.print(infMsg);
+                               logger.info(infMsg);
+                               Iterator <String> pI = tEd.keys().iterator();
+                               while( pI.hasNext() ){
+                                       String propKey = pI.next();
+                                       infMsg = "Prop: [" + propKey + "], val = [" 
+                                                       + tEd.property(propKey) + "] ";
+                                       System.out.print(infMsg);
+                                       logger.info(infMsg);
+                               }
+                       }
+                       catch( Exception ex ){
+                               infMsg = " Could not retrieve properties for this edge. exMsg = [" 
+                                               + ex.getMessage() + "] ";
+                               System.out.println( infMsg ); 
+                               logger.info(infMsg);
+                       }
+                       
+                       // Try to show what's connected to the IN side of this Edge
+                       try {
+                               infMsg = " Looking for the Vertex on the IN side of the edge:  ";
+                               System.out.print(infMsg);
+                               logger.info(infMsg);
+                               Vertex inVtx = tEd.inVertex();
+                               Iterator<VertexProperty<Object>> pI = inVtx.properties();
+                               String infStr = ">>> Found Vertex with VertexId = " + inVtx.id() 
+                                       + ", properties:    ";
+                               System.out.println( infStr );
+                               logger.info(infStr);
+                               while( pI.hasNext() ){
+                                       VertexProperty<Object> tp = pI.next();
+                                       infStr = " [" + tp.key() + "|" + tp.value() + "] ";
+                                       System.out.println( infStr ); 
+                                       logger.info(infStr);
+                               }
+                       }
+                       catch( Exception ex ){
+                               infMsg = " Could not retrieve vertex data for the IN side of "
+                                               + "the edge. exMsg = [" + ex.getMessage() + "] ";
+                               System.out.println( infMsg ); 
+                               logger.info(infMsg);
+                       }
+                       
+                       // Try to show what's connected to the OUT side of this Edge
+                       try {
+                               infMsg = " Looking for the Vertex on the OUT side of the edge:  ";
+                               System.out.print(infMsg);
+                               logger.info(infMsg);
+                               Vertex outVtx = tEd.outVertex();
+                               Iterator<VertexProperty<Object>> pI = outVtx.properties();
+                               String infStr = ">>> Found Vertex with VertexId = " + outVtx.id() 
+                                       + ", properties:    ";
+                               System.out.println( infStr );
+                               logger.info(infStr);
+                               while( pI.hasNext() ){
+                                       VertexProperty<Object> tp = pI.next();
+                                       infStr = " [" + tp.key() + "|" + tp.value() + "] ";
+                                       System.out.println( infStr ); 
+                                       logger.info(infStr);
+                               }
+                       }
+                       catch( Exception ex ){
+                               infMsg = " Could not retrieve vertex data for the OUT side of "
+                                               + "the edge. exMsg = [" + ex.getMessage() + "] ";
+                               System.out.println( infMsg ); 
+                               logger.info(infMsg);
+                       }
+                       
+               }// end showPropertiesForEdge()
+
+               
+               
+               public ArrayList <String> collectEdgeInfoForNode( EELFLogger logger, Vertex tVert, boolean displayAllVidsFlag ){ 
+                       ArrayList <String> retArr = new ArrayList <String> ();
+                       Direction dir = Direction.OUT;
+                       for ( int i = 0; i <= 1; i++ ){
+                               if( i == 1 ){
+                                       // Second time through we'll look at the IN edges.
+                                       dir = Direction.IN;
+                               }
+                               Iterator <Edge> eI = tVert.edges(dir);
+                               if( ! eI.hasNext() ){
+                                       retArr.add("No " + dir + " edges were found for this vertex. ");
+                               }
+                               while( eI.hasNext() ){
+                                       Edge ed =  eI.next();
+                                       String edId = ed.id().toString();
+                                       String lab = ed.label();
+                                       Vertex vtx = null;
+                                       if( dir == Direction.OUT ){
+                                               // get the vtx on the "other" side
+                                               vtx = ed.inVertex();
+                                       }
+                                       else {
+                                               // get the vtx on the "other" side
+                                               vtx = ed.outVertex();
+                                       }
+                                       if( vtx == null ){
+                                               retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
+                                       }
+                                       else {
+                                               String nType = vtx.<String>property("aai-node-type").orElse(null);
+                                               if( displayAllVidsFlag ){
+                                                       // This should rarely be needed
+                                                       String vid = vtx.id().toString();
+                                                       retArr.add("Found an " + dir + " edge (" + lab + ") with EDGE-ID = " + edId +
+                                                                       ", between this vertex and a [" + nType + "] node with VtxId = " + vid );
+                                               }
+                                               else {
+                                                       // This is the normal case
+                                                       retArr.add("Found an " + dir + " edge (" + lab + ") between this vertex and a [" + nType + "] node. ");
+                                               }
+                                       }
+                               }
+                       }
+                       return retArr;
+                       
+               }// end of collectEdgeInfoForNode()
+
+               
+               public int countEdges( EELFLogger logger, Vertex vtx ){ 
+                       int edgeCount = 0;
+                       try {
+                               Iterator<Edge> edgesItr = vtx.edges(Direction.BOTH);
+                               while( edgesItr.hasNext() ){
+                                       edgesItr.next();
+                                       edgeCount++;
+                               }
+                       }
+                       catch (Exception e) {
+                               String wMsg = "-- ERROR -- Stopping the counting of edges because of Exception [" + e.getMessage() + "]";
+                               System.out.println( wMsg );
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                               logger.warn( wMsg );
+                               LoggingContext.successStatusFields();
+                       }
+                       return edgeCount;
+                       
+               }// end of countEdges()
+               
+
+               public int countDescendants(EELFLogger logger, Vertex vtx, int levelVal ){ 
+                       int totalCount = 0;
+                       int thisLevel = levelVal + 1;
+                       
+                       if( thisLevel > MAXDESCENDENTDEPTH ){
+                               String wMsg = "Warning -- Stopping the counting of descendents because we reached the max depth of " + MAXDESCENDENTDEPTH;
+                               System.out.println( wMsg );
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                               logger.warn( wMsg );
+                               return totalCount;
+                       }
+                       
+                       try {
+                               Iterator <Vertex> vertI = graph.traversal().V(vtx).union(__.outE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.OUT.toString()).inV(), __.inE().has(EdgeProperty.CONTAINS.toString(), AAIDirection.IN.toString()).outV());
+                               while( vertI != null && vertI.hasNext() ){
+                                       totalCount++;
+                                       Vertex childVtx = vertI.next();
+                                       totalCount = totalCount + countDescendants( logger, childVtx, thisLevel );
+                               }
+                       }
+                       catch (Exception e) {
+                               String wMsg = "Error -- Stopping the counting of descendents because of Exception [" + e.getMessage() + "]";
+                               System.out.println( wMsg );
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                               logger.warn( wMsg );
+                               LoggingContext.successStatusFields();
+                               
+                       }
+                       
+                       return totalCount;
+               }// end of countDescendants()
+
+               
+               public boolean getEdgeDelConfirmation( EELFLogger logger, String uid, Edge ed, 
+                               Boolean overRideProtection ) {
+                       
+                       showPropertiesForEdge( logger, ed );
+                       System.out.print("\n Are you sure you want to delete this EDGE? (y/n): ");
+                       Scanner s = new Scanner(System.in);
+                       s.useDelimiter("");
+                       String confirm = s.next();
+                       s.close();
+                       
+                       if (!confirm.equalsIgnoreCase("y")) {
+                               String infMsg = " User [" + uid + "] has chosen to abandon this delete request. ";
+                               System.out.println("\n" + infMsg);
+                               logger.info(infMsg);
+                               return false;
+                       }
+                       else {
+                               String infMsg = " User [" + uid + "] has confirmed this delete request. ";
+                               System.out.println("\n" + infMsg);
+                               logger.info(infMsg);
+                               return true;
+                       }
+               
+               } // End of getEdgeDelConfirmation()
+                       
+
+               public boolean getNodeDelConfirmation( EELFLogger logger, String uid, Vertex vtx, int edgeCount, 
+                               int descendantCount, Boolean overRideProtection ) {
+                       String thisNodeType = "";
+                       try {
+                               thisNodeType = vtx.<String>property("aai-node-type").orElse(null);
+                       }
+                       catch ( Exception nfe ){
+                               // Let the user know something is going on - but they can confirm the delete if they want to. 
+                               String infMsg = " -- WARNING -- could not get an aai-node-type for this vertex. -- WARNING -- ";
+                               System.out.println( infMsg );
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                               logger.warn( infMsg );
+                               LoggingContext.successStatusFields();
+                       }
+                       
+                       String ntListString = "";  
+                       String maxDescString = "";
+                       String maxEdgeString = "";
+                       
+                       int maxDescCount = 10; // default value
+                       int maxEdgeCount = 10; // default value
+                       ArrayList <String> protectedNTypes = new ArrayList <String> ();
+                       protectedNTypes.add("cloud-region");  // default value
+                       
+                       try {
+                               ntListString = AAIConfig.get("aai.forceDel.protected.nt.list");
+                               maxDescString = AAIConfig.get("aai.forceDel.protected.descendant.count");
+                               maxEdgeString = AAIConfig.get("aai.forceDel.protected.edge.count");
+                       }
+                       catch ( Exception nfe ){
+                               // Don't worry, we will use default values 
+                               String infMsg = "-- WARNING -- could not get aai.forceDel.protected values from aaiconfig.properties -- will use default values. ";
+                               System.out.println( infMsg );
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                               logger.warn( infMsg );
+                               LoggingContext.successStatusFields();
+                       }
+                       
+                       if( maxDescString != null && !maxDescString.equals("") ){
+                               try {
+                                       maxDescCount = Integer.parseInt(maxDescString);
+                               }
+                               catch ( Exception nfe ){
+                                       // Don't worry, we will leave "maxDescCount" set to the default value 
+                               }
+                       }
+                       
+                       if( maxEdgeString != null &&  !maxEdgeString.equals("") ){
+                               try {
+                                       maxEdgeCount = Integer.parseInt(maxEdgeString);
+                               }
+                               catch ( Exception nfe ){
+                                       // Don't worry, we will leave "maxEdgeCount" set to the default value 
+                               }
+                       }
+                       
+                       if( ntListString != null && !ntListString.trim().equals("") ){
+                               String [] nodeTypes = ntListString.split("\\|");
+                               for( int i = 0; i < nodeTypes.length; i++ ){
+                                       protectedNTypes.add(nodeTypes[i]);
+                               }
+                       }
+                       
+                       boolean giveProtOverRideMsg = false;
+                       boolean giveProtErrorMsg = false;
+                       if( descendantCount > maxDescCount ){
+                               // They are trying to delete a node with a lots of descendants
+                               String infMsg = " >> WARNING >> This node has more descendant edges than the max ProtectedDescendantCount: " + edgeCount + ".  Max = " + 
+                                                       maxEdgeCount + ".  It can be DANGEROUS to delete one of these. << WARNING << ";
+                               System.out.println(infMsg);
+                               logger.info(infMsg);
+                               if( ! overRideProtection ){
+                                       // They cannot delete this kind of node without using the override option
+                                       giveProtErrorMsg = true;
+                               }
+                               else {
+                                       giveProtOverRideMsg = true;
+                               }
+                       }
+                       
+                       if( edgeCount > maxEdgeCount ){
+                               // They are trying to delete a node with a lot of edges
+                               String infMsg = " >> WARNING >> This node has more edges than the max ProtectedEdgeCount: " + edgeCount + ".  Max = " + 
+                                                       maxEdgeCount + ".  It can be DANGEROUS to delete one of these. << WARNING << ";
+                               System.out.println(infMsg);
+                               logger.info(infMsg);
+                               if( ! overRideProtection ){
+                                       // They cannot delete this kind of node without using the override option
+                                       giveProtErrorMsg = true;
+                               }
+                               else {
+                                       giveProtOverRideMsg = true;
+                               }
+                       }
+                       
+                       if( thisNodeType != null && !thisNodeType.equals("") && protectedNTypes.contains(thisNodeType) ){
+                               // They are trying to delete a protected Node Type
+                               String infMsg = " >> WARNING >> This node is a PROTECTED NODE-TYPE (" + thisNodeType + "). " +
+                                               " It can be DANGEROUS to delete one of these. << WARNING << ";
+                               System.out.println(infMsg);
+                               logger.info(infMsg);
+                               if( ! overRideProtection ){
+                                       // They cannot delete this kind of node without using the override option
+                                       giveProtErrorMsg = true;
+                               }
+                               else {
+                                       giveProtOverRideMsg = true;
+                               }
+                       }
+                       
+                       if( giveProtOverRideMsg ){
+                               String infMsg = " !!>> WARNING >>!! you are using the overRideProtection parameter which will let you do this potentially dangerous delete.";
+                               System.out.println("\n" + infMsg);
+                               logger.info(infMsg);
+                       }
+                       else if( giveProtErrorMsg ) {
+                               String errMsg = " ERROR >> this kind of node can only be deleted if you pass the overRideProtection parameter.";
+                               System.out.println("\n" + errMsg);
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                               logger.error(errMsg);
+                               LoggingContext.successStatusFields();
+                               return false;
+                       }
+                       
+                       System.out.print("\n Are you sure you want to do this delete? (y/n): ");
+                       Scanner s = new Scanner(System.in);
+                       s.useDelimiter("");
+                       String confirm = s.next();
+                       s.close();
+                       
+                       if (!confirm.equalsIgnoreCase("y")) {
+                               String infMsg = " User [" + uid + "] has chosen to abandon this delete request. ";
+                               System.out.println("\n" + infMsg);
+                               logger.info(infMsg);
+                               return false;
+                       }
+                       else {
+                               String infMsg = " User [" + uid + "] has confirmed this delete request. ";
+                               System.out.println("\n" + infMsg);
+                               logger.info(infMsg);
+                               return true;
+                       }
+               
+               } // End of getNodeDelConfirmation()
+       }
+
+       public static JanusGraph setupGraph(EELFLogger logger){
+
+               JanusGraph janusGraph = null;
+
+               try (InputStream inputStream = new FileInputStream(AAIConstants.REALTIME_DB_CONFIG);){
+
+                       Properties properties = new Properties();
+                       properties.load(inputStream);
+
+                       if("inmemory".equals(properties.get("storage.backend"))){
+                               janusGraph = AAIGraph.getInstance().getGraph();
+                               graphType = "inmemory";
+                       } else {
+                               janusGraph = JanusGraphFactory.open(
+                                               new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG)
+                                               .forService(ForceDeleteTool.class.getSimpleName())
+                                               .withGraphType("realtime1")
+                                               .buildConfiguration()
+                               );
+                       }
+               } catch (Exception e) {
+                       logger.error("Unable to open the graph", LogFormatTools.getStackTop(e));
+               }
+
+               return janusGraph;
+       }
+
+       public static void closeGraph(JanusGraph graph, EELFLogger logger){
+
+               try {
+                       if("inmemory".equals(graphType)) {
+                               return;
+                       }
+                       if( graph != null && graph.isOpen() ){
+                               graph.tx().close();
+                               graph.close();
+                       }
+               } catch (Exception ex) {
+                       // Don't throw anything because JanusGraph sometimes is just saying that the graph is already closed{
+                       logger.warn("WARNING from final graph.shutdown()", ex);
+               }
+       }
+}
+
diff --git a/src/main/java/org/onap/aai/dbgen/GraphMLTokens.java b/src/main/java/org/onap/aai/dbgen/GraphMLTokens.java
new file mode 100644 (file)
index 0000000..d43b57f
--- /dev/null
@@ -0,0 +1,56 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dbgen;
+
/**
 * A collection of string tokens used for GraphML related data
 * (element and attribute names from the GraphML schema, plus the
 * attr.type value names).
 *
 * This is a constants-only holder; it is never instantiated.
 */
public class GraphMLTokens {
    public static final String GRAPHML = "graphml";
    public static final String XMLNS = "xmlns";
    public static final String GRAPHML_XMLNS = "http://graphml.graphdrawing.org/xmlns";
    public static final String G = "G";
    public static final String EDGEDEFAULT = "edgedefault";
    public static final String DIRECTED = "directed";
    public static final String KEY = "key";
    public static final String FOR = "for";
    public static final String ID = "id";
    public static final String ATTR_NAME = "attr.name";
    public static final String ATTR_TYPE = "attr.type";
    public static final String GRAPH = "graph";
    public static final String NODE = "node";
    public static final String EDGE = "edge";
    public static final String SOURCE = "source";
    public static final String TARGET = "target";
    public static final String DATA = "data";
    public static final String LABEL = "label";
    public static final String STRING = "string";
    public static final String FLOAT = "float";
    public static final String DOUBLE = "double";
    public static final String LONG = "long";
    public static final String BOOLEAN = "boolean";
    public static final String INT = "int";
    public static final String ARRAY = "array";
    public static final String SET = "set";
    public static final String LIST = "list";
    public static final String ITEM = "item";
    public static final String _DEFAULT = "_default";

    /** Utility class: prevent instantiation. */
    private GraphMLTokens() {
        throw new UnsupportedOperationException("GraphMLTokens is a constants holder");
    }
}
diff --git a/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java b/src/main/java/org/onap/aai/dbgen/schemamod/SchemaMod.java
new file mode 100644 (file)
index 0000000..c0f8ee9
--- /dev/null
@@ -0,0 +1,177 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dbgen.schemamod;
+
+import java.util.Properties;
+
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.UniquePropertyCheck;
+import org.slf4j.MDC;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+public class SchemaMod {
+
+       private final LoaderFactory loaderFactory;
+
+       private final SchemaVersions schemaVersions;
+
+    public SchemaMod(LoaderFactory loaderFactory, SchemaVersions schemaVersions){
+        this.loaderFactory  = loaderFactory;
+        this.schemaVersions = schemaVersions;
+       }
+
+       public void execute(String[] args) {
+
+               // Set the logging file properties to be used by EELFManager
+               Properties props = System.getProperties();
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_SCHEMA_MOD_LOGBACK_PROPS);
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
+
+               EELFLogger logger = EELFManager.getInstance().getLogger(UniquePropertyCheck.class.getSimpleName());
+               MDC.put("logFilenameAppender", SchemaMod.class.getSimpleName());
+
+               // NOTE -- We're just working with properties that are used for NODES
+               // for now.
+               String propName = "";
+               String targetDataType = "";
+               String targetIndexInfo = "";
+               String preserveDataFlag = "";
+
+               String usageString = "Usage: SchemaMod propertyName targetDataType targetIndexInfo preserveDataFlag \n";
+               if (args.length != 4) {
+                       String emsg = "Four Parameters are required.  \n" + usageString;
+                       logAndPrint(logger, emsg);
+                       System.exit(1);
+               } else {
+                       propName = args[0];
+                       targetDataType = args[1];
+                       targetIndexInfo = args[2];
+                       preserveDataFlag = args[3];
+               }
+
+               if (propName.equals("")) {
+                       String emsg = "Bad parameter - propertyName cannot be empty.  \n" + usageString;
+                       logAndPrint(logger, emsg);
+                       System.exit(1);
+               } else if (!targetDataType.equals("String") && !targetDataType.equals("Set<String>")
+                               && !targetDataType.equals("Integer") && !targetDataType.equals("Long")
+                               && !targetDataType.equals("Boolean")) {
+                       String emsg = "Unsupported targetDataType.  We only support String, Set<String>, Integer, Long or Boolean for now.\n"
+                                       + usageString;
+                       logAndPrint(logger, emsg);
+                       System.exit(1);
+               } else if (!targetIndexInfo.equals("uniqueIndex") && !targetIndexInfo.equals("index")
+                               && !targetIndexInfo.equals("noIndex")) {
+                       String emsg = "Unsupported IndexInfo.  We only support: 'uniqueIndex', 'index' or 'noIndex'.\n"
+                                       + usageString;
+                       logAndPrint(logger, emsg);
+                       System.exit(1);
+               }
+
+               try {
+                       AAIConfig.init();
+                       ErrorLogHelper.loadProperties();
+               } catch (Exception ae) {
+                       String emsg = "Problem with either AAIConfig.init() or ErrorLogHelper.LoadProperties(). ";
+                       logAndPrint(logger, emsg + "[" + ae.getMessage() + "]");
+                       System.exit(1);
+               }
+
+               // Give a big warning if the DbMaps.PropertyDataTypeMap value does not
+               // agree with what we're doing
+               String warningMsg = "";
+
+               if (!warningMsg.equals("")) {
+                       logAndPrint(logger, "\n>>> WARNING <<<< ");
+                       logAndPrint(logger, ">>> " + warningMsg + " <<<");
+               }
+
+               logAndPrint(logger, ">>> Processing will begin in 5 seconds (unless interrupted). <<<");
+               try {
+                       // Give them a chance to back out of this
+                       Thread.sleep(5000);
+               } catch (java.lang.InterruptedException ie) {
+                       logAndPrint(logger, " DB Schema Update has been aborted. ");
+                       System.exit(1);
+               }
+
+        logAndPrint(logger, "    ---- NOTE --- about to open graph (takes a little while)\n");
+
+        SchemaVersion version = schemaVersions.getDefaultVersion();
+        QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+        ModelType introspectorFactoryType = ModelType.MOXY;
+        Loader loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);
+        TransactionalGraphEngine engine = null;
+        try {
+            engine = new JanusGraphDBEngine(queryStyle, DBConnectionType.REALTIME, loader);
+            SchemaModInternal internal = new SchemaModInternal(engine, logger, propName, targetDataType, targetIndexInfo, new Boolean(preserveDataFlag));
+            internal.execute();
+            engine.startTransaction();
+            engine.tx().close();
+            logAndPrint(logger, "------ Completed the SchemaMod -------- ");
+        } catch (Exception e) {
+            String emsg = "Not able to complete the requested SchemaMod \n";
+            logAndPrint(logger, e.getMessage());
+            logAndPrint(logger, emsg);
+            System.exit(1);
+        }
+       }
+       /**
+        * Log and print.
+        *
+        * @param logger the logger
+        * @param msg the msg
+        */
+       protected void logAndPrint(EELFLogger logger, String msg) {
+               System.out.println(msg);
+               logger.info(msg);
+       }
+
+       public static void main(String[] args) {
+
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
+                               "org.onap.aai.config",
+                               "org.onap.aai.setup"
+               );
+
+               LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
+               SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
+               SchemaMod schemaMod = new SchemaMod(loaderFactory, schemaVersions);
+               schemaMod.execute(args);
+
+               System.exit(0);
+       }
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/dbgen/schemamod/SchemaModInternal.java b/src/main/java/org/onap/aai/dbgen/schemamod/SchemaModInternal.java
new file mode 100644 (file)
index 0000000..b5ce16b
--- /dev/null
@@ -0,0 +1,317 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dbgen.schemamod;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.UUID;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.util.FormatDate;
+import org.onap.aai.util.UniquePropertyCheck;
+
+import com.att.eelf.configuration.EELFLogger;
+import org.janusgraph.core.Cardinality;
+import org.janusgraph.core.PropertyKey;
+import org.janusgraph.core.schema.JanusGraphManagement;
+
+public class SchemaModInternal {
+       private static final String FROMAPPID = "AAI-UTILS";
+       private final String TRANSID = UUID.randomUUID().toString();
+       private final TransactionalGraphEngine engine;
+       private final String propName;
+       private final Class<?> type;
+       private final String indexType;
+       private final boolean preserveData;
+       private final Cardinality cardinality;
+       private final EELFLogger logger;
+       
+       public SchemaModInternal(TransactionalGraphEngine engine, EELFLogger logger, String propName, String type, String indexType, boolean preserveData) {
+               this.engine = engine;
+               this.propName = propName;
+               this.type = determineClass(type);
+               this.indexType = indexType;
+               this.preserveData = preserveData;
+               this.cardinality = determineCardinality(type);
+               this.logger = logger;
+       }
+       
+       
+       private Class<?> determineClass(String type) {
+               final Class<?> result;
+               if (type.equals("String")) {
+                       result = String.class;
+               } else if (type.equals("Set<String>")) {
+                       result = String.class;
+               } else if (type.equals("Integer")) {
+                       result = Integer.class;
+               } else if (type.equals("Boolean")) {
+                       result = Boolean.class;
+               } else if (type.equals("Character")) {
+                       result = Character.class;
+               } else if (type.equals("Long")) {
+                       result = Long.class;
+               } else if (type.equals("Float")) {
+                       result = Float.class;
+               } else if (type.equals("Double")) {
+                       result = Double.class;
+               } else {
+                       String emsg = "Not able translate the targetDataType [" + type + "] to a Class variable.\n";
+                       logAndPrint(logger, emsg);
+                       throw new RuntimeException(emsg);
+               }
+               
+               return result;
+       }
+       private Cardinality determineCardinality(String type) {
+               if (type.equals("Set<String>")) {
+                       return Cardinality.SET;
+               } else {
+                       return Cardinality.SINGLE;
+               }
+       }
+       public void execute() {
+               JanusGraphManagement graphMgt = null;
+               boolean success = false;
+               try {
+                       // Make sure this property is in the DB.
+                       graphMgt = engine.asAdmin().getManagementSystem();
+                       if (graphMgt == null) {
+                               String emsg = "Not able to get a graph Management object in SchemaMod.java\n";
+                               logAndPrint(logger, emsg);
+                               System.exit(1);
+                       }
+                       PropertyKey origPropKey = graphMgt.getPropertyKey(propName);
+                       if (origPropKey == null) {
+                               String emsg = "The propName = [" + propName + "] is not defined in our graph. ";
+                               logAndPrint(logger, emsg);
+                               System.exit(1);
+                       }
+       
+                       if (indexType.equals("uniqueIndex")) {
+                               // Make sure the data in the property being changed can have a
+                               // unique-index put on it.
+                               // Ie. if there are duplicate values, we will not be able to
+                               // migrate the data back into the property.
+                               
+                               
+                               Graph grTmp = engine.tx();
+                               if( grTmp == null ){
+                                       grTmp = engine.startTransaction();
+                               }
+                               // This is good to know in the logs
+                               logAndPrint(logger, "-- Starting UniquePropertyCheck. (this may take a loooong time) --");  
+                               
+                               Boolean foundDupesFlag = UniquePropertyCheck.runTheCheckForUniqueness(TRANSID, FROMAPPID,
+                                               grTmp, propName, logger);
+                               if (foundDupesFlag) {
+                                       logAndPrint(logger,
+                                                       "\n\n!!!!!! >> Cannot add a uniqueIndex for the property: [" + propName
+                                                                       + "] because duplicate values were found.  See the log for details on which"
+                                                                       + " nodes have this value.  \nThey will need to be resolved (by updating those values to new"
+                                                                       + " values or deleting unneeded nodes) using the standard REST-API \n");
+                                       System.exit(1);
+                               }
+                               logAndPrint(logger, "-- Finished UniquePropertyCheck. ");  // This is good to know in the logs
+                       }
+       
+       
+                       // ---- If we made it to here - we must be OK with making this change
+       
+                       // Rename this property to a backup name (old name with "retired_"
+                       // appended plus a dateStr)
+                       FormatDate fd = new FormatDate("MMddHHmm", "GMT");
+                       String dteStr= fd.getDateTime();
+                       
+                       String retiredName = propName + "-" + dteStr + "-RETIRED";
+                       graphMgt.changeName(origPropKey, retiredName);
+       
+                       // Create a new property using the original property name and the
+                       // targetDataType
+                       PropertyKey freshPropKey = graphMgt.makePropertyKey(propName).dataType(type)
+                                       .cardinality(cardinality).make();
+       
+                       // Create the appropriate index (if any)
+                       if (indexType.equals("uniqueIndex")) {
+                               String freshIndexName = propName + dteStr;
+                               graphMgt.buildIndex(freshIndexName, Vertex.class).addKey(freshPropKey).unique().buildCompositeIndex();
+                       } else if (indexType.equals("index")) {
+                               String freshIndexName = propName + dteStr;
+                               graphMgt.buildIndex(freshIndexName, Vertex.class).addKey(freshPropKey).buildCompositeIndex();
+                       }
+       
+                       logAndPrint(logger, "Committing schema changes with graphMgt.commit()");
+                       graphMgt.commit();
+                       engine.commit();
+                       Graph grTmp2 = engine.startTransaction();
+                       
+       
+                       // For each node that has this property, update the new from the old
+                       // and then remove the
+                       // old property from that node
+                       Iterator<Vertex> verts = grTmp2.traversal().V().has(retiredName);
+                       int vtxCount = 0;
+                       ArrayList<String> alreadySeenVals = new ArrayList<String>();
+                       while (verts.hasNext()) {
+                               vtxCount++;
+                               Vertex tmpVtx =  verts.next();
+                               String tmpVid = tmpVtx.id().toString();
+                               Object origVal = tmpVtx.<Object> property(retiredName).orElse(null);
+                               if (preserveData) {
+                                       tmpVtx.property(propName, origVal);
+                                       if (indexType.equals("uniqueIndex")) {
+                                               // We're working on a property that is being used as a
+                                               // unique index
+                                               String origValStr = "";
+                                               if (origVal != null) {
+                                                       origValStr = origVal.toString();
+                                               }
+                                               if (alreadySeenVals.contains(origValStr)) {
+                                                       // This property is supposed to be unique, but we've
+                                                       // already seen this value in this loop
+                                                       // This should have been caught up in the first part
+                                                       // of SchemaMod, but since it wasn't, we
+                                                       // will just log the problem.
+                                                       logAndPrint(logger,
+                                                                       "\n\n ---------- ERROR - could not migrate the old data [" + origValStr
+                                                                                       + "] for propertyName [" + propName
+                                                                                       + "] because this property is having a unique index put on it.");
+                                                       showPropertiesAndEdges(TRANSID, FROMAPPID, tmpVtx, logger);
+                                                       logAndPrint(logger, "-----------------------------------\n");
+                                               } else {
+                                                       // Ok to add this prop in as a unique value
+                                                       tmpVtx.property(propName, origVal);
+                                                       logAndPrint(logger,
+                                                                       "INFO -- just did the add of the freshPropertyKey and updated it with the orig value ("
+                                                                                       + origValStr + ")");
+                                               }
+                                               alreadySeenVals.add(origValStr);
+                                       } else {
+                                               // We are not working with a unique index
+                                               tmpVtx.property(propName, origVal);
+                                               logAndPrint(logger,
+                                                               "INFO -- just did the add of the freshPropertyKey and updated it with the orig value ("
+                                                                               + origVal.toString() + ")");
+                                       }
+                               } else {
+                                       // existing nodes just won't have that property anymore
+                                       // Not sure if we'd ever actually want to do this -- maybe
+                                       // we'd do this if the new
+                                       // data type was not compatible with the old?
+                               }
+                               tmpVtx.property(retiredName).remove();
+                               logAndPrint(logger, "INFO -- just did the remove of the " + retiredName + " from this vertex. (vid="
+                                               + tmpVid + ")");
+                       }
+       
+                       success = true;
+               } catch (Exception ex) {
+                       logAndPrint(logger, "Threw a regular Exception: ");
+                       logAndPrint(logger, ex.getMessage());
+               } finally {
+                       if (graphMgt != null && graphMgt.isOpen()) {
+                               // Any changes that worked correctly should have already done
+                               // their commits.
+                               graphMgt.rollback();
+                       }
+                       if (engine != null) {
+                               if (success) {
+                                       engine.commit();
+                               } else {
+                                       engine.rollback();
+                               }
+                       }
+               }
+       }
+       
+       /**
+        * Show properties and edges.
+        *
+        * @param transId the trans id
+        * @param fromAppId the from app id
+        * @param tVert the t vert
+        * @param logger the logger
+        */
+       private static void showPropertiesAndEdges(String transId, String fromAppId, Vertex tVert, EELFLogger logger) {
+
+               if (tVert == null) {
+                       logAndPrint(logger, "Null node passed to showPropertiesAndEdges.");
+               } else {
+                       String nodeType = "";
+                       Object ob = tVert.<String> property("aai-node-type");
+                       if (ob == null) {
+                               nodeType = "null";
+                       } else {
+                               nodeType = ob.toString();
+                       }
+
+                       logAndPrint(logger, " AAINodeType/VtxID for this Node = [" + nodeType + "/" + tVert.id() + "]");
+                       logAndPrint(logger, " Property Detail: ");
+                       Iterator<VertexProperty<Object>> pI = tVert.properties();
+                       while (pI.hasNext()) {
+                               VertexProperty<Object> tp = pI.next();
+                               Object val = tp.value();
+                               logAndPrint(logger, "Prop: [" + tp.key() + "], val = [" + val + "] ");
+                       }
+
+                       Iterator<Edge> eI = tVert.edges(Direction.BOTH);
+                       if (!eI.hasNext()) {
+                               logAndPrint(logger, "No edges were found for this vertex. ");
+                       }
+                       while (eI.hasNext()) {
+                               Edge ed = eI.next();
+                               String lab = ed.label();
+                               Vertex vtx;
+                               if (tVert.equals(ed.inVertex())) {
+                                       vtx = ed.outVertex();
+                               } else {
+                                       vtx = ed.inVertex();
+                               }
+                               if (vtx == null) {
+                                       logAndPrint(logger,
+                                                       " >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
+                               } else {
+                                       String nType = vtx.<String> property("aai-node-type").orElse(null);
+                                       String vid = vtx.id().toString();
+                                       logAndPrint(logger, "Found an edge (" + lab + ") from this vertex to a [" + nType
+                                                       + "] node with VtxId = " + vid);
+                               }
+                       }
+               }
+       } // End of showPropertiesAndEdges()
+
+       /**
+        * Log and print.
+        *
+        * @param logger the logger
+        * @param msg the msg
+        */
+       protected static void logAndPrint(EELFLogger logger, String msg) {
+               System.out.println(msg);
+               logger.info(msg);
+       }
+       
+}
diff --git a/src/main/java/org/onap/aai/dbgen/tags/Command.java b/src/main/java/org/onap/aai/dbgen/tags/Command.java
new file mode 100644 (file)
index 0000000..ac553f9
--- /dev/null
@@ -0,0 +1,25 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dbgen.tags;
+
+/**
+ * A single deferred action used by the tagging commands; implementations do
+ * their work in {@link #execute()} and may propagate any failure.
+ */
+@FunctionalInterface
+interface Command {
+	/**
+	 * Runs the command.  ("public abstract" was removed: interface methods
+	 * carry those modifiers implicitly.)
+	 *
+	 * @throws Exception if the command cannot complete
+	 */
+	void execute() throws Exception;
+}
diff --git a/src/main/java/org/onap/aai/interceptors/AAIContainerFilter.java b/src/main/java/org/onap/aai/interceptors/AAIContainerFilter.java
new file mode 100644 (file)
index 0000000..6fb7356
--- /dev/null
@@ -0,0 +1,41 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors;
+
+import org.onap.aai.util.FormatDate;
+
+import java.util.UUID;
+
+public abstract class  AAIContainerFilter {
+    
+       protected String genDate() {
+               FormatDate fd = new FormatDate("YYMMdd-HH:mm:ss:SSS");
+               return fd.getDateTime();
+       }
+       
+       protected boolean isValidUUID(String transId) {
+               try {
+                       UUID.fromString(transId);
+               } catch (IllegalArgumentException e) {
+                       return false;
+               }
+               return true;
+       }
+}
diff --git a/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java b/src/main/java/org/onap/aai/interceptors/AAIHeaderProperties.java
new file mode 100644 (file)
index 0000000..6801aee
--- /dev/null
@@ -0,0 +1,39 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors;
+
+/**
+ * HTTP header names shared by the A&AI container request/response filters.
+ * Constants-only holder; the private constructor prevents instantiation.
+ */
+public final class AAIHeaderProperties {
+	
+	private AAIHeaderProperties(){}
+	
+	/** Header carrying the A&AI request context. */
+	public static final String REQUEST_CONTEXT = "aai-request-context";
+	
+	/** Standard header used to tunnel another HTTP verb through POST. */
+	public static final String HTTP_METHOD_OVERRIDE = "X-HTTP-Method-Override";
+	
+	/** Caller-supplied transaction id. */
+	public static final String TRANSACTION_ID = "X-TransactionId";
+	
+	/** Identifies the calling application. */
+	public static final String FROM_APP_ID = "X-FromAppId";
+	
+	/** A&AI transaction id; NOTE(review): presumably assigned server-side,
+	 *  distinct from X-TransactionId -- confirm against the filters. */
+	public static final String AAI_TX_ID = "X-AAI-TXID";
+	
+	/** NOTE(review): request echo header -- semantics set by the filters. */
+	public static final String AAI_REQUEST = "X-REQUEST";
+	
+	/** NOTE(review): request timestamp header -- semantics set by the filters. */
+	public static final String AAI_REQUEST_TS = "X-REQUEST-TS";
+}
diff --git a/src/main/java/org/onap/aai/interceptors/package-info.java b/src/main/java/org/onap/aai/interceptors/package-info.java
new file mode 100644 (file)
index 0000000..ee9c334
--- /dev/null
@@ -0,0 +1,36 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+/**
+ * <b>Interceptors</b> package is subdivided to pre and post interceptors
+ * If you want to add an additional interceptor you would need to add
+ * the priority level to AAIRequestFilterPriority or AAIResponseFilterPriority
+ * to give a value which indicates the order in which the interceptor
+ * will be triggered and also you will add that value like here
+ *
+ * <pre>
+ *     <code>
+ *         @Priority(AAIRequestFilterPriority.YOUR_PRIORITY)
+ *         public class YourInterceptor extends AAIContainerFilter implements ContainerRequestFilter {
+ *
+ *         }
+ *     </code>
+ * </pre>
+ */
+package org.onap.aai.interceptors;
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java b/src/main/java/org/onap/aai/interceptors/post/AAIResponseFilterPriority.java
new file mode 100644 (file)
index 0000000..146f847
--- /dev/null
@@ -0,0 +1,40 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.post;
+
+/**
+ * Priority constants for the response filters.  Response filter order is
+ * reverse sorted, so the chain runs: HEADER_MANIPULATION (4000) first, then
+ * RESPONSE_TRANS_LOGGING (3000), RESET_LOGGING_CONTEXT (2000), and
+ * INVALID_RESPONSE_STATUS (1000) last.
+ */
+public final class AAIResponseFilterPriority {
+	
+	/** Constants-only holder; never instantiated. */
+	private AAIResponseFilterPriority() {}
+
+	/** Runs last: rewrites invalid response statuses (see InvalidResponseStatus). */
+	public static final int INVALID_RESPONSE_STATUS = 1000;
+
+	/** Resets the per-request logging context (see ResetLoggingContext). */
+	public static final int RESET_LOGGING_CONTEXT = 2000;
+
+	/** Priority for the response transaction-logging filter. */
+	public static final int RESPONSE_TRANS_LOGGING = 3000;
+
+	/** Runs first among response filters: response header manipulation. */
+	public static final int HEADER_MANIPULATION = 4000;
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/post/InvalidResponseStatus.java b/src/main/java/org/onap/aai/interceptors/post/InvalidResponseStatus.java
new file mode 100644 (file)
index 0000000..7fd0b9c
--- /dev/null
@@ -0,0 +1,65 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.post;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.logging.ErrorLogHelper;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+import javax.ws.rs.core.MediaType;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+@Priority(AAIResponseFilterPriority.INVALID_RESPONSE_STATUS)
+public class InvalidResponseStatus extends AAIContainerFilter implements ContainerResponseFilter {
+
+       @Override
+       public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
+                       throws IOException {
+
+               if(responseContext.getStatus() == 405){
+
+                   responseContext.setStatus(400);
+                       AAIException e = new AAIException("AAI_3012");
+                       ArrayList<String> templateVars = new ArrayList<>();
+
+                       List<MediaType> mediaTypeList = new ArrayList<>();
+
+                       String contentType = responseContext.getHeaderString("Content-Type");
+
+                       if (contentType == null) {
+                               mediaTypeList.add(MediaType.APPLICATION_XML_TYPE);
+                       } else {
+                               mediaTypeList.add(MediaType.valueOf(contentType));
+                       }
+
+                       String message = ErrorLogHelper.getRESTAPIErrorResponse(mediaTypeList, e, templateVars);
+
+                       responseContext.setEntity(message);
+               }
+
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/post/ResetLoggingContext.java b/src/main/java/org/onap/aai/interceptors/post/ResetLoggingContext.java
new file mode 100644 (file)
index 0000000..baf28ad
--- /dev/null
@@ -0,0 +1,98 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.post;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.annotation.Priority;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+import javax.ws.rs.core.Response.Status;
+import javax.ws.rs.core.Response.StatusType;
+import java.io.IOException;
+
+@Priority(AAIResponseFilterPriority.RESET_LOGGING_CONTEXT)
+public class ResetLoggingContext extends AAIContainerFilter implements ContainerResponseFilter {
+
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(ResetLoggingContext.class);
+
+       @Autowired
+       private HttpServletRequest httpServletRequest;
+       
+       @Override
+       public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
+                       throws IOException {
+
+               this.cleanLoggingContext(responseContext);
+
+       }
+
+       private void cleanLoggingContext(ContainerResponseContext responseContext) {
+               //String url = httpServletRequest.getRequestURL().toString();
+               boolean success = true;
+               String uri = httpServletRequest.getRequestURI();
+               String queryString = httpServletRequest.getQueryString();
+
+               if(queryString != null && !queryString.isEmpty()){
+                   uri = uri + "?" + queryString;
+               }
+               // For now, we use the the HTTP status code, 
+               // This may change, once the requirements for response codes are defined
+
+               int httpStatusCode = responseContext.getStatus();
+               if ( httpStatusCode < 100 || httpStatusCode > 599 ) {
+                       httpStatusCode = Status.INTERNAL_SERVER_ERROR.getStatusCode();
+               }
+               LoggingContext.responseCode(Integer.toString(httpStatusCode));
+               
+               StatusType sType = responseContext.getStatusInfo();
+               if ( sType != null ) {
+                       Status.Family sFamily = sType.getFamily();
+                       if ( ! ( Status.Family.SUCCESSFUL.equals(sFamily)  ||
+                               ( Status.NOT_FOUND.equals(Status.fromStatusCode(httpStatusCode)) ) ) ) {
+                               success = false;
+                       }               
+               }
+               else {
+                       if ( (httpStatusCode < 200 || httpStatusCode > 299) && ( ! ( Status.NOT_FOUND.equals(Status.fromStatusCode(httpStatusCode) ) ) ) ) {
+                               success = false;
+                       }
+               }
+               if (success) {
+                       LoggingContext.statusCode(StatusCode.COMPLETE);
+                       LOGGER.info(uri + " call succeeded");
+               }
+               else {
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LOGGER.error(uri + " call failed with responseCode=" + httpStatusCode);
+               }
+               LoggingContext.clear();
+               
+
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java b/src/main/java/org/onap/aai/interceptors/post/ResponseHeaderManipulation.java
new file mode 100644 (file)
index 0000000..9d4efe7
--- /dev/null
@@ -0,0 +1,64 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.post;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+import javax.ws.rs.core.MediaType;
+import java.io.IOException;
+
+@Priority(AAIResponseFilterPriority.HEADER_MANIPULATION)
+public class ResponseHeaderManipulation extends AAIContainerFilter implements ContainerResponseFilter {
+
+       private static final String DEFAULT_XML_TYPE = MediaType.APPLICATION_XML;
+
+       @Override
+       public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
+                       throws IOException {
+
+               updateResponseHeaders(requestContext, responseContext);
+
+       }
+
+       private void updateResponseHeaders(ContainerRequestContext requestContext,
+                       ContainerResponseContext responseContext) {
+
+               responseContext.getHeaders().add(AAIHeaderProperties.AAI_TX_ID, requestContext.getProperty(AAIHeaderProperties.AAI_TX_ID));
+
+               String responseContentType = responseContext.getHeaderString("Content-Type");
+
+               if(responseContentType == null){
+                       String acceptType = requestContext.getHeaderString("Accept");
+
+                       if(acceptType == null || "*/*".equals(acceptType)){
+                               responseContext.getHeaders().putSingle("Content-Type", DEFAULT_XML_TYPE);
+                       } else {
+                               responseContext.getHeaders().putSingle("Content-Type", acceptType);
+                       }
+               }
+
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java b/src/main/java/org/onap/aai/interceptors/post/ResponseTransactionLogging.java
new file mode 100644 (file)
index 0000000..547a7c8
--- /dev/null
@@ -0,0 +1,123 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.post;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.google.gson.JsonObject;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.util.AAIConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.annotation.Priority;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+import java.io.IOException;
+import java.util.Objects;
+import java.util.Optional;
+
+@Priority(AAIResponseFilterPriority.RESPONSE_TRANS_LOGGING)
+public class ResponseTransactionLogging extends AAIContainerFilter implements ContainerResponseFilter {
+
+       private static final EELFLogger TRANSACTION_LOGGER = EELFManager.getInstance().getLogger(ResponseTransactionLogging.class);
+
+       @Autowired
+       private HttpServletResponse httpServletResponse;
+
+       @Override
+       public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext)
+                       throws IOException {
+
+               this.transLogging(requestContext, responseContext);
+
+       }
+
+       private void transLogging(ContainerRequestContext requestContext, ContainerResponseContext responseContext) {
+
+               String logValue;
+               String getValue;
+               String postValue;
+               
+               try {
+                       logValue = AAIConfig.get("aai.transaction.logging");
+                       getValue = AAIConfig.get("aai.transaction.logging.get");
+                       postValue = AAIConfig.get("aai.transaction.logging.post");
+               } catch (AAIException e) {
+                       return;
+               }
+
+               String transId = requestContext.getHeaderString(AAIHeaderProperties.TRANSACTION_ID);
+               String fromAppId = requestContext.getHeaderString(AAIHeaderProperties.FROM_APP_ID);
+               String fullUri = requestContext.getUriInfo().getRequestUri().toString();
+               String requestTs = (String)requestContext.getProperty(AAIHeaderProperties.AAI_REQUEST_TS);
+
+               String httpMethod = requestContext.getMethod();
+
+               String status = Integer.toString(responseContext.getStatus());
+               
+               String request = (String)requestContext.getProperty(AAIHeaderProperties.AAI_REQUEST);
+               String response = this.getResponseString(responseContext);
+
+               if (!Boolean.parseBoolean(logValue)) {
+               } else if (!Boolean.parseBoolean(getValue) && "GET".equals(httpMethod)) {
+               } else if (!Boolean.parseBoolean(postValue) && "POST".equals(httpMethod)) {
+               } else {
+                       
+                       JsonObject logEntry = new JsonObject();
+                       logEntry.addProperty("transactionId", transId);
+                       logEntry.addProperty("status", status);
+                       logEntry.addProperty("rqstDate", requestTs);
+                       logEntry.addProperty("respDate", this.genDate());
+                       logEntry.addProperty("sourceId", fromAppId + ":" + transId);
+                       logEntry.addProperty("resourceId", fullUri);
+                       logEntry.addProperty("resourceType", httpMethod);
+                       logEntry.addProperty("rqstBuf", Objects.toString(request, ""));
+                       logEntry.addProperty("respBuf", Objects.toString(response, ""));
+                       
+                       try {
+                               TRANSACTION_LOGGER.debug(logEntry.toString());
+                       } catch (Exception e) {
+                               ErrorLogHelper.logError("AAI_4000", "Exception writing transaction log.");
+                       }
+               }
+
+       }
+
+       private String getResponseString(ContainerResponseContext responseContext) {
+               JsonObject response = new JsonObject();
+               response.addProperty("ID", responseContext.getHeaderString(AAIHeaderProperties.AAI_TX_ID));
+               response.addProperty("Content-Type", this.httpServletResponse.getContentType());
+               response.addProperty("Response-Code", responseContext.getStatus());
+               response.addProperty("Headers", responseContext.getHeaders().toString());
+               Optional<Object> entityOptional = Optional.ofNullable(responseContext.getEntity());
+               if(entityOptional.isPresent()){
+                       response.addProperty("Entity", entityOptional.get().toString());
+               } else {
+                       response.addProperty("Entity", "");
+               }
+               return response.toString();
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/AAIRequestFilterPriority.java b/src/main/java/org/onap/aai/interceptors/pre/AAIRequestFilterPriority.java
new file mode 100644 (file)
index 0000000..c3d9d3b
--- /dev/null
@@ -0,0 +1,46 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+/**
+ * Priority constants for the inbound (pre-matching) request filter chain.
+ * Filters annotated with a lower value run earlier.
+ */
+public final class AAIRequestFilterPriority {
+	
+	/** Constants holder only; never instantiated. */
+	private AAIRequestFilterPriority() {}
+	
+	/** First: capture and log the raw incoming request. */
+	public static final int REQUEST_TRANS_LOGGING = 1000;
+	
+	/** Validate mandatory headers (transaction id, from-app-id). */
+	public static final int HEADER_VALIDATION = 2000;
+
+	/** Seed the per-request logging context. */
+	public static final int SET_LOGGING_CONTEXT = 3000;
+
+	/** Apply the HTTP method override (POST -> PATCH). */
+	public static final int HTTP_HEADER = 4000;
+
+	public static final int LATEST = 4250;
+
+	/** Authorization checks (e.g. basic auth in one-way-SSL mode). */
+	public static final int AUTHORIZATION = 4500;
+
+	/** Reject calls to retired service versions. */
+	public static final int RETIRED_SERVICE = 5000;
+
+	/** API version extraction/validation. */
+	public static final int VERSION = 5500;
+
+	/** Request header rewriting (request context). */
+	public static final int HEADER_MANIPULATION = 6000;
+
+	/** Last: query-parameter cleanup and URI rewriting. */
+	public static final int REQUEST_MODIFICATION = 7000;
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java b/src/main/java/org/onap/aai/interceptors/pre/HeaderValidation.java
new file mode 100644 (file)
index 0000000..afacf66
--- /dev/null
@@ -0,0 +1,91 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.logging.ErrorLogHelper;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.Provider;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.UUID;
+
+@Provider
+@PreMatching
+@Priority(AAIRequestFilterPriority.HEADER_VALIDATION)
+public class HeaderValidation extends AAIContainerFilter implements ContainerRequestFilter {
+
+       @Override
+       public void filter(ContainerRequestContext requestContext) throws IOException {
+
+               Optional<Response> oResp;
+
+               MultivaluedMap<String, String> headersMap = requestContext.getHeaders();
+       
+               String transId = headersMap.getFirst(AAIHeaderProperties.TRANSACTION_ID);
+               String fromAppId = headersMap.getFirst(AAIHeaderProperties.FROM_APP_ID);
+
+               List<MediaType> acceptHeaderValues = requestContext.getAcceptableMediaTypes();
+
+               oResp = this.validateHeaderValuePresence(fromAppId, "AAI_4009", acceptHeaderValues);
+               if (oResp.isPresent()) {
+                       requestContext.abortWith(oResp.get());
+                       return;
+               }
+               oResp = this.validateHeaderValuePresence(transId, "AAI_4010", acceptHeaderValues);
+               if (oResp.isPresent()) {
+                       requestContext.abortWith(oResp.get());
+                       return;
+               }
+
+               if (!this.isValidUUID(transId)) {
+                       transId = UUID.randomUUID().toString();
+                       requestContext.getHeaders().get(AAIHeaderProperties.TRANSACTION_ID).clear();
+                       requestContext.getHeaders().add(AAIHeaderProperties.TRANSACTION_ID, transId);
+               }
+
+       }
+       
+       private Optional<Response> validateHeaderValuePresence(String value, String errorCode,
+                       List<MediaType> acceptHeaderValues) {
+               Response response = null;
+               AAIException aaie;
+               if (value == null) {
+                       aaie = new AAIException(errorCode);
+                       return Optional.of(Response.status(aaie.getErrorObject().getHTTPResponseCode())
+                                       .entity(ErrorLogHelper.getRESTAPIErrorResponse(acceptHeaderValues, aaie, new ArrayList<>()))
+                                       .build());
+               }
+
+               return Optional.ofNullable(response);
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/HttpHeaderInterceptor.java b/src/main/java/org/onap/aai/interceptors/pre/HttpHeaderInterceptor.java
new file mode 100644 (file)
index 0000000..94d8ca1
--- /dev/null
@@ -0,0 +1,55 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+
+import javax.annotation.Priority;
+import javax.ws.rs.HttpMethod;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.ext.Provider;
+import java.io.IOException;
+
+/**
+ * The Class HttpHeaderInterceptor
+ */
+@Provider
+@PreMatching
+@Priority(AAIRequestFilterPriority.HTTP_HEADER)
+public class HttpHeaderInterceptor extends AAIContainerFilter implements ContainerRequestFilter {
+       public static final String patchMethod = "PATCH";
+       
+    @Override
+    public void filter(ContainerRequestContext containerRequestContext) throws IOException {
+
+               MultivaluedMap<String, String> headersMap = containerRequestContext.getHeaders();
+       String overrideMethod = headersMap.getFirst(AAIHeaderProperties.HTTP_METHOD_OVERRIDE);
+       String httpMethod = containerRequestContext.getMethod();
+       
+               if (HttpMethod.POST.equalsIgnoreCase(httpMethod) && patchMethod.equalsIgnoreCase(overrideMethod)) {
+                       containerRequestContext.setMethod(patchMethod);
+               }
+    }
+    
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java b/src/main/java/org/onap/aai/interceptors/pre/OneWaySslAuthorization.java
new file mode 100644 (file)
index 0000000..6563e23
--- /dev/null
@@ -0,0 +1,81 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.Profiles;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.service.AuthorizationService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Profile;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.Provider;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+
+@Provider
+@Profile(Profiles.ONE_WAY_SSL)
+@PreMatching
+@Priority(AAIRequestFilterPriority.AUTHORIZATION)
+public class OneWaySslAuthorization extends AAIContainerFilter implements ContainerRequestFilter {
+
+    @Autowired
+    private AuthorizationService authorizationService;
+
+    @Override
+    public void filter(ContainerRequestContext containerRequestContext) throws IOException
+    {
+
+        String basicAuth = containerRequestContext.getHeaderString("Authorization");
+        List<MediaType> acceptHeaderValues = containerRequestContext.getAcceptableMediaTypes();
+
+        if(basicAuth == null || !basicAuth.startsWith("Basic ")){
+            Optional<Response> responseOptional = errorResponse("AAI_3300", acceptHeaderValues);
+            containerRequestContext.abortWith(responseOptional.get());
+            return;
+        }
+
+        basicAuth = basicAuth.replaceAll("Basic ", "");
+
+        if(!authorizationService.checkIfUserAuthorized(basicAuth)){
+            Optional<Response> responseOptional = errorResponse("AAI_3300", acceptHeaderValues);
+            containerRequestContext.abortWith(responseOptional.get());
+            return;
+        }
+
+    }
+
+    private Optional<Response> errorResponse(String errorCode, List<MediaType> acceptHeaderValues) {
+        AAIException aaie = new AAIException(errorCode);
+        return Optional.of(Response.status(aaie.getErrorObject().getHTTPResponseCode())
+                .entity(ErrorLogHelper.getRESTAPIErrorResponse(acceptHeaderValues, aaie, new ArrayList<>()))
+                .build());
+
+    }
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java b/src/main/java/org/onap/aai/interceptors/pre/RequestHeaderManipulation.java
new file mode 100644 (file)
index 0000000..ee4807e
--- /dev/null
@@ -0,0 +1,62 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.ext.Provider;
+import java.util.Collections;
+import java.util.regex.Matcher;
+
+@Provider
+@PreMatching
+@Priority(AAIRequestFilterPriority.HEADER_MANIPULATION)
+public class RequestHeaderManipulation extends AAIContainerFilter implements ContainerRequestFilter {
+
+       @Override
+       public void filter(ContainerRequestContext requestContext) {
+
+               String uri = requestContext.getUriInfo().getPath();
+               this.addRequestContext(uri, requestContext.getHeaders());
+
+       }
+       
+       private void addRequestContext(String uri, MultivaluedMap<String, String> requestHeaders) {
+
+               String rc = "";
+
+        Matcher match = VersionInterceptor.EXTRACT_VERSION_PATTERN.matcher(uri);
+        if (match.find()) {
+            rc = match.group(1);
+        }
+
+               if (requestHeaders.containsKey(AAIHeaderProperties.REQUEST_CONTEXT)) {
+                       requestHeaders.remove(AAIHeaderProperties.REQUEST_CONTEXT);
+               }
+               requestHeaders.put(AAIHeaderProperties.REQUEST_CONTEXT, Collections.singletonList(rc));
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/RequestModification.java b/src/main/java/org/onap/aai/interceptors/pre/RequestModification.java
new file mode 100644 (file)
index 0000000..9c17ffc
--- /dev/null
@@ -0,0 +1,77 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.UriBuilder;
+import javax.ws.rs.ext.Provider;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+@Provider
+@PreMatching
+@Priority(AAIRequestFilterPriority.REQUEST_MODIFICATION)
+public class RequestModification extends AAIContainerFilter implements ContainerRequestFilter {
+
+       @Override
+       public void filter(ContainerRequestContext requestContext) throws IOException {
+
+               this.cleanDME2QueryParams(requestContext);
+
+       }
+       
+       private void cleanDME2QueryParams(ContainerRequestContext request) {
+               UriBuilder builder = request.getUriInfo().getRequestUriBuilder();
+               MultivaluedMap<String, String> queries = request.getUriInfo().getQueryParameters();
+
+               String[] blacklist = { "version", "envContext", "routeOffer" };
+               Set<String> blacklistSet = Arrays.stream(blacklist).collect(Collectors.toSet());
+
+               boolean remove = true;
+
+               for (String param : blacklistSet) {
+                       if (!queries.containsKey(param)) {
+                               remove = false;
+                               break;
+                       }
+               }
+
+               if (remove) {
+                       for (Map.Entry<String, List<String>> query : queries.entrySet()) {
+                               String key = query.getKey();
+                               if (blacklistSet.contains(key)) {
+                                       builder.replaceQueryParam(key);
+                               }
+                       }
+               }
+               request.setRequestUri(builder.build());
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java b/src/main/java/org/onap/aai/interceptors/pre/RequestTransactionLogging.java
new file mode 100644 (file)
index 0000000..b770296
--- /dev/null
@@ -0,0 +1,136 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.google.gson.JsonObject;
+import org.glassfish.jersey.message.internal.ReaderWriter;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.logging.LogFormatTools;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.HbaseSaltPrefixer;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.annotation.Priority;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.ext.Provider;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.SecureRandom;
+import java.util.Random;
+import java.util.UUID;
+
+@Provider
+@PreMatching
+@Priority(AAIRequestFilterPriority.REQUEST_TRANS_LOGGING)
+public class RequestTransactionLogging extends AAIContainerFilter implements ContainerRequestFilter {
+
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(RequestTransactionLogging.class);
+
+       @Autowired
+       private HttpServletRequest httpServletRequest;
+
+       private static final String DEFAULT_CONTENT_TYPE = MediaType.APPLICATION_JSON;
+       private static final String DEFAULT_RESPONSE_TYPE = MediaType.APPLICATION_XML;
+
+       private static final String CONTENT_TYPE = "Content-Type";
+       private static final String ACCEPT = "Accept";
+       private static final String TEXT_PLAIN = "text/plain";
+
+       @Override
+       public void filter(ContainerRequestContext requestContext) throws IOException {
+
+               String currentTimeStamp = genDate();
+               String fullId = this.getAAITxIdToHeader(currentTimeStamp);
+               this.addToRequestContext(requestContext, AAIHeaderProperties.AAI_TX_ID, fullId);
+               this.addToRequestContext(requestContext, AAIHeaderProperties.AAI_REQUEST, this.getRequest(requestContext, fullId));
+               this.addToRequestContext(requestContext, AAIHeaderProperties.AAI_REQUEST_TS, currentTimeStamp);
+               this.addDefaultContentType(requestContext);
+       }
+
+       private void addToRequestContext(ContainerRequestContext requestContext, String name, String aaiTxIdToHeader) {
+               requestContext.setProperty(name, aaiTxIdToHeader);
+       }
+
+       private void addDefaultContentType(ContainerRequestContext requestContext) {
+
+               MultivaluedMap<String, String> headersMap = requestContext.getHeaders();
+               String contentType = headersMap.getFirst(CONTENT_TYPE);
+               String acceptType  = headersMap.getFirst(ACCEPT);
+
+               if(contentType == null || contentType.contains(TEXT_PLAIN)){
+                       requestContext.getHeaders().putSingle(CONTENT_TYPE, DEFAULT_CONTENT_TYPE);
+               }
+
+               if(acceptType == null || acceptType.contains(TEXT_PLAIN)){
+                       requestContext.getHeaders().putSingle(ACCEPT, DEFAULT_RESPONSE_TYPE);
+               }
+       }
+
+       private String getAAITxIdToHeader(String currentTimeStamp) {
+               String txId = UUID.randomUUID().toString();
+               try {
+                       Random rand = new SecureRandom();
+                       int number = rand.nextInt(99999);
+                       txId = HbaseSaltPrefixer.getInstance().prependSalt(AAIConfig.get(AAIConstants.AAI_NODENAME) + "-"
+                                       + currentTimeStamp + "-" + number ); //new Random(System.currentTimeMillis()).nextInt(99999)
+               } catch (AAIException e) {
+               }
+
+               return txId;
+       }
+
+       private String getRequest(ContainerRequestContext requestContext, String fullId) {
+
+               JsonObject request = new JsonObject();
+               request.addProperty("ID", fullId);
+               request.addProperty("Http-Method", requestContext.getMethod());
+               request.addProperty(CONTENT_TYPE, httpServletRequest.getContentType());
+               request.addProperty("Headers", requestContext.getHeaders().toString());
+
+               ByteArrayOutputStream out = new ByteArrayOutputStream();
+               InputStream in = requestContext.getEntityStream();
+
+               try {
+                       if (in.available() > 0) {
+                               ReaderWriter.writeTo(in, out);
+                               byte[] requestEntity = out.toByteArray();
+                               request.addProperty("Payload", new String(requestEntity, "UTF-8"));
+                               requestContext.setEntityStream(new ByteArrayInputStream(requestEntity));
+                       }
+               } catch (IOException ex) {
+                       LOGGER.error("An exception occurred during the transaction logging: " + LogFormatTools.getStackTop(ex));
+               }
+
+               return request.toString();
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/RetiredInterceptor.java b/src/main/java/org/onap/aai/interceptors/pre/RetiredInterceptor.java
new file mode 100644 (file)
index 0000000..9a33b05
--- /dev/null
@@ -0,0 +1,150 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.service.RetiredService;
+import org.onap.aai.util.AAIConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.Provider;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+// Can cache this so if the uri was already cached then it won't run the string
+// matching each time but only does it for the first time
+
+@Provider
+@PreMatching
+@Priority(AAIRequestFilterPriority.RETIRED_SERVICE)
+public class RetiredInterceptor extends AAIContainerFilter implements ContainerRequestFilter {
+
+    private static final Pattern VERSION_PATTERN = Pattern.compile("v\\d+|latest");
+
+    private RetiredService retiredService;
+
+    private String basePath;
+
+    @Autowired
+    public RetiredInterceptor(RetiredService retiredService, @Value("${schema.uri.base.path}") String basePath){
+        this.retiredService = retiredService;
+        this.basePath = basePath;
+        if(!basePath.endsWith("/")){
+            this.basePath = basePath + "/";
+        }
+    }
+    @Override
+    public void filter(ContainerRequestContext containerRequestContext) throws IOException {
+
+        String requestURI = containerRequestContext.getUriInfo().getAbsolutePath().getPath();
+
+        String version = extractVersionFromPath(requestURI);
+
+        List<Pattern> retiredAllVersionList = retiredService.getRetiredAllVersionList();
+
+
+        if(checkIfUriRetired(containerRequestContext, retiredAllVersionList, version, requestURI, "")){
+            return;
+        }
+
+        List<Pattern> retiredVersionList = retiredService.getRetiredPatterns();
+
+        checkIfUriRetired(containerRequestContext, retiredVersionList, version, requestURI);
+    }
+
+    public boolean checkIfUriRetired(ContainerRequestContext containerRequestContext,
+                                     List<Pattern> retiredPatterns,
+                                     String version,
+                                     String requestURI,
+                                     String message){
+
+
+        for(Pattern retiredPattern : retiredPatterns){
+            if(retiredPattern.matcher(requestURI).matches()){
+                AAIException e;
+
+                if(message == null){
+                    e = new AAIException("AAI_3007");
+                } else {
+                    e = new AAIException("AAI_3015");
+                }
+
+                ArrayList<String> templateVars = new ArrayList<>();
+
+                if (templateVars.isEmpty()) {
+                    templateVars.add("PUT");
+                    if(requestURI != null){
+                        requestURI = requestURI.replaceAll(basePath, "");
+                    }
+                    templateVars.add(requestURI);
+                    if(message == null){
+                        templateVars.add(version);
+                        templateVars.add(AAIConfig.get("aai.default.api.version", ""));
+                    }
+                }
+
+                Response response = Response
+                        .status(e.getErrorObject().getHTTPResponseCode())
+                        .entity(
+                                ErrorLogHelper
+                                        .getRESTAPIErrorResponse(
+                                                containerRequestContext.getAcceptableMediaTypes(), e, templateVars
+                                        )
+                        )
+                        .build();
+
+                containerRequestContext.abortWith(response);
+
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+    public boolean checkIfUriRetired(ContainerRequestContext containerRequestContext,
+                                     List<Pattern> retiredPatterns,
+                                     String version,
+                                     String requestURI){
+        return checkIfUriRetired(containerRequestContext, retiredPatterns, version, requestURI, null);
+    }
+
+    protected String extractVersionFromPath(String requestURI) {
+        Matcher versionMatcher = VERSION_PATTERN.matcher(requestURI);
+        String version = null;
+
+        if(versionMatcher.find()){
+            version = versionMatcher.group(0);
+        }
+        return version;
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java b/src/main/java/org/onap/aai/interceptors/pre/SetLoggingContext.java
new file mode 100644 (file)
index 0000000..6c3a7fc
--- /dev/null
@@ -0,0 +1,75 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.logging.LoggingContext;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.env.Environment;
+
+import javax.annotation.Priority;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.ext.Provider;
+import java.io.IOException;
+
+@Provider
+@PreMatching
+@Priority(AAIRequestFilterPriority.SET_LOGGING_CONTEXT)
+public class SetLoggingContext extends AAIContainerFilter implements ContainerRequestFilter {
+
+       @Autowired
+       private Environment environment;
+
+       @Autowired
+       private HttpServletRequest httpServletRequest;
+       
+       @Override
+       public void filter(ContainerRequestContext requestContext) throws IOException {
+
+               String uri = httpServletRequest.getRequestURI();
+               String queryString = httpServletRequest.getQueryString();
+
+               if(queryString != null && !queryString.isEmpty()){
+                   uri = uri + "?" + queryString;
+               }
+
+               String httpMethod = requestContext.getMethod();
+
+               MultivaluedMap<String, String> headersMap = requestContext.getHeaders();
+
+               String transId = headersMap.getFirst(AAIHeaderProperties.TRANSACTION_ID);
+               String fromAppId = headersMap.getFirst(AAIHeaderProperties.FROM_APP_ID);
+               
+               LoggingContext.init();
+               LoggingContext.requestId(transId);
+               LoggingContext.partnerName(fromAppId);
+               LoggingContext.targetEntity(environment.getProperty("spring.application.name"));
+               LoggingContext.component(fromAppId);
+               LoggingContext.serviceName(httpMethod + " " + uri);
+               LoggingContext.targetServiceName(httpMethod + " " + uri);
+               LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
+       }
+       
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/TwoWaySslAuthorization.java b/src/main/java/org/onap/aai/interceptors/pre/TwoWaySslAuthorization.java
new file mode 100644 (file)
index 0000000..73b7877
--- /dev/null
@@ -0,0 +1,187 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.auth.AAIAuthCore;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.interceptors.AAIHeaderProperties;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.restcore.HttpMethod;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Profile;
+
+import javax.annotation.Priority;
+import javax.security.auth.x500.X500Principal;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.Provider;
+import java.security.cert.X509Certificate;
+import java.util.*;
+import java.util.stream.Collectors;
+
+@Provider
+@PreMatching
+@Priority(AAIRequestFilterPriority.AUTHORIZATION)
+@Profile("two-way-ssl")
+public class TwoWaySslAuthorization extends AAIContainerFilter implements ContainerRequestFilter {
+
+	@Autowired
+	private HttpServletRequest httpServletRequest;
+
+	@Autowired
+	private AAIAuthCore aaiAuthCore;
+
+	/**
+	 * Authorizes the request via two-way SSL identity: resolves the user from
+	 * the client certificate (or Basic auth credentials when HAProxy
+	 * terminated TLS), then delegates to AAIAuthCore. Aborts with AAI_9107
+	 * when no user can be determined, or with the authorization failure
+	 * response when the authorization check fails.
+	 */
+	@Override
+	public void filter(ContainerRequestContext requestContext) {
+
+		Optional<Response> oResp;
+
+		String uri = requestContext.getUriInfo().getAbsolutePath().getPath();
+		String httpMethod = getHttpMethod(requestContext);
+
+		List<MediaType> acceptHeaderValues = requestContext.getAcceptableMediaTypes();
+
+		Optional<String> authUser = getUser(this.httpServletRequest);
+
+		if (authUser.isPresent()) {
+			oResp = this.authorize(uri, httpMethod, acceptHeaderValues, authUser.get(),
+					this.getHaProxyUser(this.httpServletRequest), getCertIssuer(this.httpServletRequest));
+			if (oResp.isPresent()) {
+				// Authorization failed; abort with the prepared error response.
+				requestContext.abortWith(oResp.get());
+				return;
+			}
+		} else {
+			// No client certificate and no Basic credentials: unauthorized.
+			AAIException aaie = new AAIException("AAI_9107");
+			requestContext
+					.abortWith(Response
+							.status(aaie.getErrorObject().getHTTPResponseCode()).entity(ErrorLogHelper
+									.getRESTAPIErrorResponseWithLogging(acceptHeaderValues, aaie, new ArrayList<>()))
+							.build());
+		}
+
+	}
+
+	/**
+	 * Determines the certificate issuer: prefers the X-AAI-SSL-Issuer header
+	 * (set by HAProxy, which encodes the DN with '/' separators in reversed
+	 * order — undone here), falling back to the issuer of the first
+	 * certificate in the servlet request's certificate chain. May return null
+	 * when neither source is available.
+	 */
+	private String getCertIssuer(HttpServletRequest hsr) {
+		String issuer =  hsr.getHeader("X-AAI-SSL-Issuer");
+		if (issuer != null && !issuer.isEmpty()) {
+			// the haproxy header replaces the ', ' with '/' and reverses on the '/' need to undo that.
+			List<String> broken = Arrays.asList(issuer.split("/"));
+			broken = broken.stream().filter(s -> !s.isEmpty()).collect(Collectors.toList());
+			Collections.reverse(broken);
+			issuer = String.join(", ", broken);
+		} else {
+			if (hsr.getAttribute("javax.servlet.request.cipher_suite") != null) {
+				X509Certificate[] certChain = (X509Certificate[]) hsr.getAttribute("javax.servlet.request.X509Certificate");
+				if (certChain != null && certChain.length > 0) {
+					X509Certificate clientCert = certChain[0];
+					issuer = clientCert.getIssuerX500Principal().getName();
+				}
+			}
+		}
+		return issuer;
+	}
+
+	/**
+	 * Normalizes the HTTP method for authorization: a POST carrying the
+	 * PATCH override header becomes MERGE_PATCH, and any patch variant is
+	 * then authorized as PUT.
+	 */
+	private String getHttpMethod(ContainerRequestContext requestContext) {
+		String httpMethod = requestContext.getMethod();
+		if ("POST".equalsIgnoreCase(httpMethod)
+				&& "PATCH".equals(requestContext.getHeaderString(AAIHeaderProperties.HTTP_METHOD_OVERRIDE))) {
+			httpMethod = HttpMethod.MERGE_PATCH.toString();
+		}
+		if (httpMethod.equalsIgnoreCase(HttpMethod.MERGE_PATCH.toString()) || "patch".equalsIgnoreCase(httpMethod)) {
+			httpMethod = HttpMethod.PUT.toString();
+		}
+		return httpMethod;
+	}
+
+	/**
+	 * Resolves the authenticated user. With a client certificate chain, uses
+	 * the lower-cased subject DN; without one (TLS terminated upstream), uses
+	 * the base64 credential portion of a Basic Authorization header. Empty
+	 * when the request carries neither.
+	 */
+	private Optional<String> getUser(HttpServletRequest hsr) {
+		String authUser = null;
+		if (hsr.getAttribute("javax.servlet.request.cipher_suite") != null) {
+			X509Certificate[] certChain = (X509Certificate[]) hsr.getAttribute("javax.servlet.request.X509Certificate");
+
+			/*
+			 * If the certificate is null or the certificate chain length is zero Then
+			 * retrieve the authorization in the request header Authorization Check that it
+			 * is not null and that it starts with Basic and then strip the basic portion to
+			 * get the base64 credentials Check if this is contained in the AAIBasicAuth
+			 * Singleton class If it is, retrieve the username associated with that
+			 * credentials and set to authUser Otherwise, get the principal from certificate
+			 * and use that authUser
+			 */
+
+			if (certChain == null || certChain.length == 0) {
+
+				String authorization = hsr.getHeader("Authorization");
+
+				if (authorization != null && authorization.startsWith("Basic ")) {
+					authUser = authorization.replace("Basic ", "");
+				}
+
+			} else {
+				X509Certificate clientCert = certChain[0];
+				X500Principal subjectDN = clientCert.getSubjectX500Principal();
+				authUser = subjectDN.toString().toLowerCase();
+			}
+		}
+
+		return Optional.ofNullable(authUser);
+	}
+
+	/**
+	 * Reassembles the client DN from the HAProxy-forwarded X-AAI-SSL-Client-*
+	 * headers; returns the empty string when any component header is missing.
+	 */
+	private String getHaProxyUser(HttpServletRequest hsr) {
+		String haProxyUser;
+		if (Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-CN")) 
+				|| Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-OU"))
+				|| Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-O"))
+				|| Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-L"))
+				|| Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-ST"))
+				|| Objects.isNull(hsr.getHeader("X-AAI-SSL-Client-C"))) {
+			haProxyUser = "";
+		} else {
+			haProxyUser = String.format("CN=%s, OU=%s, O=\"%s\", L=%s, ST=%s, C=%s",
+					Objects.toString(hsr.getHeader("X-AAI-SSL-Client-CN"), ""),
+					Objects.toString(hsr.getHeader("X-AAI-SSL-Client-OU"), ""),
+					Objects.toString(hsr.getHeader("X-AAI-SSL-Client-O"), ""),
+					Objects.toString(hsr.getHeader("X-AAI-SSL-Client-L"), ""),
+					Objects.toString(hsr.getHeader("X-AAI-SSL-Client-ST"), ""),
+					Objects.toString(hsr.getHeader("X-AAI-SSL-Client-C"), "")).toLowerCase();
+		}
+		return haProxyUser;
+	}
+
+	/**
+	 * Runs the core authorization check. Returns an empty Optional on
+	 * success, or a ready-to-send error Response when authorization fails.
+	 */
+	private Optional<Response> authorize(String uri, String httpMethod, List<MediaType> acceptHeaderValues,
+			String authUser, String haProxyUser, String issuer) {
+		Response response = null;
+		try {
+			if (!aaiAuthCore.authorize(authUser, uri, httpMethod, haProxyUser, issuer)) {
+				throw new AAIException("AAI_9101", "Request on " + httpMethod + " " + uri + " status is not OK");
+			}
+		} catch (AAIException e) {
+			response = Response.status(e.getErrorObject().getHTTPResponseCode())
+					.entity(ErrorLogHelper.getRESTAPIErrorResponseWithLogging(acceptHeaderValues, e, new ArrayList<>()))
+					.build();
+		}
+		return Optional.ofNullable(response);
+	}
+
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java b/src/main/java/org/onap/aai/interceptors/pre/VersionInterceptor.java
new file mode 100644 (file)
index 0000000..f591120
--- /dev/null
@@ -0,0 +1,101 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.Response;
+import java.util.ArrayList;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+// NOTE(review): unlike the other pre-filters in this package, this class is
+// not annotated @Provider — presumably it is registered explicitly with the
+// JAX-RS runtime; confirm against the application's resource configuration.
+@PreMatching
+@Priority(AAIRequestFilterPriority.VERSION)
+public class VersionInterceptor extends AAIContainerFilter implements ContainerRequestFilter {
+
+    // Matches a path that begins with a version token such as "v12".
+    public static final Pattern EXTRACT_VERSION_PATTERN = Pattern.compile("^(v[1-9][0-9]*).*$");
+
+    // String forms of every schema version the server supports.
+    private final Set<String> allowedVersions;
+
+    private final SchemaVersions schemaVersions;
+
+    @Autowired
+    public VersionInterceptor(SchemaVersions schemaVersions){
+        this.schemaVersions = schemaVersions;
+        allowedVersions  = schemaVersions.getVersions()
+            .stream()
+            .map(SchemaVersion::toString)
+            .collect(Collectors.toSet());
+
+    }
+
+    /**
+     * Rejects requests whose leading path segment is not a supported schema
+     * version: AAI_3017 when no version token is present at all, AAI_3016
+     * when a version token exists but is not in the allowed set. Paths under
+     * search, util/echo and tools are exempt from version checking.
+     */
+    @Override
+    public void filter(ContainerRequestContext requestContext) {
+
+        String uri = requestContext.getUriInfo().getPath();
+
+        if (uri.startsWith("search") || uri.startsWith("util/echo") || uri.startsWith("tools")) {
+            return;
+		}
+
+        Matcher matcher = EXTRACT_VERSION_PATTERN.matcher(uri);
+
+        String version = null;
+        if(matcher.matches()){
+            version = matcher.group(1);
+        } else {
+            // version is still null here, so the AAI_3017 template gets a null entry.
+            requestContext.abortWith(createInvalidVersionResponse("AAI_3017", requestContext, version));
+            return;
+        }
+
+        if(!allowedVersions.contains(version)){
+            requestContext.abortWith(createInvalidVersionResponse("AAI_3016", requestContext, version));
+        }
+    }
+
+    /**
+     * Builds the error response for an invalid/missing version, with the
+     * method, path and offending version as error template variables.
+     */
+    private Response createInvalidVersionResponse(String errorCode, ContainerRequestContext context, String version) {
+        AAIException e = new AAIException(errorCode);
+        ArrayList<String> templateVars = new ArrayList<>();
+
+        // Always true for a freshly created list; kept as-is from the original.
+        if (templateVars.isEmpty()) {
+            templateVars.add(context.getMethod());
+            templateVars.add(context.getUriInfo().getPath());
+            templateVars.add(version);
+        }
+
+        String entity = ErrorLogHelper.getRESTAPIErrorResponse(context.getAcceptableMediaTypes(), e, templateVars);
+
+        return Response
+                .status(e.getErrorObject().getHTTPResponseCode())
+                .entity(entity)
+                .build();
+    }
+}
diff --git a/src/main/java/org/onap/aai/interceptors/pre/VersionLatestInterceptor.java b/src/main/java/org/onap/aai/interceptors/pre/VersionLatestInterceptor.java
new file mode 100644 (file)
index 0000000..61008b6
--- /dev/null
@@ -0,0 +1,56 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.interceptors.pre;
+
+import org.onap.aai.interceptors.AAIContainerFilter;
+import org.onap.aai.setup.SchemaVersions;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import java.net.URI;
+
+@PreMatching
+@Priority(AAIRequestFilterPriority.LATEST)
+public class VersionLatestInterceptor extends AAIContainerFilter implements ContainerRequestFilter {
+
+    private final SchemaVersions schemaVersions;
+
+    @Autowired
+    public VersionLatestInterceptor(SchemaVersions schemaVersions){
+        this.schemaVersions = schemaVersions;
+    }
+
+    @Override
+    public void filter(ContainerRequestContext requestContext) {
+
+        String uri = requestContext.getUriInfo().getPath();
+
+               if(uri.startsWith("latest")){
+                   String absolutePath = requestContext.getUriInfo().getAbsolutePath().toString();
+            String latest = absolutePath.replaceFirst("latest", schemaVersions.getDefaultVersion().toString());
+            requestContext.setRequestUri(URI.create(latest));
+            return;
+        }
+
+    }
+}
diff --git a/src/main/java/org/onap/aai/migration/EdgeMigrator.java b/src/main/java/org/onap/aai/migration/EdgeMigrator.java
new file mode 100644 (file)
index 0000000..99b4896
--- /dev/null
@@ -0,0 +1,145 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.util.List;
+
+import com.google.common.collect.Multimap;
+import org.javatuples.Pair;
+
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.EdgeRuleQuery;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.edges.EdgeRule;
+import org.onap.aai.setup.SchemaVersions;
+
+/**
+ * A migration template for migrating all edge properties between "from" and "to" node from the DbedgeRules.json
+ * 
+ */
+@MigrationPriority(0)
+@MigrationDangerRating(1)
+public abstract class EdgeMigrator extends Migrator {
+
+       private boolean success = true;
+
+       public EdgeMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+       }
+
+       public EdgeMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions, List<Pair<String, String>> nodePairList) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+       }
+
+
+       /**
+        * Do not override this method as an inheritor of this class
+        */
+       @Override
+       public void run() {
+
+               executeModifyOperation();
+
+       }
+
+       /**
+        * This is where inheritors should add their logic
+        */
+       protected void executeModifyOperation() {
+               
+               changeEdgeProperties();
+               
+       }
+
+       protected void changeEdgeLabels() {
+       //TODO: when json file has edge label as well as edge property changes  
+       }
+       
+       
+       
+       protected void changeEdgeProperties() {
+               try {
+                       List<Pair<String, String>> nodePairList = this.getAffectedNodePairTypes();
+                       for (Pair<String, String> nodePair : nodePairList) {
+                               
+                               String NODE_A = nodePair.getValue0();
+                               String NODE_B = nodePair.getValue1();
+                               Multimap<String, EdgeRule> result = edgeIngestor.getRules(new EdgeRuleQuery.Builder(NODE_A, NODE_B).build());
+
+                               GraphTraversal<Vertex, Vertex> g = this.engine.asAdmin().getTraversalSource().V();
+                               /*
+                                * Find Out-Edges from Node A to Node B and change them
+                                * Also Find Out-Edges from Node B to Node A and change them 
+                                */
+                               g.union(__.has(AAIProperties.NODE_TYPE, NODE_A).outE().where(__.inV().has(AAIProperties.NODE_TYPE, NODE_B)),
+                                               __.has(AAIProperties.NODE_TYPE, NODE_B).outE().where(__.inV().has(AAIProperties.NODE_TYPE, NODE_A)))
+                                               .sideEffect(t -> {
+                                                       Edge e = t.get();
+                                                       try {
+                                                               Vertex out = e.outVertex();
+                                                               Vertex in = e.inVertex();
+                                                               if (out == null || in == null) {
+                                                                       logger.error(
+                                                                                       e.id() + " invalid because one vertex was null: out=" + out + " in=" + in);
+                                                               } else {
+                                                                       if (result.containsKey(e.label())) {
+                                                                               EdgeRule rule = result.get(e.label()).iterator().next();
+                                                                               e.properties().forEachRemaining(prop -> prop.remove());
+                                                                               edgeSerializer.addProperties(e, rule);
+                                                                       } else {
+                                                                               logger.info("found vertices connected by unkwown label: out=" + out + " label="
+                                                                                               + e.label() + " in=" + in);
+                                                                       }
+                                                               }
+                                                       } catch (Exception e1) {
+                                                               throw new RuntimeException(e1);
+                                                       }
+                                               }).iterate();
+                       }
+
+               } catch (Exception e) {
+                       logger.error("error encountered", e);
+                       success = false;
+               }
+       }
+  
+       @Override
+       public Status getStatus() {
+               if (success) {
+                       return Status.SUCCESS;
+               } else {
+                       return Status.FAILURE;
+               }
+       }
+
+       /**
+        * List of node pairs("from" and "to"), you would like EdgeMigrator to migrate from json files
+        * @return
+        */
+       public abstract List<Pair<String, String>> getAffectedNodePairTypes() ;
+       
+}
diff --git a/src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java b/src/main/java/org/onap/aai/migration/EdgeSwingMigrator.java
new file mode 100644 (file)
index 0000000..616ff02
--- /dev/null
@@ -0,0 +1,288 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration;\r
+\r
+\r
+import java.util.HashMap;\r
+import java.util.Iterator;\r
+import java.util.List;\r
+import java.util.Map;\r
+import org.apache.tinkerpop.gremlin.structure.Edge;\r
+import org.apache.tinkerpop.gremlin.structure.Property;\r
+import org.apache.tinkerpop.gremlin.structure.Direction;\r
+import org.apache.tinkerpop.gremlin.structure.Vertex;\r
+import org.javatuples.Pair;\r
+import org.onap.aai.db.props.AAIProperties;\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+import org.onap.aai.setup.SchemaVersions;\r
+\r
+/**\r
+ * A migration template for "swinging" edges that terminate on an old-node to a new target node. \r
+ *     That is, given an oldNode and a newNode we will swing edges that terminate on the\r
+ *     oldNode and terminate them on the newNode (actually we drop the old edges and add new ones).\r
+ *     \r
+ *     \r
+ *     We allow the passing of some parameters to restrict what edges get swung over: \r
+ *      > otherEndNodeTypeRestriction: only swing edges that terminate on the oldNode if the\r
+ *                     node at the other end of the edge is of this nodeType.\r
+ *      > edgeLabelRestriction: Only swing edges that have this edgeLabel\r
+ *      > edgeDirectionRestriction: Only swing edges that go this direction (from the oldNode)\r
+ *             this is a required parameter.  valid values are: BOTH, IN, OUT\r
+ *     \r
+ */\r
+@MigrationPriority(0)\r
+@MigrationDangerRating(1)\r
+public abstract class EdgeSwingMigrator extends Migrator {\r
+\r
+       private boolean success = true;\r
+       private String nodeTypeRestriction = null;\r
+       private String edgeLabelRestriction = null;  \r
+       private String edgeDirRestriction = null;  \r
+       private List<Pair<Vertex, Vertex>> nodePairList;\r
+       \r
+       \r
+       public EdgeSwingMigrator(TransactionalGraphEngine engine , LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {\r
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+       }\r
+       \r
+\r
+       /**\r
+        * Do not override this method as an inheritor of this class\r
+        */\r
+       @Override\r
+       public void run() {\r
+               executeModifyOperation();\r
+               cleanupAsAppropriate(this.nodePairList);\r
+       }\r
+\r
+       /**\r
+        * This is where inheritors should add their logic\r
+        */\r
+       protected void executeModifyOperation() {\r
+       \r
+               try {\r
+                       this.nodeTypeRestriction = this.getNodeTypeRestriction();\r
+                       this.edgeLabelRestriction = this.getEdgeLabelRestriction();\r
+                       this.edgeDirRestriction = this.getEdgeDirRestriction();\r
+                       nodePairList = this.getAffectedNodePairs();\r
+                       for (Pair<Vertex, Vertex> nodePair : nodePairList) {\r
+                               Vertex fromNode = nodePair.getValue0();\r
+                               Vertex toNode = nodePair.getValue1();\r
+                               this.swingEdges(fromNode, toNode,\r
+                                               this.nodeTypeRestriction,this.edgeLabelRestriction,this.edgeDirRestriction);\r
+                       }\r
+               } catch (Exception e) {\r
+                       logger.error("error encountered", e);\r
+                       success = false;\r
+               }\r
+       }\r
+\r
+\r
+       protected void swingEdges(Vertex oldNode, Vertex newNode, String nodeTypeRestr, String edgeLabelRestr, String edgeDirRestr) {\r
+               try {\r
+                       // If the old and new Vertices aren't populated, throw an exception\r
+                       if( oldNode == null  ){\r
+                               logger.info ( "null oldNode passed to swingEdges() ");\r
+                               success = false;\r
+                               return;\r
+                       }\r
+                       else if( newNode == null ){\r
+                               logger.info ( "null newNode passed to swingEdges() ");\r
+                               success = false;\r
+                               return;\r
+                       }\r
+                       else if( edgeDirRestr == null ||\r
+                                               (!edgeDirRestr.equals("BOTH") \r
+                                                       && !edgeDirRestr.equals("IN")  \r
+                                                       && !edgeDirRestr.equals("OUT") )\r
+                                               ){\r
+                               logger.info ( "invalid direction passed to swingEdges(). valid values are BOTH/IN/OUT ");\r
+                               success = false;\r
+                               return;\r
+                       }\r
+                       else if( edgeLabelRestr != null \r
+                                       && (edgeLabelRestr.trim().equals("none") || edgeLabelRestr.trim().equals("")) ){\r
+                               edgeLabelRestr = null;\r
+                       }\r
+                       else if( nodeTypeRestr == null || nodeTypeRestr.trim().equals("") ){\r
+                               nodeTypeRestr = "none";\r
+                       }\r
+                               \r
+                       String oldNodeType = oldNode.value(AAIProperties.NODE_TYPE);\r
+                       String oldUri = oldNode.<String> property("aai-uri").isPresent()  ? oldNode.<String> property("aai-uri").value() : "URI Not present"; \r
+                       \r
+                       String newNodeType = newNode.value(AAIProperties.NODE_TYPE);\r
+                       String newUri = newNode.<String> property("aai-uri").isPresent()  ? newNode.<String> property("aai-uri").value() : "URI Not present"; \r
+\r
+                       // If the nodeTypes don't match, throw an error \r
+                       if( !oldNodeType.equals(newNodeType) ){\r
+                               logger.info ( "Can not swing edge from a [" + oldNodeType + "] node to a [" +\r
+                                               newNodeType + "] node. ");\r
+                               success = false;\r
+                               return;\r
+                       }\r
+                       \r
+                       // Find and migrate any applicable OUT edges.\r
+                       if( edgeDirRestr.equals("BOTH") || edgeDirRestr.equals("OUT") ){\r
+                               Iterator <Edge> edgeOutIter = null;\r
+                               if( edgeLabelRestr == null ) {\r
+                                       edgeOutIter = oldNode.edges(Direction.OUT);\r
+                               }\r
+                               else {\r
+                                       edgeOutIter = oldNode.edges(Direction.OUT, edgeLabelRestr);\r
+                               }\r
+                               \r
+                               while( edgeOutIter.hasNext() ){\r
+                                       Edge oldOutE = edgeOutIter.next();\r
+                                       String eLabel = oldOutE.label();\r
+                                       Vertex otherSideNode4ThisEdge = oldOutE.inVertex();\r
+                                       String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);\r
+                                       if( nodeTypeRestr.equals("none") || nodeTypeRestr.toLowerCase().equals(otherSideNodeType) ){\r
+                                               Iterator <Property<Object>> propsIter = oldOutE.properties();\r
+                                               HashMap<String, String> propMap = new HashMap<String,String>();\r
+                                               while( propsIter.hasNext() ){\r
+                                                       Property <Object> ep = propsIter.next();\r
+                                                       propMap.put(ep.key(), ep.value().toString());\r
+                                               }\r
+                                               \r
+                                               String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent()  ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present"; \r
+                                               logger.info ( "\nSwinging [" + eLabel + "] OUT edge.  \n    >> Unchanged side is [" \r
+                                                               + otherSideNodeType + "][" + otherSideUri + "] \n    >> Edge used to go to [" + oldNodeType \r
+                                                               + "][" + oldUri + "],\n    >> now swung to [" + newNodeType + "][" + newUri + "]. ");\r
+                                               // remove the old edge\r
+                                               oldOutE.remove();\r
+                                               \r
+                                               // add the new edge with properties that match the edge that was deleted.  We don't want to\r
+                                               // change any edge properties - just swinging one end of the edge to a new node.\r
+                                               // NOTE - addEdge adds an OUT edge to the vertex passed as a parameter, so we are \r
+                                               //       adding from the newNode side.\r
+                                               Edge newOutE = newNode.addEdge(eLabel, otherSideNode4ThisEdge);\r
+                                               \r
+                                               Iterator it = propMap.entrySet().iterator();\r
+                                           while (it.hasNext()) {\r
+                                               Map.Entry pair = (Map.Entry)it.next();\r
+                                               newOutE.property(pair.getKey().toString(), pair.getValue().toString() );\r
+                                           }\r
+                                           \r
+                                       }\r
+                               }\r
+                       }       \r
+                       \r
+                       // Find and migrate any applicable IN edges.\r
+                       if( edgeDirRestr.equals("BOTH") || edgeDirRestr.equals("IN") ){\r
+                               Iterator <Edge> edgeInIter = null;\r
+                               if( edgeLabelRestr == null ) {\r
+                                       edgeInIter = oldNode.edges(Direction.IN);\r
+                               }\r
+                               else {\r
+                                       edgeInIter = oldNode.edges(Direction.IN, edgeLabelRestr);\r
+                               }                       \r
+                               \r
+                               while( edgeInIter.hasNext() ){\r
+                                       Edge oldInE = edgeInIter.next();\r
+                                       String eLabel = oldInE.label();\r
+                                       Vertex otherSideNode4ThisEdge = oldInE.outVertex();\r
+                                       String otherSideNodeType = otherSideNode4ThisEdge.value(AAIProperties.NODE_TYPE);\r
+                                       if( nodeTypeRestr.equals("none") || nodeTypeRestr.toLowerCase().equals(otherSideNodeType) ){\r
+                                               Iterator <Property<Object>> propsIter = oldInE.properties();\r
+                                               HashMap<String, String> propMap = new HashMap<String,String>();\r
+                                               while( propsIter.hasNext() ){\r
+                                                       Property <Object> ep = propsIter.next();\r
+                                                       propMap.put(ep.key(), ep.value().toString());\r
+                                               }\r
+\r
+                                               String otherSideUri = otherSideNode4ThisEdge.<String> property("aai-uri").isPresent()  ? otherSideNode4ThisEdge.<String> property("aai-uri").value() : "URI Not present"; \r
+                                               logger.info ( "\nSwinging [" + eLabel + "] IN edge.  \n    >> Unchanged side is  [" \r
+                                                               + otherSideNodeType + "][" + otherSideUri + "] \n    >>  Edge used to go to [" + oldNodeType \r
+                                                               + "][" + oldUri + "],\n    >>   now swung to [" + newNodeType + "][" + newUri + "]. ");\r
+                                               \r
+                                               // remove the old edge\r
+                                               oldInE.remove();\r
+                                               \r
+                                               // add the new edge with properties that match the edge that was deleted.  We don't want to\r
+                                               // change any edge properties - just swinging one end of the edge to a new node.\r
+                                               // NOTE - addEdge adds an OUT edge to the vertex passed as a parameter, so we are \r
+                                               //       adding from the node on the other-end of the original edge so we'll get \r
+                                               //       an IN-edge to the newNode.\r
+                                               Edge newInE = otherSideNode4ThisEdge.addEdge(eLabel, newNode);\r
+                                               \r
+                                               Iterator it = propMap.entrySet().iterator();\r
+                                           while (it.hasNext()) {\r
+                                               Map.Entry pair = (Map.Entry)it.next();\r
+                                               newInE.property(pair.getKey().toString(), pair.getValue().toString() );\r
+                                           } \r
+                                       }\r
+                               }\r
+                       }       \r
+                       \r
+               } catch (Exception e) {\r
+                       logger.error("error encountered", e);\r
+                       success = false;\r
+               }\r
+       }\r
+  \r
+       @Override\r
+       public Status getStatus() {\r
+               if (success) {\r
+                       return Status.SUCCESS;\r
+               } else {\r
+                       return Status.FAILURE;\r
+               }\r
+       }\r
+       \r
+       \r
+       /**\r
+        * Get the List of node pairs("from" and "to"), you would like EdgeSwingMigrator to migrate from json files\r
+        * @return\r
+        */\r
+       public abstract List<Pair<Vertex, Vertex>> getAffectedNodePairs() ;\r
+       \r
+       \r
+       /**\r
+        * Get the nodeTypeRestriction that you want EdgeSwingMigrator to use\r
+        * @return\r
+        */\r
+       public abstract String getNodeTypeRestriction() ;\r
+       \r
+       \r
+       /**\r
+        * Get the nodeTypeRestriction that you want EdgeSwingMigrator to use\r
+        * @return\r
+        */\r
+       public abstract String getEdgeLabelRestriction() ;\r
+       \r
+       /**\r
+        * Get the nodeTypeRestriction that you want EdgeSwingMigrator to use\r
+        * @return\r
+        */\r
+       public abstract String getEdgeDirRestriction() ;\r
+       \r
+\r
+       \r
+       /**\r
+        * Cleanup (remove) the nodes that edges were moved off of if appropriate\r
+        * @return\r
+        */\r
+       public abstract void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL);\r
+\r
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/migration/Enabled.java b/src/main/java/org/onap/aai/migration/Enabled.java
new file mode 100644 (file)
index 0000000..1b7bba3
--- /dev/null
@@ -0,0 +1,35 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+
+/**
+ * Used to enable a migration to be picked up by the {@link org.onap.aai.migration.MigrationControllerInternal MigrationController}
+ * <p>
+ * Marker annotation with no members; {@code RUNTIME} retention keeps it
+ * visible to reflection-based class scanning.
+ */
+@Target(ElementType.TYPE)
+@Retention(value = RetentionPolicy.RUNTIME)
+public @interface Enabled {
+
+}
diff --git a/src/main/java/org/onap/aai/migration/EventAction.java b/src/main/java/org/onap/aai/migration/EventAction.java
new file mode 100644 (file)
index 0000000..830685b
--- /dev/null
@@ -0,0 +1,29 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+/**
+ * Used to describe the type of DMaaP event you would like to create
+ */
+public enum EventAction {
+       CREATE,  // a create-type event
+       UPDATE,  // an update-type event
+       DELETE   // a delete-type event
+}
diff --git a/src/main/java/org/onap/aai/migration/MigrationController.java b/src/main/java/org/onap/aai/migration/MigrationController.java
new file mode 100644 (file)
index 0000000..0e65745
--- /dev/null
@@ -0,0 +1,78 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.util.UUID;
+
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConstants;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+/**
+ * Wrapper class to allow {@link org.onap.aai.migration.MigrationControllerInternal MigrationControllerInternal}
+ * to be run from a shell script
+ */
+public class MigrationController {
+
+       /**
+        * The main method.
+        *
+        * @param args
+        *            the arguments
+        */
+       public static void main(String[] args) {
+
+               LoggingContext.init();
+               LoggingContext.partnerName("Migration");
+               LoggingContext.serviceName(AAIConstants.AAI_RESOURCES_MS);
+               LoggingContext.component("MigrationController");
+               LoggingContext.targetEntity(AAIConstants.AAI_RESOURCES_MS);
+               LoggingContext.targetServiceName("main");
+               LoggingContext.requestId(UUID.randomUUID().toString());
+               LoggingContext.statusCode(StatusCode.COMPLETE);
+               LoggingContext.responseCode(LoggingContext.SUCCESS);
+
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
+                               "org.onap.aai.config",
+                               "org.onap.aai.setup"
+               );
+
+               LoaderFactory loaderFactory   = ctx.getBean(LoaderFactory.class);
+               EdgeIngestor   edgeIngestor   = ctx.getBean(EdgeIngestor.class);
+               EdgeSerializer edgeSerializer = ctx.getBean(EdgeSerializer.class);
+               SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
+
+               MigrationControllerInternal internal = new MigrationControllerInternal(loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+
+               try {
+                       internal.run(args);
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+               AAIGraph.getInstance().graphShutdown();
+               System.exit(0);
+       }
+}
diff --git a/src/main/java/org/onap/aai/migration/MigrationControllerInternal.java b/src/main/java/org/onap/aai/migration/MigrationControllerInternal.java
new file mode 100644 (file)
index 0000000..8ef0603
--- /dev/null
@@ -0,0 +1,498 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.io.IoCore;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.util.AAIConstants;
+import org.onap.aai.util.FormatDate;
+import org.reflections.Reflections;
+import org.slf4j.MDC;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+
+/**
+ * Runs a series of migrations from a defined directory based on the presence of
+ * the {@link org.onap.aai.migration.Enabled Enabled} annotation
+ *
+ * It will also write a record of the migrations run to the database.
+ */
+public class MigrationControllerInternal {
+
+       // Created lazily in run() so the EELF logging properties can be set first.
+       private EELFLogger logger;
+       // Threshold (sum of migration priorities) above which a pre-migration snapshot
+       // was to be forced; see takePreSnapshotIfRequired() — currently unused because
+       // a snapshot is always taken.
+       private final int DANGER_ZONE = 10;
+       // Node type of the bookkeeping vertex that records which migrations have run.
+       public static final String VERTEX_TYPE = "migration-list-1707";
+       // One human-readable result line per migration, printed by outputResultsSummary().
+       private final List<String> resultsSummary = new ArrayList<>();
+       // Notification helpers of committed migrations; their events fire after all migrations ran.
+       private final List<NotificationHelper> notifications = new ArrayList<>();
+       private static final String SNAPSHOT_LOCATION = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP + "logs" + AAIConstants.AAI_FILESEP + "data" + AAIConstants.AAI_FILESEP + "migrationSnapshots";
+
+       private LoaderFactory loaderFactory;
+       private EdgeIngestor edgeIngestor;
+       private EdgeSerializer edgeSerializer;
+       private final SchemaVersions schemaVersions;
+
+       /**
+        * Creates the controller with the schema/edge collaborators it needs to
+        * instantiate individual {@link Migrator} implementations reflectively.
+        */
+       public MigrationControllerInternal(LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){
+           this.loaderFactory = loaderFactory;
+               this.edgeIngestor = edgeIngestor;
+               this.edgeSerializer = edgeSerializer;
+               this.schemaVersions = schemaVersions;
+       }
+
+       /**
+        * Entry point invoked by MigrationController.main: parses the command
+        * line, connects to the graph, discovers Migrator subclasses on the
+        * classpath and executes every {@link Enabled} one, recording results.
+        *
+        * @param args
+        *            the command line arguments (see {@link CommandLineArgs})
+        */
+       public void run(String[] args) {
+               // Set the logging file properties to be used by EELFManager
+               System.setProperty("aai.service.name", MigrationController.class.getSimpleName());
+               Properties props = System.getProperties();
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "migration-logback.xml");
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES);
+
+               logger = EELFManager.getInstance().getLogger(MigrationControllerInternal.class.getSimpleName());
+               MDC.put("logFilenameAppender", MigrationController.class.getSimpleName());
+
+               boolean loadSnapshot = false;
+
+               CommandLineArgs cArgs = new CommandLineArgs();
+
+               JCommander jCommander = new JCommander(cArgs, args);
+               jCommander.setProgramName(MigrationController.class.getSimpleName());
+
+               // Set flag to load from snapshot based on the presence of snapshot and
+               // graph storage backend of inmemory
+               if (cArgs.dataSnapshot != null && !cArgs.dataSnapshot.isEmpty()) {
+                       try {
+                               PropertiesConfiguration config = new PropertiesConfiguration(cArgs.config);
+                               if (config.getString("storage.backend").equals("inmemory")) {
+                                       loadSnapshot = true;
+                                       // These system properties are read elsewhere during graph startup
+                                       // to populate the in-memory graph from the snapshot file.
+                                       System.setProperty("load.snapshot.file", "true");
+                                       System.setProperty("snapshot.location", cArgs.dataSnapshot);
+                               }
+                       } catch (ConfigurationException e) {
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                               logAndPrint("ERROR: Could not load janusgraph configuration.\n" + ExceptionUtils.getFullStackTrace(e));
+                               return;
+                       }
+               }
+               System.setProperty("realtime.db.config", cArgs.config);
+               logAndPrint("\n\n---------- Connecting to Graph ----------");
+               AAIGraph.getInstance();
+
+               logAndPrint("---------- Connection Established ----------");
+               SchemaVersion version = schemaVersions.getDefaultVersion();
+               QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+               ModelType introspectorFactoryType = ModelType.MOXY;
+               Loader loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);
+               TransactionalGraphEngine engine = new JanusGraphDBEngine(queryStyle, DBConnectionType.REALTIME, loader);
+
+               if (cArgs.help) {
+                       jCommander.usage();
+                       engine.rollback();
+                       return;
+               }
+
+               // Discover every Migrator subclass in this package via classpath scanning.
+               Reflections reflections = new Reflections("org.onap.aai.migration");
+               List<Class<? extends Migrator>> migratorClasses = new ArrayList<>(findClasses(reflections));
+               //Displays list of migration classes which needs to be executed.Pass flag "-l" following by the class names
+               if (cArgs.list) {
+                       listMigrationWithStatus(cArgs, migratorClasses, engine);
+                       return;
+               }
+
+               logAndPrint("---------- Looking for migration scripts to be executed. ----------");
+               //Excluding any migration class when run migration from script.Pass flag "-e" following by the class names
+               if (!cArgs.excludeClasses.isEmpty()) {
+                       migratorClasses = filterMigrationClasses(cArgs.excludeClasses, migratorClasses);
+                       listMigrationWithStatus(cArgs, migratorClasses, engine);
+               }
+               List<Class<? extends Migrator>> migratorClassesToRun = createMigratorList(cArgs, migratorClasses);
+
+               sortList(migratorClassesToRun);
+
+               if (!cArgs.scripts.isEmpty() && migratorClassesToRun.isEmpty()) {
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
+                       logAndPrint("\tERROR: Failed to find migrations " + cArgs.scripts + ".");
+                       logAndPrint("---------- Done ----------");
+                       LoggingContext.successStatusFields();
+               }
+
+               // NOTE(review): execution deliberately(?) continues past the error above with an
+               // empty run list — confirm a `return` is not missing here.
+               logAndPrint("\tFound " + migratorClassesToRun.size() + " migration scripts.");
+               logAndPrint("---------- Executing Migration Scripts ----------");
+
+
+               if (!cArgs.skipPreMigrationSnapShot) {
+                       takePreSnapshotIfRequired(engine, cArgs, migratorClassesToRun);
+               }
+
+               for (Class<? extends Migrator> migratorClass : migratorClassesToRun) {
+                       String name = migratorClass.getSimpleName();
+                       Migrator migrator;
+                       if (migratorClass.isAnnotationPresent(Enabled.class)) {
+
+                               try {
+                                       engine.startTransaction();
+                                       if (!cArgs.forced && hasAlreadyRun(name, engine)) {
+                                               logAndPrint("Migration " + name + " has already been run on this database and will not be executed again. Use -f to force execution");
+                                               continue;
+                                       }
+                                       // Every Migrator implementation must provide a constructor with
+                                       // exactly this parameter list; otherwise it is skipped below.
+                                       migrator = migratorClass
+                                               .getConstructor(
+                                                       TransactionalGraphEngine.class,
+                                                       LoaderFactory.class,
+                                                       EdgeIngestor.class,
+                                                       EdgeSerializer.class,
+                                                       SchemaVersions.class
+                                               ).newInstance(engine, loaderFactory, edgeIngestor, edgeSerializer,schemaVersions);
+                               } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) {
+                                       LoggingContext.statusCode(StatusCode.ERROR);
+                                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                                       logAndPrint("EXCEPTION caught initalizing migration class " + migratorClass.getSimpleName() + ".\n" + ExceptionUtils.getFullStackTrace(e));
+                                       LoggingContext.successStatusFields();
+                                       engine.rollback();
+                                       continue;
+                               }
+                               logAndPrint("\tRunning " + migratorClass.getSimpleName() + " migration script.");
+                               logAndPrint("\t\t See " + System.getProperty("AJSC_HOME") + "/logs/migration/" + migratorClass.getSimpleName() + "/* for logs.");
+                               // Redirect per-migration log output into the migration's own file.
+                               MDC.put("logFilenameAppender", migratorClass.getSimpleName() + "/" + migratorClass.getSimpleName());
+
+                               migrator.run();
+
+                               commitChanges(engine, migrator, cArgs);
+                       } else {
+                               logAndPrint("\tSkipping " + migratorClass.getSimpleName() + " migration script because it has been disabled.");
+                       }
+               }
+               MDC.put("logFilenameAppender", MigrationController.class.getSimpleName());
+               // Fire the notification events collected from all committed migrations.
+               for (NotificationHelper notificationHelper : notifications) {
+                       try {
+                               notificationHelper.triggerEvents();
+                       } catch (AAIException e) {
+                               LoggingContext.statusCode(StatusCode.ERROR);
+                               LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+                               logAndPrint("\tcould not event");
+                               logger.error("could not event", e);
+                               LoggingContext.successStatusFields();
+                       }
+               }
+               logAndPrint("---------- Done ----------");
+
+               // Save post migration snapshot if snapshot was loaded
+               if (!cArgs.skipPostMigrationSnapShot) {
+                       generateSnapshot(engine, "post");
+               }
+
+               outputResultsSummary();
+       }
+
+       /**
+        * This method is used to remove excluded classes from migration from the
+        * script command.
+        *
+        * @param excludeClasses
+        *            : Classes to be removed from Migration
+        * @param migratorClasses
+        *            : Classes to execute migration.
+        * @return the migrator classes whose simple name is not in excludeClasses
+        */
+       private List<Class<? extends Migrator>> filterMigrationClasses(
+                       List<String> excludeClasses,
+                       List<Class<? extends Migrator>> migratorClasses) {
+
+               List<Class<? extends Migrator>> filteredMigratorClasses = migratorClasses
+                               .stream()
+                               .filter(migratorClass -> !excludeClasses.contains(migratorClass
+                                               .getSimpleName())).collect(Collectors.toList());
+
+               return filteredMigratorClasses;
+       }
+
+       /**
+        * Prints every known migration in priority order together with its
+        * Enabled/Disabled state and whether it has already run in this
+        * environment. Read-only: the transaction is rolled back afterwards.
+        */
+       private void listMigrationWithStatus(CommandLineArgs cArgs,
+                       List<Class<? extends Migrator>> migratorClasses, TransactionalGraphEngine engine) {
+                       sortList(migratorClasses);
+                       engine.startTransaction();
+                       System.out.println("---------- List of all migrations ----------");
+                       migratorClasses.forEach(migratorClass -> {
+                               boolean enabledAnnotation = migratorClass.isAnnotationPresent(Enabled.class);
+                               String enabled = enabledAnnotation ? "Enabled" : "Disabled";
+                               StringBuilder sb = new StringBuilder();
+                               sb.append(migratorClass.getSimpleName());
+                               sb.append(" in package ");
+                               sb.append(migratorClass.getPackage().getName().substring(migratorClass.getPackage().getName().lastIndexOf('.')+1));
+                               sb.append(" is ");
+                               sb.append(enabled);
+                               sb.append(" ");
+                               sb.append("[" + getDbStatus(migratorClass.getSimpleName(), engine) + "]");
+                               System.out.println(sb.toString());
+                       });
+                       engine.rollback();
+                       System.out.println("---------- Done ----------");
+               }
+
+       // Human-readable form of hasAlreadyRun() for the -l listing.
+       private String getDbStatus(String name, TransactionalGraphEngine engine) {
+               if (hasAlreadyRun(name, engine)) {
+                       return "Already executed in this env";
+               }
+               return "Will be run on next execution if Enabled";
+       }
+
+       /**
+        * True when the bookkeeping vertex (node-type {@link #VERTEX_TYPE})
+        * carries a boolean property named after the migration set to true,
+        * i.e. the migration has already been committed on this database.
+        */
+       private boolean hasAlreadyRun(String name, TransactionalGraphEngine engine) {
+               return engine.asAdmin().getReadOnlyTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE).has(name, true).hasNext();
+       }
+       /**
+        * Collects all Migrator subtypes on the classpath, excluding the
+        * abstract helper bases that must never run on their own.
+        */
+       private Set<Class<? extends Migrator>> findClasses(Reflections reflections) {
+               Set<Class<? extends Migrator>> migratorClasses = reflections.getSubTypesOf(Migrator.class);
+               /*
+                * TODO- Change this to make sure only classes in the specific $release are added in the runList
+                * Or add a annotation like exclude which folks again need to remember to add ??
+                */
+
+               migratorClasses.remove(PropertyMigrator.class);
+               migratorClasses.remove(EdgeMigrator.class);
+               return migratorClasses;
+       }
+
+
+       /**
+        * Takes a "pre" snapshot of the graph. The priority-sum / DANGER_ZONE
+        * heuristic below is commented out, so a snapshot is currently taken
+        * unconditionally (unless --skipPreMigrationSnapShot was passed by the
+        * caller of this method).
+        */
+       private void takePreSnapshotIfRequired(TransactionalGraphEngine engine, CommandLineArgs cArgs, List<Class<? extends Migrator>> migratorClassesToRun) {
+
+               /*int sum = 0;
+               for (Class<? extends Migrator> migratorClass : migratorClassesToRun) {
+                       if (migratorClass.isAnnotationPresent(Enabled.class)) {
+                               sum += migratorClass.getAnnotation(MigrationPriority.class).value();
+                       }
+               }
+
+               if (sum >= DANGER_ZONE) {
+
+                       logAndPrint("Entered Danger Zone. Taking snapshot.");
+               }*/
+
+               //always take snapshot for now
+
+               generateSnapshot(engine, "pre");
+
+       }
+
+
+       /**
+        * Narrows the discovered classes to those requested with -m; with no
+        * -m argument every discovered class is kept.
+        */
+       private List<Class<? extends Migrator>> createMigratorList(CommandLineArgs cArgs,
+                       List<Class<? extends Migrator>> migratorClasses) {
+               List<Class<? extends Migrator>> migratorClassesToRun = new ArrayList<>();
+
+               for (Class<? extends Migrator> migratorClass : migratorClasses) {
+                       if (!cArgs.scripts.isEmpty() && !cArgs.scripts.contains(migratorClass.getSimpleName())) {
+                               continue;
+                       } else {
+                               migratorClassesToRun.add(migratorClass);
+                       }
+               }
+               return migratorClassesToRun;
+       }
+
+
+       /**
+        * Sorts in place by ascending {@link MigrationPriority} value, breaking
+        * ties by simple class name. Classes missing the annotation make the
+        * comparator throw, which is swallowed and treated as "equal".
+        */
+       private void sortList(List<Class<? extends Migrator>> migratorClasses) {
+               Collections.sort(migratorClasses, (m1, m2) -> {
+                       try {
+                               if (m1.getAnnotation(MigrationPriority.class).value() > m2.getAnnotation(MigrationPriority.class).value()) {
+                                       return 1;
+                               } else if (m1.getAnnotation(MigrationPriority.class).value() < m2.getAnnotation(MigrationPriority.class).value()) {
+                                       return -1;
+                               } else {
+                                       return m1.getSimpleName().compareTo(m2.getSimpleName());
+                               }
+                       } catch (Exception e) {
+                               return 0;
+                       }
+               });
+       }
+
+
+       /**
+        * Writes a timestamped GraphSON snapshot of the whole graph under
+        * SNAPSHOT_LOCATION, creating parent directories as needed. The
+        * transaction used for the export is rolled back (read-only).
+        *
+        * @param phase "pre" or "post", used in the file name and log lines
+        */
+       private void generateSnapshot(TransactionalGraphEngine engine, String phase) {
+
+               FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
+               String dateStr= fd.getDateTime();
+               String fileName = SNAPSHOT_LOCATION + File.separator + phase + "Migration." + dateStr + ".graphson";
+               logAndPrint("Saving snapshot of graph " + phase + " migration to " + fileName);
+               Graph transaction = null;
+               try {
+
+                       Path pathToFile = Paths.get(fileName);
+                       if (!pathToFile.toFile().exists()) {
+                               Files.createDirectories(pathToFile.getParent());
+                       }
+                       transaction = engine.startTransaction();
+                       transaction.io(IoCore.graphson()).writeGraph(fileName);
+                       engine.rollback();
+               } catch (IOException e) {
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);
+                       logAndPrint("ERROR: Could not write in memory graph to " + phase + "Migration file. \n" + ExceptionUtils.getFullStackTrace(e));
+                       LoggingContext.successStatusFields();
+                       engine.rollback();
+               }
+
+               logAndPrint( phase + " migration snapshot saved to " + fileName);
+       }
+       /**
+        * Log and print.
+        *
+        * Writes the message both to stdout (for the shell script operator)
+        * and to the EELF logger.
+        *
+        * @param msg
+        *            the msg
+        */
+       protected void logAndPrint(String msg) {
+               System.out.println(msg);
+               logger.info(msg);
+       }
+
+       /**
+        * Commit changes.
+        *
+        * Rolls back on FAILURE or CHECK_LOGS status; otherwise, when --commit
+        * was given, marks the migration as run on the bookkeeping vertex,
+        * queues its notifications and commits. Without --commit the work is
+        * rolled back (dry run). Appends a summary line either way.
+        *
+        * @param engine
+        *            the graph transaction
+        * @param migrator
+        *            the migrator
+        * @param cArgs
+        *            the parsed command line (read for the --commit flag)
+        */
+       protected void commitChanges(TransactionalGraphEngine engine, Migrator migrator, CommandLineArgs cArgs) {
+
+               String simpleName = migrator.getClass().getSimpleName();
+               String message;
+               if (migrator.getStatus().equals(Status.FAILURE)) {
+                       message = "Migration " + simpleName + " Failed. Rolling back.";
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       logAndPrint("\t" + message);
+                       LoggingContext.successStatusFields();
+                       migrator.rollback();
+               } else if (migrator.getStatus().equals(Status.CHECK_LOGS)) {
+                       message = "Migration " + simpleName + " encountered an anomaly, check logs. Rolling back.";
+                       LoggingContext.statusCode(StatusCode.ERROR);
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);
+                       logAndPrint("\t" + message);
+                       LoggingContext.successStatusFields();
+                       migrator.rollback();
+               } else {
+                       MDC.put("logFilenameAppender", simpleName + "/" + simpleName);
+
+                       if (cArgs.commit) {
+                               // Create the bookkeeping vertex on first use, then flag this
+                               // migration as executed on it.
+                               if (!engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE).hasNext()) {
+                                       engine.asAdmin().getTraversalSource().addV(AAIProperties.NODE_TYPE, VERTEX_TYPE).iterate();
+                               }
+                               engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VERTEX_TYPE)
+                               .property(simpleName, true).iterate();
+                               MDC.put("logFilenameAppender", MigrationController.class.getSimpleName());
+                               notifications.add(migrator.getNotificationHelper());
+                               migrator.commit();
+                               message = "Migration " + simpleName + " Succeeded. Changes Committed.";
+                               logAndPrint("\t"+ message +"\t");
+                       } else {
+                               message = "--commit not specified. Not committing changes for " + simpleName + " to database.";
+                               logAndPrint("\t" + message);
+                               migrator.rollback();
+                       }
+
+               }
+
+               resultsSummary.add(message);
+
+       }
+
+       // Prints the collected per-migration result lines gathered in commitChanges().
+       private void outputResultsSummary() {
+               logAndPrint("---------------------------------");
+               logAndPrint("-------------Summary-------------");
+               for (String result : resultsSummary) {
+                       logAndPrint(result);
+               }
+               logAndPrint("---------------------------------");
+               logAndPrint("---------------------------------");
+       }
+
+}
+
+class CommandLineArgs {
+
+       // --help: print JCommander usage and exit without running migrations.
+       @Parameter(names = "--help", help = true)
+       public boolean help;
+
+       // -c: path to the janusgraph/realtime properties file; also becomes
+       // the "realtime.db.config" system property.
+       @Parameter(names = "-c", description = "location of configuration file")
+       public String config;
+
+       // -m: simple class names of the migrations to run; empty means all discovered.
+       @Parameter(names = "-m", description = "names of migration scripts")
+       public List<String> scripts = new ArrayList<>();
+
+       // -l: only list migrations and their run status, then exit.
+       @Parameter(names = "-l", description = "list the status of migrations")
+       public boolean list = false;
+
+       // -d: graphson snapshot to preload when the storage backend is "inmemory".
+       @Parameter(names = "-d", description = "location of data snapshot", hidden = true)
+       public String dataSnapshot;
+
+       // -f: run migrations even if they are already recorded as executed.
+       @Parameter(names = "-f", description = "force migrations to be rerun")
+       public boolean forced = false;
+
+       // --commit: actually persist changes; without it every migration is a dry run.
+       @Parameter(names = "--commit", description = "commit changes to graph")
+       public boolean commit = false;
+
+       // -e: simple class names of migrations to exclude from the run.
+       @Parameter(names = "-e", description = "exclude list of migrator classes")
+       public List<String> excludeClasses = new ArrayList<>();
+
+       @Parameter(names = "--skipPreMigrationSnapShot", description = "skips taking the PRE migration snapshot")
+       public boolean skipPreMigrationSnapShot = false;
+
+       @Parameter(names = "--skipPostMigrationSnapShot", description = "skips taking the POST migration snapshot")
+       public boolean skipPostMigrationSnapShot = false;
+}
diff --git a/src/main/java/org/onap/aai/migration/MigrationDangerRating.java b/src/main/java/org/onap/aai/migration/MigrationDangerRating.java
new file mode 100644 (file)
index 0000000..1d82dc3
--- /dev/null
@@ -0,0 +1,41 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+
/**
 * Rates how dangerous a migration is when it is picked up by the
 * {@link org.onap.aai.migration.MigrationControllerInternal MigrationController}.
 *
 * The larger the number, the more danger.
 *
 * Range is 0-10
 */
@Target(ElementType.TYPE)
@Retention(value = RetentionPolicy.RUNTIME)
public @interface MigrationDangerRating {

	/** The danger rating, from 0 (safe) to 10 (most dangerous). */
	int value();

}
diff --git a/src/main/java/org/onap/aai/migration/MigrationPriority.java b/src/main/java/org/onap/aai/migration/MigrationPriority.java
new file mode 100644 (file)
index 0000000..d9e84b8
--- /dev/null
@@ -0,0 +1,41 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+
/**
 * Declares the execution order of a migration for the
 * {@link org.onap.aai.migration.MigrationControllerInternal MigrationController}.
 *
 * Migrations with a lower value are run before migrations with a higher value;
 * ties are broken by class name.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface MigrationPriority {

	/** Priority of the migration; lower numbers run first. */
	int value();

}
diff --git a/src/main/java/org/onap/aai/migration/Migrator.java b/src/main/java/org/onap/aai/migration/Migrator.java
new file mode 100644 (file)
index 0000000..106d5e4
--- /dev/null
@@ -0,0 +1,385 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Optional;
+import java.util.UUID;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Property;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.edges.exceptions.AmbiguousRuleChoiceException;
+import org.onap.aai.edges.exceptions.EdgeRuleNotFoundException;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.db.exceptions.NoEdgeRuleFoundException;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * This class defines an A&AI Migration
+ */
+@MigrationPriority(0)
+@MigrationDangerRating(0)
+public abstract class Migrator implements Runnable {
+       
+       protected EELFLogger logger = null;
+
+       protected DBSerializer serializer = null;
+       protected Loader loader = null;
+
+       protected TransactionalGraphEngine engine;
+       protected NotificationHelper notificationHelper;
+
+       protected EdgeSerializer edgeSerializer;
+       protected EdgeIngestor edgeIngestor;
+
+       protected LoaderFactory loaderFactory;
+       protected SchemaVersions schemaVersions;
+
+       protected static final String MIGRATION_ERROR = "Migration Error: ";
+       protected static final String MIGRATION_SUMMARY_COUNT = "Migration Summary Count: ";
+
+       /**
+        * Instantiates a new migrator.
+        *
+        * @param g the g
+        * @param schemaVersions
+        */
+       public Migrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){
+        this.engine = engine;
+               this.loaderFactory  = loaderFactory;
+               this.edgeIngestor = edgeIngestor;
+               this.edgeSerializer = edgeSerializer;
+               this.schemaVersions = schemaVersions;
+        initDBSerializer();
+        this.notificationHelper = new NotificationHelper(loader, serializer, loaderFactory, schemaVersions, engine, "AAI-MIGRATION", this.getMigrationName());
+               logger = EELFManager.getInstance().getLogger(this.getClass().getSimpleName());
+               logger.info("\tInitilization of " + this.getClass().getSimpleName() + " migration script complete.");
+       }
+
+       /**
+        * Gets the status.
+        *
+        * @return the status
+        */
+       public abstract Status getStatus();
+
+       /**
+        * Rollback.
+        */
+       public void rollback() {
+        engine.rollback();
+       }
+
+       /**
+        * Commit.
+        */
+       public void commit() {
+        engine.commit();
+       }
+
+       /**
+        * Create files containing vertices for dmaap Event Generation
+        * @param dmaapMsgList
+        */
+       public void createDmaapFiles(List<String> dmaapMsgList) {
+               String fileName = getMigrationName() + "-" + UUID.randomUUID();
+               String logDirectory = System.getProperty("AJSC_HOME") + "/logs/migration/dmaapEvents";
+
+               File f = new File(logDirectory);
+               f.mkdirs();
+
+               if (dmaapMsgList.size() > 0) {
+                       try {
+                               Files.write(Paths.get(logDirectory+"/"+fileName), (Iterable<String>)dmaapMsgList.stream()::iterator);
+                       } catch (IOException e) {
+                               logger.error("Unable to generate file with dmaap msgs for MigrateHUBEvcInventory", e);
+                       }
+               } else {
+                       logger.info("No dmaap msgs detected for MigrateForwardEvcCircuitId");
+               }
+       }
+
+       /**
+        * Create files containing data for dmaap delete Event Generation
+        * @param dmaapVertexList
+        */
+       public void createDmaapFilesForDelete(List<Introspector> dmaapDeleteIntrospectorList) {try {
+               System.out.println("dmaapDeleteIntrospectorList :: " + dmaapDeleteIntrospectorList.size());
+               String fileName = "DELETE-"+ getMigrationName() + "-" + UUID.randomUUID();
+               String logDirectory = System.getProperty("AJSC_HOME") + "/logs/migration/dmaapEvents/";
+               File f = new File(logDirectory);
+               f.mkdirs();
+               
+               try{
+                       Files.createFile(Paths.get(logDirectory + "/" + fileName));
+               }catch(Exception e) {
+                       e.printStackTrace();
+               }
+               
+               if (dmaapDeleteIntrospectorList.size() > 0) {                           
+                       dmaapDeleteIntrospectorList.stream().forEach(svIntr-> {
+                               try {
+                                       String str = svIntr.marshal(false);
+                                       String finalStr="";
+                                       try {
+                                               finalStr=svIntr.getName() + "#@#" + svIntr.getURI() + "#@#" + str+"\n";
+                                               Files.write(Paths.get(logDirectory + "/" + fileName),finalStr.getBytes(),StandardOpenOption.APPEND);
+                                       } catch (IOException e) {
+                                               logger.error("Unable to generate file with dmaap msgs for "+getMigrationName(), e);
+                                       }
+
+                               }catch (Exception e) {
+                                               // TODO Auto-generated catch block
+                                               e.printStackTrace();
+                                       }
+                               });
+                               
+                               //Files.write(Paths.get(logDirectory+"/"+fileName), (Iterable<Vertex>)dmaapVertexList.stream()::iterator);
+                       } 
+               }catch (Exception e) {
+                       e.printStackTrace();
+                       logger.error("Unable to generate file with dmaap msgs for "+getMigrationName(), e);
+               }}
+
+       /**
+        * As string.
+        *
+        * @param v the v
+        * @return the string
+        */
+       protected String asString(Vertex v) {
+               final JSONObject result = new JSONObject();
+               Iterator<VertexProperty<Object>> properties = v.properties();
+               Property<Object> pk = null;
+               try {
+                       while (properties.hasNext()) {
+                               pk = properties.next();
+                               result.put(pk.key(), pk.value());
+                       }
+               } catch (JSONException e) {
+                       logger.error("Warning error reading vertex: " + e);
+               }
+
+               return result.toString();
+       }
+
+       /**
+        * As string.
+        *
+        * @param edge the edge
+        * @return the string
+        */
+       protected String asString(Edge edge) {
+               final JSONObject result = new JSONObject();
+               Iterator<Property<Object>> properties = edge.properties();
+               Property<Object> pk = null;
+               try {
+                       while (properties.hasNext()) {
+                               pk = properties.next();
+                               result.put(pk.key(), pk.value());
+                       }
+               } catch (JSONException e) {
+                       logger.error("Warning error reading edge: " + e);
+               }
+
+               return result.toString();
+       }
+
+       /**
+        *
+        * @param v
+        * @param numLeadingTabs number of leading \t char's
+        * @return
+        */
+       protected String toStringForPrinting(Vertex v, int numLeadingTabs) {
+               String prefix = String.join("", Collections.nCopies(numLeadingTabs, "\t"));
+               if (v == null) {
+                       return "";
+               }
+               final StringBuilder sb = new StringBuilder();
+               sb.append(prefix + v + "\n");
+               v.properties().forEachRemaining(prop -> sb.append(prefix + prop + "\n"));
+               return sb.toString();
+       }
+
+       /**
+        *
+        * @param e
+        * @param numLeadingTabs number of leading \t char's
+        * @return
+        */
+       protected String toStringForPrinting(Edge e, int numLeadingTabs) {
+               String prefix = String.join("", Collections.nCopies(numLeadingTabs, "\t"));
+               if (e == null) {
+                       return "";
+               }
+               final StringBuilder sb = new StringBuilder();
+               sb.append(prefix + e + "\n");
+               sb.append(prefix + e.label() + "\n");
+               e.properties().forEachRemaining(prop -> sb.append(prefix + "\t" + prop + "\n"));
+               return sb.toString();
+       }
+
+       /**
+        * Checks for edge between.
+        *
+        * @param a a
+        * @param b b
+        * @param d d
+        * @param edgeLabel the edge label
+        * @return true, if successful
+        */
+       protected boolean hasEdgeBetween(Vertex a, Vertex b, Direction d, String edgeLabel) {
+
+               if (d.equals(Direction.OUT)) {
+                       return engine.asAdmin().getReadOnlyTraversalSource().V(a).out(edgeLabel).where(__.otherV().hasId(b)).hasNext();
+               } else {
+                       return engine.asAdmin().getReadOnlyTraversalSource().V(a).in(edgeLabel).where(__.otherV().hasId(b)).hasNext();
+               }
+
+       }
+       
+       /**
+        * Creates the edge
+        *
+        * @param type the edge type - COUSIN or TREE
+        * @param out the out
+        * @param in the in
+        * @return the edge
+        */
+       protected Edge createEdge(EdgeType type, Vertex out, Vertex in) throws AAIException {
+               Edge newEdge = null;
+               try {
+                       if (type.equals(EdgeType.COUSIN)){
+                               newEdge = edgeSerializer.addEdge(this.engine.asAdmin().getTraversalSource(), out, in);
+                       } else {
+                               newEdge = edgeSerializer.addTreeEdge(this.engine.asAdmin().getTraversalSource(), out, in);
+                       }
+               } catch (NoEdgeRuleFoundException e) {
+                       throw new AAIException("AAI_6129", e);
+               }
+               return newEdge;
+       }
+
+       /**
+        * Creates the edge
+        *
+        * @param type the edge type - COUSIN or TREE
+        * @param out the out
+        * @param in the in
+        * @return the edge
+        */
+       protected Edge createPrivateEdge(Vertex out, Vertex in) throws AAIException {
+               Edge newEdge = null;
+               try {
+                       newEdge = edgeSerializer.addPrivateEdge(this.engine.asAdmin().getTraversalSource(), out, in, null);
+               } catch (EdgeRuleNotFoundException | AmbiguousRuleChoiceException e) {
+                       throw new AAIException("AAI_6129", e);
+               }
+               return newEdge;
+       }
+
+       /**
+        * Creates the TREE edge 
+        *
+        * @param out the out
+        * @param in the in
+        * @return the edge
+        */
+       protected Edge createTreeEdge(Vertex out, Vertex in) throws AAIException {
+               Edge newEdge = createEdge(EdgeType.TREE, out, in);
+               return newEdge;
+       }
+       
+       /**
+        * Creates the COUSIN edge 
+        *
+        * @param out the out
+        * @param in the in
+        * @return the edge
+        */
+       protected Edge createCousinEdge(Vertex out, Vertex in) throws AAIException {
+               Edge newEdge = createEdge(EdgeType.COUSIN, out, in);
+               return newEdge;
+       }
+
+       private void initDBSerializer() {
+               SchemaVersion version = schemaVersions.getDefaultVersion();
+               ModelType introspectorFactoryType = ModelType.MOXY;
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);
+               try {
+                       this.serializer = new DBSerializer(version, this.engine, introspectorFactoryType, this.getMigrationName());
+               } catch (AAIException e) {
+                       throw new RuntimeException("could not create seralizer", e);
+               }
+       }
+       
+       /**
+        * These are the node types you would like your traversal to process
+        * @return
+        */
+       public abstract Optional<String[]> getAffectedNodeTypes();
+       
+       /**
+        * used as the "fromAppId" when modifying vertices
+        * @return
+        */
+       public abstract String getMigrationName();
+       
+       /**
+        * updates all internal vertex properties
+        * @param v
+        * @param isNewVertex
+        */
+       protected void touchVertexProperties(Vertex v, boolean isNewVertex) {
+               this.serializer.touchStandardVertexProperties(v, isNewVertex);
+       }
+       
+       public NotificationHelper getNotificationHelper() {
+               return this.notificationHelper;
+       }
+}
diff --git a/src/main/java/org/onap/aai/migration/NotificationHelper.java b/src/main/java/org/onap/aai/migration/NotificationHelper.java
new file mode 100644 (file)
index 0000000..ff5c030
--- /dev/null
@@ -0,0 +1,118 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.List;
+
+import javax.ws.rs.core.Response.Status;
+
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.rest.ueb.UEBNotification;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.serialization.engines.query.QueryEngine;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.onap.aai.setup.SchemaVersions;
+
+/**
+ * Allows for DMaaP notifications from Migrations
+ */
+public class NotificationHelper {
+
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(NotificationHelper.class);
+       protected final DBSerializer serializer;
+       protected final Loader loader;
+       protected final TransactionalGraphEngine engine;
+       protected final String transactionId;
+       protected final String sourceOfTruth;
+       protected final UEBNotification notification;
+
+       public NotificationHelper(Loader loader, DBSerializer serializer, LoaderFactory loaderFactory, SchemaVersions schemaVersions, TransactionalGraphEngine engine, String transactionId, String sourceOfTruth) {
+               this.loader = loader;
+               this.serializer = serializer;
+               this.engine = engine;
+               this.transactionId = transactionId;
+               this.sourceOfTruth = sourceOfTruth;
+               this.notification = new UEBNotification(loader, loaderFactory, schemaVersions);
+       }
+       
+       public void addEvent(Vertex v, Introspector obj, EventAction action, URI uri, String basePath) throws UnsupportedEncodingException, AAIException {
+               HashMap<String, Introspector> relatedObjects = new HashMap<>();
+               Status status = mapAction(action);
+
+               if (!obj.isTopLevel()) {
+                       relatedObjects = this.getRelatedObjects(serializer, engine.getQueryEngine(), v);
+               }
+               notification.createNotificationEvent(transactionId, sourceOfTruth, status, uri, obj, relatedObjects, basePath);
+               
+       }
+       
+       public void addDeleteEvent(String transactionId, String sourceOfTruth, EventAction action, URI uri, Introspector obj, HashMap relatedObjects,String basePath) throws UnsupportedEncodingException, AAIException {
+               Status status = mapAction(action);
+               notification.createNotificationEvent(transactionId, sourceOfTruth, status, uri, obj, relatedObjects, basePath);
+               
+       }
+       
+       private HashMap<String, Introspector> getRelatedObjects(DBSerializer serializer, QueryEngine queryEngine, Vertex v) throws AAIException {
+               HashMap<String, Introspector> relatedVertices = new HashMap<>();
+               List<Vertex> vertexChain = queryEngine.findParents(v);
+               for (Vertex vertex : vertexChain) {
+                       try {
+                               final Introspector vertexObj = serializer.getVertexProperties(vertex);
+                               relatedVertices.put(vertexObj.getObjectId(), vertexObj);
+                       } catch (AAIUnknownObjectException | UnsupportedEncodingException e) {
+                               LOGGER.warn("Unable to get vertex properties, partial list of related vertices returned");
+                       }
+                       
+               }
+               
+               return relatedVertices;
+       }
+       
+       private Status mapAction(EventAction action) {
+               if (EventAction.CREATE.equals(action)) {
+                       return Status.CREATED;
+               } else if (EventAction.UPDATE.equals(action)) {
+                       return Status.OK;
+               } else if (EventAction.DELETE.equals(action)) {
+                       return Status.NO_CONTENT;
+               } else {
+                       return Status.OK;
+               }
+       }
+       
+       public void triggerEvents() throws AAIException {
+               notification.triggerEvents();
+       }
+
+       public UEBNotification getNotifications() {
+               return this.notification;
+       }
+}
diff --git a/src/main/java/org/onap/aai/migration/PropertyMigrator.java b/src/main/java/org/onap/aai/migration/PropertyMigrator.java
new file mode 100644 (file)
index 0000000..4599243
--- /dev/null
@@ -0,0 +1,146 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.P;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.Cardinality;
+import org.janusgraph.core.PropertyKey;
+import org.janusgraph.core.schema.JanusGraphManagement;
+import org.onap.aai.setup.SchemaVersions;
+
+/**
+ * A migration template for migrating a property from one name to another
+ */
+@MigrationPriority(0)
+@MigrationDangerRating(1)
+public abstract class PropertyMigrator extends Migrator {
+
+       protected String OLD_FIELD;
+       protected String NEW_FIELD;
+    protected Integer changedVertexCount;
+       protected Class<?> fieldType;
+       protected Cardinality cardinality;
+       protected final JanusGraphManagement graphMgmt;
+
+
+       public PropertyMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               this.changedVertexCount = 0;
+               this.graphMgmt = engine.asAdmin().getManagementSystem();
+       }
+
+       public void initialize(String oldName, String newName, Class<?> type, Cardinality cardinality){
+               this.OLD_FIELD = oldName;
+               this.NEW_FIELD = newName;
+               this.fieldType = type;
+               this.cardinality = cardinality;
+       }
+
+       /**
+        * Do not override this method as an inheritor of this class
+        */
+       @Override
+       public void run() {
+           logger.info("-------- Starting PropertyMigrator for node type " + P.within(this.getAffectedNodeTypes().get())
+                + " from property " + OLD_FIELD + " to " + NEW_FIELD + " --------");
+               modifySchema();
+               executeModifyOperation();
+               logger.info(Migrator.MIGRATION_SUMMARY_COUNT + changedVertexCount + " vertices modified.");
+       }
+
+       protected void modifySchema() {
+               this.addIndex(this.addProperty());
+               graphMgmt.commit();
+       }
+       
+       /**
+        * This is where inheritors should add their logic
+        */
+       protected void executeModifyOperation() {
+               changePropertyName();
+       }
+       
+       protected void changePropertyName() {
+               GraphTraversal<Vertex, Vertex> g = this.engine.asAdmin().getTraversalSource().V();
+               if (this.getAffectedNodeTypes().isPresent()) {
+                       g.has(AAIProperties.NODE_TYPE, P.within(this.getAffectedNodeTypes().get()));
+               }
+               g.has(OLD_FIELD).sideEffect(t -> {
+                       final Vertex v = t.get();
+                       logger.info("Migrating property for vertex " + v.toString());
+                       final String value = v.value(OLD_FIELD);
+                       v.property(OLD_FIELD).remove();
+                       v.property(NEW_FIELD, value);
+                       this.touchVertexProperties(v, false);
+                       this.changedVertexCount += 1;
+            logger.info(v.toString() + " : Migrated property " + OLD_FIELD + " to " + NEW_FIELD + " with value = " + value);
+               }).iterate();
+       }
+       
+       @Override
+       public Status getStatus() {
+               GraphTraversal<Vertex, Vertex> g = this.engine.asAdmin().getTraversalSource().V();
+               if (this.getAffectedNodeTypes().isPresent()) {
+                       g.has(AAIProperties.NODE_TYPE, P.within(this.getAffectedNodeTypes().get()));
+               }
+               long result = g.has(OLD_FIELD).count().next();
+               if (result == 0) {
+                       return Status.SUCCESS;
+               } else {
+                       return Status.FAILURE;
+               }
+       }
+
+       protected Optional<PropertyKey> addProperty() {
+
+               if (!graphMgmt.containsPropertyKey(this.NEW_FIELD)) {
+                       logger.info(" PropertyKey  [" + this.NEW_FIELD + "] created in the DB. ");
+                       return Optional.of(graphMgmt.makePropertyKey(this.NEW_FIELD).dataType(this.fieldType).cardinality(this.cardinality)
+                                       .make());
+               } else {
+                       logger.info(" PropertyKey  [" + this.NEW_FIELD + "] already existed in the DB. ");
+                       return Optional.empty();
+               }
+
+       }
+       
+       protected void addIndex(Optional<PropertyKey> key) {
+               if (isIndexed() && key.isPresent()) {
+                       if (graphMgmt.containsGraphIndex(key.get().name())) {
+                               logger.debug(" Index  [" + key.get().name() + "] already existed in the DB. ");
+                       } else {
+                               logger.info("Add index for PropertyKey: [" + key.get().name() + "]");
+                               graphMgmt.buildIndex(key.get().name(), Vertex.class).addKey(key.get()).buildCompositeIndex();
+                       }
+               }
+       }
+       public abstract boolean isIndexed();
+       
+}
diff --git a/src/main/java/org/onap/aai/migration/Status.java b/src/main/java/org/onap/aai/migration/Status.java
new file mode 100644 (file)
index 0000000..0338594
--- /dev/null
@@ -0,0 +1,29 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
/**
 * Outcome of a completed migration run.
 */
public enum Status {
	/** Migration completed without problems. */
	SUCCESS,
	/** Migration completed, but the logs should be reviewed. */
	CHECK_LOGS,
	/** Migration failed. */
	FAILURE
}
diff --git a/src/main/java/org/onap/aai/migration/ValueMigrator.java b/src/main/java/org/onap/aai/migration/ValueMigrator.java
new file mode 100644 (file)
index 0000000..6d02563
--- /dev/null
@@ -0,0 +1,104 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.util.Map;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.janusgraph.core.schema.JanusGraphManagement;
+import org.onap.aai.setup.SchemaVersions;
+
+/**
+ * A migration template for filling in default values that are missing or are empty
+ */
+@MigrationPriority(0)
+@MigrationDangerRating(1)
+public abstract class ValueMigrator extends Migrator {
+
+    protected final Map<String, Map<String, ?>> propertyValuePairByNodeType;
+    protected final Boolean updateExistingValues;
+       protected final JanusGraphManagement graphMgmt;
+
+    /**
+     *
+     * @param engine
+     * @param propertyValuePairByNodeType - format {nodeType: { property: newValue}}
+     * @param updateExistingValues - if true, updates the value regardless if it already exists
+     */
+       public ValueMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions, Map propertyValuePairByNodeType, Boolean updateExistingValues) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+           this.propertyValuePairByNodeType = propertyValuePairByNodeType;
+           this.updateExistingValues = updateExistingValues;
+               this.graphMgmt = engine.asAdmin().getManagementSystem();
+       }
+
+       /**
+        * Do not override this method as an inheritor of this class
+        */
+       @Override
+       public void run() {
+           updateValues();
+       }
+
+    protected void updateValues() {
+        for (Map.Entry<String, Map<String, ?>> entry: propertyValuePairByNodeType.entrySet()) {
+            String nodeType = entry.getKey();
+            Map<String, ?> propertyValuePair = entry.getValue();
+            for (Map.Entry<String, ?> pair : propertyValuePair.entrySet()) {
+                String property = pair.getKey();
+                Object newValue = pair.getValue();
+                try {
+                    GraphTraversal<Vertex, Vertex> g = this.engine.asAdmin().getTraversalSource().V()
+                            .has(AAIProperties.NODE_TYPE, nodeType);
+                    while (g.hasNext()) {
+                        Vertex v = g.next();
+                        if (v.property(property).isPresent() && !updateExistingValues) {
+                            String propertyValue = v.property(property).value().toString();
+                            if (propertyValue.isEmpty()) {
+                                v.property(property, newValue);
+                                logger.info(String.format("Node Type %s: Property %s is empty, adding value %s",
+                                        nodeType, property, newValue.toString()));
+                                this.touchVertexProperties(v, false);
+                            } else {
+                                logger.info(String.format("Node Type %s: Property %s value already exists - skipping",
+                                        nodeType, property));
+                            }
+                        } else {
+                            logger.info(String.format("Node Type %s: Property %s does not exist or " +
+                                    "updateExistingValues flag is set to True - adding the property with value %s",
+                                    nodeType, property, newValue.toString()));
+                            v.property(property, newValue);
+                            this.touchVertexProperties(v, false);
+                        }
+                    }
+                } catch (Exception e) {
+                    logger.error(String.format("caught exception updating aai-node-type %s's property %s's value to " +
+                            "%s: %s", nodeType, property, newValue.toString(), e.getMessage()));
+                    logger.error(e.getMessage());
+                }
+            }
+        }
+    }
+}
diff --git a/src/main/java/org/onap/aai/migration/VertexMerge.java b/src/main/java/org/onap/aai/migration/VertexMerge.java
new file mode 100644 (file)
index 0000000..abf19be
--- /dev/null
@@ -0,0 +1,255 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import java.io.UnsupportedEncodingException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+/**
+ * Recursively merges two vertices of the same node type. Properties, children and
+ * cousin edges are folded from the secondary vertex into the primary vertex; children
+ * present on both sides (matched on their URIs, i.e. the equality of their keys) are
+ * themselves merged recursively, and the secondary vertex is removed at the end.
+ */
+public class VertexMerge {
+
+	private final EELFLogger logger = EELFManager.getInstance().getLogger(this.getClass().getSimpleName());
+
+	private final GraphTraversalSource g;
+	private final TransactionalGraphEngine engine;
+	private final DBSerializer serializer;
+	private final EdgeSerializer edgeSerializer;
+	private final Loader loader;
+	private final NotificationHelper notificationHelper;
+	private final boolean hasNotifications;
+
+	/** Instances are created through {@link Builder}; hence the private constructor. */
+	private VertexMerge(Builder builder) {
+		this.engine = builder.getEngine();
+		this.serializer = builder.getSerializer();
+		this.g = engine.asAdmin().getTraversalSource();
+		this.edgeSerializer = builder.getEdgeSerializer();
+		this.loader = builder.getLoader();
+		this.notificationHelper = builder.getHelper();
+		this.hasNotifications = builder.isHasNotifications();
+	}
+	
+	/**
+	 * Merges vertices. forceCopy is a map of the form [{aai-node-type}:{set of properties}]
+	 * listing properties that overwrite the primary's value even when one is present.
+	 * When notifications are enabled, emits a DELETE event for the secondary vertex
+	 * (snapshotted before any mutation) and an UPDATE event for the merged primary.
+	 * @param primary vertex that survives the merge
+	 * @param secondary vertex whose data is folded into primary and which is then removed
+	 * @param forceCopy properties to copy unconditionally, keyed by aai-node-type
+	 * @param basePath base REST path used when building notification event URIs
+	 * @throws AAIException
+	 * @throws UnsupportedEncodingException
+	 */
+	public void performMerge(Vertex primary, Vertex secondary, Map<String, Set<String>> forceCopy, String basePath) throws AAIException, UnsupportedEncodingException {
+		// Snapshot the secondary BEFORE mutating anything so the DELETE event
+		// reflects its pre-merge state.
+		final Optional<Introspector> secondarySnapshot;
+		if (this.hasNotifications) {
+			secondarySnapshot = Optional.of(serializer.getLatestVersionView(secondary));
+		} else {
+			secondarySnapshot = Optional.empty();
+		}
+		mergeProperties(primary, secondary, forceCopy);
+		
+		Collection<Vertex> secondaryChildren = this.engine.getQueryEngine().findChildren(secondary);
+		Collection<Vertex> primaryChildren = this.engine.getQueryEngine().findChildren(primary);
+		
+		mergeChildren(primary, secondary, primaryChildren, secondaryChildren, forceCopy);
+		
+		Collection<Vertex> secondaryCousins = this.engine.getQueryEngine().findCousinVertices(secondary);
+		Collection<Vertex> primaryCousins = this.engine.getQueryEngine().findCousinVertices(primary);
+		
+		// Only cousins the primary does not already have get re-pointed to it.
+		secondaryCousins.removeAll(primaryCousins);
+		logger.info("removing vertex after merge: " + secondary );
+		if (this.hasNotifications && secondarySnapshot.isPresent()) {
+			this.notificationHelper.addEvent(secondary, secondarySnapshot.get(), EventAction.DELETE, this.serializer.getURIForVertex(secondary, false), basePath);
+		}
+		// Remove first, then re-edge: the secondary's cousin edges die with it.
+		secondary.remove();
+		for (Vertex v : secondaryCousins) {
+			this.edgeSerializer.addEdgeIfPossible(g, v, primary);
+		}
+		if (this.hasNotifications) {
+			final Introspector primarySnapshot = serializer.getLatestVersionView(primary);
+			this.notificationHelper.addEvent(primary, primarySnapshot, EventAction.UPDATE, this.serializer.getURIForVertex(primary, false), basePath);
+		}
+	}
+	
+	/**
+	 * Recursive merge step used for matched children: identical to
+	 * {@link #performMerge} but never emits notification events.
+	 * This method may go away if we choose to event on each modification performed.
+	 * @param primary
+	 * @param secondary
+	 * @param forceCopy
+	 * @throws AAIException
+	 * @throws UnsupportedEncodingException
+	 */
+	protected void performMergeHelper(Vertex primary, Vertex secondary, Map<String, Set<String>> forceCopy) throws AAIException, UnsupportedEncodingException {
+		mergeProperties(primary, secondary, forceCopy);
+		
+		Collection<Vertex> secondaryChildren = this.engine.getQueryEngine().findChildren(secondary);
+		Collection<Vertex> primaryChildren = this.engine.getQueryEngine().findChildren(primary);
+		
+		mergeChildren(primary, secondary, primaryChildren, secondaryChildren, forceCopy);
+		
+		Collection<Vertex> secondaryCousins = this.engine.getQueryEngine().findCousinVertices(secondary);
+		Collection<Vertex> primaryCousins = this.engine.getQueryEngine().findCousinVertices(primary);
+		
+		secondaryCousins.removeAll(primaryCousins);
+		secondary.remove();
+		for (Vertex v : secondaryCousins) {
+			this.edgeSerializer.addEdgeIfPossible(g, v, primary);
+		}
+	}
+	
+	/**
+	 * Builds the object URI for a vertex by introspecting it from its aai-node-type;
+	 * used as the identity key when matching children across the two subtrees.
+	 */
+	private String getURI(Vertex v) throws UnsupportedEncodingException, AAIException {
+		Introspector obj = loader.introspectorFromName(v.<String>property(AAIProperties.NODE_TYPE).orElse(""));
+		this.serializer.dbToObject(Collections.singletonList(v), obj, 0, true, "false");
+		return obj.getURI();
+
+	}
+	/**
+	 * Merges the two child sets keyed by URI: children present on BOTH sides are merged
+	 * recursively; children only under the secondary are re-parented to the primary.
+	 */
+	private void mergeChildren(Vertex primary, Vertex secondary, Collection<Vertex> primaryChildren, Collection<Vertex> secondaryChildren, Map<String, Set<String>> forceCopy) throws UnsupportedEncodingException, AAIException {
+		Map<String, Vertex> primaryMap = uriMap(primaryChildren);
+		Map<String, Vertex> secondaryMap = uriMap(secondaryChildren);
+		// mergeItems = intersection of the two URI sets (exists under both parents)
+		Set<String> primaryKeys = new HashSet<>(primaryMap.keySet());
+		Set<String> secondaryKeys = new HashSet<>(secondaryMap.keySet());
+		primaryKeys.retainAll(secondaryKeys);
+		final Set<String> mergeItems = new HashSet<>(primaryKeys);
+		// copyItems = URIs only under the secondary (secondary minus primary)
+		primaryKeys = new HashSet<>(primaryMap.keySet());
+		secondaryKeys = new HashSet<>(secondaryMap.keySet());
+		secondaryKeys.removeAll(primaryKeys);
+		final Set<String> copyItems = new HashSet<>(secondaryKeys);
+		
+		for (String key : mergeItems) {
+			this.performMergeHelper(primaryMap.get(key), secondaryMap.get(key), forceCopy);
+		}
+		
+		for (String key : copyItems) {
+			// Re-parent: add a tree edge to the primary, drop the one to the secondary.
+			this.edgeSerializer.addTreeEdgeIfPossible(g, secondaryMap.get(key), primary);
+			this.serializer.getEdgeBetween(EdgeType.TREE, secondary, secondaryMap.get(key)).remove();
+		}
+
+	}
+	
+	/** Indexes the given vertices by their object URI (see {@link #getURI}). */
+	private Map<String, Vertex> uriMap(Collection<Vertex> vertices) throws UnsupportedEncodingException, AAIException {
+		final Map<String, Vertex> result = new HashMap<>();
+		for (Vertex v : vertices) {
+			result.put(getURI(v), v);
+		}
+		return result;
+	}
+	
+	/**
+	 * Copies each secondary property onto the primary when the primary lacks it or the
+	 * property is force-copied for the primary's node type; list-typed properties present
+	 * on both sides are concatenated instead of overwritten.
+	 */
+	private void mergeProperties(Vertex primary, Vertex secondary, Map<String, Set<String>> forceCopy) throws AAIUnknownObjectException {
+		final String primaryType = primary.<String>property(AAIProperties.NODE_TYPE).orElse("");
+		final String secondaryType = secondary.<String>property(AAIProperties.NODE_TYPE).orElse("");
+
+		final Introspector secondaryObj = loader.introspectorFromName(secondaryType);
+		secondary.properties().forEachRemaining(prop -> {
+			if (!primary.property(prop.key()).isPresent() || forceCopy.getOrDefault(primaryType, new HashSet<String>()).contains(prop.key())) {
+				primary.property(prop.key(), prop.value());
+			}
+			if (primary.property(prop.key()).isPresent() && secondary.property(prop.key()).isPresent() && secondaryObj.isListType(prop.key())) {
+				mergeCollection(primary, prop.key(), secondary.values(prop.key()));
+			}
+		});
+	}
+	/** Appends each secondary value of a list-typed property onto the primary. */
+	private void mergeCollection(Vertex primary, String propName, Iterator<Object> secondaryValues) {
+		secondaryValues.forEachRemaining(item -> {
+			primary.property(propName, item);
+		});
+	}
+	
+	
+	/**
+	 * Fluent builder for {@link VertexMerge}.
+	 * NOTE(review): edgeSerializer is optional here, but performMerge dereferences it
+	 * unconditionally - building without edgeSerializer(...) looks like it would NPE
+	 * during a merge. TODO confirm whether that is intended.
+	 */
+	public static class Builder {
+		private final TransactionalGraphEngine engine;
+
+		private final DBSerializer serializer;
+		private EdgeSerializer edgeSerializer;
+
+		private final Loader loader;
+		private NotificationHelper helper = null;
+		private boolean hasNotifications = false;
+		public Builder(Loader loader, TransactionalGraphEngine engine, DBSerializer serializer) {
+			this.loader = loader;
+			this.engine = engine;
+			this.serializer = serializer;
+		}
+		
+		/** Enables event notifications for merges built from this builder. */
+		public Builder addNotifications(NotificationHelper helper) {
+			this.helper = helper;
+			this.hasNotifications = true;
+			return this;
+		}
+
+		public Builder edgeSerializer(EdgeSerializer edgeSerializer){
+			this.edgeSerializer = edgeSerializer;
+			return this;
+		}
+
+		public EdgeSerializer getEdgeSerializer(){
+			return edgeSerializer;
+		}
+
+		public VertexMerge build() {
+			return new VertexMerge(this);
+		}
+		
+		protected TransactionalGraphEngine getEngine() {
+			return engine;
+		}
+
+		protected DBSerializer getSerializer() {
+			return serializer;
+		}
+
+		protected Loader getLoader() {
+			return loader;
+		}
+
+		protected NotificationHelper getHelper() {
+			return helper;
+		}
+
+		protected boolean isHasNotifications() {
+			return hasNotifications;
+		}
+		
+	}
+	
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/ContainmentDeleteOtherVPropertyMigration.java b/src/main/java/org/onap/aai/migration/v12/ContainmentDeleteOtherVPropertyMigration.java
new file mode 100644 (file)
index 0000000..361e8bc
--- /dev/null
@@ -0,0 +1,106 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import java.util.Optional;
+
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.enums.EdgeProperty;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.edges.enums.AAIDirection;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+
+//@Enabled
+@MigrationPriority(-100)
+@MigrationDangerRating(10)
+public class ContainmentDeleteOtherVPropertyMigration extends Migrator {
+
+       private boolean success = true;
+       
+       public ContainmentDeleteOtherVPropertyMigration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+       }
+       
+       //just for testing using test edge rule files
+       public ContainmentDeleteOtherVPropertyMigration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions, String edgeRulesFile) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+       }
+       
+       @Override
+       public void run() {
+               try {
+                       engine.asAdmin().getTraversalSource().E().sideEffect(t -> {
+                               Edge e = t.get();
+                               logger.info("out vertex: " + e.outVertex().property("aai-node-type").value() + 
+                                                               " in vertex: " + e.inVertex().property("aai-node-type").value() +
+                                                               " label : " + e.label());
+                               if (e.property(EdgeProperty.CONTAINS.toString()).isPresent() &&
+                                               e.property(EdgeProperty.DELETE_OTHER_V.toString()).isPresent()) {
+                                       //in case of orphans
+                                       if (!("constrained-element-set".equals(e.inVertex().property("aai-node-type").value())
+                                                       && "model-element".equals(e.outVertex().property("aai-node-type").value()))) {
+                                               //skip the weird horrible problem child edge
+                                               String containment = (String) e.property(EdgeProperty.CONTAINS.toString()).value();
+                                               if (AAIDirection.OUT.toString().equalsIgnoreCase(containment) ||
+                                                               AAIDirection.IN.toString().equalsIgnoreCase(containment) ||
+                                                               AAIDirection.BOTH.toString().equalsIgnoreCase(containment)) {
+                                                       logger.info("updating delete-other-v property");
+                                                       e.property(EdgeProperty.DELETE_OTHER_V.toString(), containment);
+                                               }
+                                       }
+                               }
+                       }).iterate();
+               } catch (Exception e) {
+                       logger.info("error encountered " + e.getClass() + " " + e.getMessage() + " " + ExceptionUtils.getFullStackTrace(e));
+                       logger.error("error encountered " + e.getClass() + " " + e.getMessage() + " " + ExceptionUtils.getFullStackTrace(e));
+                       success = false;
+               }
+               
+       }
+
+       @Override
+       public Status getStatus() {
+               if (success) {
+                       return Status.SUCCESS;
+               } else {
+                       return Status.FAILURE;
+               }
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.empty();
+       }
+
+       @Override
+       public String getMigrationName() {
+               return "migrate-containment-delete-other-v";
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/DeletePInterface.java b/src/main/java/org/onap/aai/migration/v12/DeletePInterface.java
new file mode 100644 (file)
index 0000000..1089b2f
--- /dev/null
@@ -0,0 +1,131 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+@MigrationPriority(0)
+@MigrationDangerRating(0)
+public class DeletePInterface extends Migrator {
+       private boolean success = true;
+       private final GraphTraversalSource g;
+       public DeletePInterface(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               this.g = this.engine.asAdmin().getTraversalSource();
+       }
+
+       @Override
+       public void run() {
+               int count = 0;
+               int skipCount = 0;
+               int errorCount = 0;
+               logger.info("---------- Start deleting p-interfaces  ----------");
+               List<Vertex> pIntfList;
+               try {
+                       pIntfList = g.V().has(AAIProperties.NODE_TYPE, "p-interface").has("source-of-truth", "AAI-CSVP-INSTARAMS")
+                                       .where(this.engine.getQueryBuilder().createEdgeTraversal(EdgeType.TREE, "p-interface", "pnf")
+                                       .<GraphTraversal<?, ?>>getQuery()).toList();
+                       
+                       if (pIntfList != null && !pIntfList.isEmpty()) {
+                               for (Vertex pInterfV : pIntfList) {
+                                       try {
+                                               Collection<Vertex> cousins = this.engine.getQueryEngine().findCousinVertices(pInterfV);
+                                               
+                                               Collection<Vertex> children = this.engine.getQueryEngine().findChildren(pInterfV);
+                                               if (cousins == null || cousins.isEmpty()) {
+                                                       if (children == null || children.isEmpty()) {
+                                                               logger.info("Delete p-interface: " + getVertexURI(pInterfV));
+                                                               pInterfV.remove();
+                                                               count++;
+                                                       } else {
+                                                               skipCount++;
+                                                               logger.info("skip p-interface " + getVertexURI(pInterfV) + " due to an existing relationship");
+                                                       }
+                                               } else {
+                                                       skipCount++;
+                                                       logger.info("skip p-interface " + getVertexURI(pInterfV) + " due to an existing relationship");
+                                               }
+                                       } catch (Exception e) {
+                                               success = false;
+                                               errorCount++;
+                                               logger.error("error occured in deleting p-interface " + getVertexURI(pInterfV) + ", "+ e);
+                                       }
+                               }
+                       logger.info ("\n \n ******* Final Summary for deleting p-interfaces Migration ********* \n");
+                       logger.info("Number of p-interfaces removed: "+ count +"\n");
+                       logger.info("Number of p-interfaces skipped: "+ skipCount  +"\n");
+                       logger.info("Number of p-interfaces failed to delete due to error : "+ errorCount  +"\n");
+                       }
+               } catch (AAIException e) {
+                       success = false;
+                       logger.error("error occured in deleting p-interfaces " + e);
+               }
+       }
+       
+       private String getVertexURI(Vertex v) {
+               if (v != null) {
+               if (v.property("aai-uri").isPresent()) {
+                       return v.property("aai-uri").value().toString();
+                       } else {
+                               return "Vertex ID: " + v.id().toString();
+                       }
+               } else {
+                       return "";
+               }
+       }
+
+       @Override
+       public Status getStatus() {
+               if (success) {
+                       return Status.SUCCESS;
+               } else {
+                       return Status.FAILURE;
+               }
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.of(new String[] { "p-interface" });
+       }
+
+       @Override
+       public String getMigrationName() {
+               return "DeletePInterface";
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/EdgeReportForToscaMigration.java b/src/main/java/org/onap/aai/migration/v12/EdgeReportForToscaMigration.java
new file mode 100644 (file)
index 0000000..1bdddf3
--- /dev/null
@@ -0,0 +1,162 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+/*-
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.*;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+import java.util.*;
+
+@MigrationPriority(0)
+@MigrationDangerRating(0)
+public class EdgeReportForToscaMigration extends Migrator {
+
+    private boolean success = true;
+
+    public EdgeReportForToscaMigration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+    }
+
+    @Override
+    public Status getStatus() {
+       if (success) {
+               return Status.SUCCESS;
+       } else {
+               return Status.FAILURE;
+       }
+    }
+
+    @Override
+    public void run() {
+               Vertex out = null;
+               Vertex in = null;
+               String label = "";
+               String outURI = "";
+               String inURI = "";
+               String parentCousinIndicator = "NONE";
+               String oldEdgeString = null;
+               List<String> edgeMissingParentProperty = new ArrayList<>();
+               StringBuilder sb = new StringBuilder();
+               Set<String> noURI = new HashSet<>();
+               sb.append("----------EDGES----------\n");
+
+               GraphTraversalSource g = engine.asAdmin().getTraversalSource();
+
+       try {
+                       Set<Edge> edges = g.E().toSet();
+               for (Edge edge : edges) {
+                               out = edge.outVertex();
+                               in = edge.inVertex();
+                               label = edge.label();
+                               outURI = this.getVertexURI(out);
+                               inURI = this.getVertexURI(in);
+                               parentCousinIndicator = "NONE";
+                               oldEdgeString = this.toStringForPrinting(edge, 1);
+
+                               if (!outURI.startsWith("/")) {
+                                       noURI.add(outURI);
+                               }
+                               if (!inURI.startsWith("/")) {
+                                       noURI.add(inURI);
+                               }
+
+                               if (out == null || in == null) {
+                                       logger.error(edge.id() + " invalid because one vertex was null: out=" + edge.outVertex() + " in=" + edge.inVertex());
+                               } else {
+
+                                       if (edge.property("contains-other-v").isPresent()) {
+                                               parentCousinIndicator = edge.property("contains-other-v").value().toString();
+                                       } else if (edge.property("isParent").isPresent()) {
+                                               if ((Boolean)edge.property("isParent").value()) {
+                                                       parentCousinIndicator = "OUT";
+                                               } else if (edge.property("isParent-REV").isPresent() && (Boolean)edge.property("isParent-REV").value()) {
+                                                       parentCousinIndicator = "IN";
+                                               }
+                                       } else {
+                                               edgeMissingParentProperty.add(this.toStringForPrinting(edge, 1));
+                                       }
+
+                                       sb.append(outURI + "|" + label + "|" + inURI + "|" + parentCousinIndicator + "\n");
+                               }
+                       }
+        } catch(Exception ex){
+               logger.error("exception occurred during migration, failing: out=" + out + " in=" + in + "edge=" + oldEdgeString, ex);
+               success = false;
+        }
+               sb.append("--------EDGES END--------\n");
+
+               logger.info(sb.toString());
+               edgeMissingParentProperty.forEach(s -> logger.warn("Edge Missing Parent Property: " + s));
+               logger.info("Edge Missing Parent Property Count: " + edgeMissingParentProperty.size());
+               logger.info("Vertex Missing URI Property Count: " + noURI.size());
+
+       }
+
+       private String getVertexURI(Vertex v) {
+       if (v.property("aai-uri").isPresent()) {
+               return v.property("aai-uri").value().toString();
+               } else {
+                       return v.id().toString() + "(" + v.property("aai-node-type").value().toString() + ")";
+               }
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.empty();
+       }
+
+       @Override
+       public String getMigrationName() {
+               return "edge-report-for-tosca-migration";
+       }
+
+       @Override
+       public void commit() {
+               engine.rollback();
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateModelVerDistriubutionStatusProperty.java b/src/main/java/org/onap/aai/migration/v12/MigrateModelVerDistriubutionStatusProperty.java
new file mode 100644 (file)
index 0000000..c09643f
--- /dev/null
@@ -0,0 +1,85 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.*;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+import java.util.Optional;
+
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+public class MigrateModelVerDistriubutionStatusProperty extends Migrator{
+
+    private final String PARENT_NODE_TYPE = "model-ver";
+    private boolean success = true;
+
+    public MigrateModelVerDistriubutionStatusProperty(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+    }
+
+
+
+    @Override
+    public void run() {
+
+
+        GraphTraversal<Vertex, Vertex> f = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE,"model-ver");
+
+        while(f.hasNext()) {
+            Vertex v = f.next();
+            try {
+                    v.property("distribution-status", "DISTRIBUTION_COMPLETE_OK");
+                    logger.info("changed model-ver.distribution-status property value for model-version-id: " + v.property("model-version-id").value());
+
+            } catch (Exception e) {
+                e.printStackTrace();
+                success = false;
+                logger.error("encountered exception for model-version-id:" + v.property("model-version-id").value(), e);
+            }
+        }
+    }
+
+
+    @Override
+    public Status getStatus() {
+        if (success) {
+            return Status.SUCCESS;
+        } else {
+            return Status.FAILURE;
+        }
+    }
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{PARENT_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateModelVerDistriubutionStatusProperty";
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/MigrateServiceInstanceToConfiguration.java b/src/main/java/org/onap/aai/migration/v12/MigrateServiceInstanceToConfiguration.java
new file mode 100644 (file)
index 0000000..b4208af
--- /dev/null
@@ -0,0 +1,193 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import java.io.UnsupportedEncodingException;
+import java.util.Iterator;
+import java.util.Optional;
+import java.util.UUID;
+
+import org.apache.tinkerpop.gremlin.process.traversal.P;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+//@Enabled
+@MigrationPriority(10)
+@MigrationDangerRating(10)
+public class MigrateServiceInstanceToConfiguration extends Migrator {
+
+       private boolean success = true;
+       private final String CONFIGURATION_NODE_TYPE = "configuration";
+       private final String SERVICE_INSTANCE_NODE_TYPE = "service-instance";
+       private Introspector configObj;
+
+       public MigrateServiceInstanceToConfiguration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               try {
+                       this.configObj = this.loader.introspectorFromName(CONFIGURATION_NODE_TYPE);
+               } catch (AAIUnknownObjectException e) {
+                       this.configObj = null;
+               }
+       }
+
+       @Override
+       public void run() {             
+               Vertex serviceInstance = null;
+               Vertex configuration = null;
+               String serviceInstanceId = "", tunnelBandwidth = "";
+               String bandwidthTotal, configType, nodeType;
+               GraphTraversal<Vertex, Vertex> serviceInstanceItr;
+               Iterator<Vertex> configurationItr;
+
+               try {
+                       serviceInstanceItr = this.engine.asAdmin().getTraversalSource().V()
+                                       .has(AAIProperties.NODE_TYPE, P.within(getAffectedNodeTypes().get()))
+                                       .where(this.engine.getQueryBuilder()
+                                                       .createEdgeTraversal(EdgeType.TREE, "service-instance", "service-subscription")
+                                                       .getVerticesByProperty("service-type", "DHV")
+                                                       .<GraphTraversal<?, ?>>getQuery());
+
+                       if (serviceInstanceItr == null || !serviceInstanceItr.hasNext()) {
+                               logger.info("No servince-instance nodes found with service-type of DHV");
+                               return;
+                       }
+
+                       // iterate through all service instances of service-type DHV
+                       while (serviceInstanceItr.hasNext()) {
+                               serviceInstance = serviceInstanceItr.next();
+
+                               if (serviceInstance != null && serviceInstance.property("bandwidth-total").isPresent()) {
+                                       serviceInstanceId = serviceInstance.value("service-instance-id");
+                                       logger.info("Processing service instance with id=" + serviceInstanceId);
+                                       bandwidthTotal = serviceInstance.value("bandwidth-total");
+
+                                       if (bandwidthTotal != null && !bandwidthTotal.isEmpty()) {
+
+                                               // check for existing edges to configuration nodes 
+                                               configurationItr = serviceInstance.vertices(Direction.OUT, "has");
+
+                                               // create new configuration node if service-instance does not have existing ones
+                                               if (!configurationItr.hasNext()) {
+                                                       logger.info(serviceInstanceId + " has no existing configuration nodes, creating new node");
+                                                       createConfigurationNode(serviceInstance, bandwidthTotal);
+                                                       continue;
+                                               }
+
+                                               // in case if configuration nodes exist, but none are DHV
+                                               boolean hasDHVConfig = false;
+
+                                               // service-instance has existing configuration nodes
+                                               while (configurationItr.hasNext()) {
+                                                       configuration = configurationItr.next();
+                                                       nodeType = configuration.value("aai-node-type").toString();
+
+                                                       if (configuration != null && "configuration".equalsIgnoreCase(nodeType)) {
+                                                               logger.info("Processing configuration node with id=" + configuration.property("configuration-id").value());
+                                                               configType = configuration.value("configuration-type");
+                                                               logger.info("Configuration type: " + configType);
+
+                                                               // if configuration-type is DHV, update tunnel-bandwidth to bandwidth-total value
+                                                               if ("DHV".equalsIgnoreCase(configType)) {
+                                                                       if (configuration.property("tunnel-bandwidth").isPresent()) {
+                                                                               tunnelBandwidth = configuration.value("tunnel-bandwidth");
+                                                                       } else {
+                                                                               tunnelBandwidth = "";
+                                                                       }
+
+                                                                       logger.info("Existing tunnel-bandwidth: " + tunnelBandwidth);
+                                                                       configuration.property("tunnel-bandwidth", bandwidthTotal);
+                                                                       touchVertexProperties(configuration, false);
+                                                                       logger.info("Updated tunnel-bandwidth: " + configuration.value("tunnel-bandwidth"));
+                                                                       hasDHVConfig = true;
+                                                               }
+                                                       }
+                                               }
+
+                                               // create new configuration node if none of existing config nodes are of type DHV 
+                                               if (!hasDHVConfig) {
+                                                       logger.info(serviceInstanceId + " has existing configuration nodes, but none are DHV, create new node");
+                                                       createConfigurationNode(serviceInstance, bandwidthTotal);
+                                               }
+                                       }
+                               }
+                       }
+               } catch (AAIException | UnsupportedEncodingException e) {
+                       logger.error("Caught exception while processing service instance with id=" + serviceInstanceId + " | " + e.toString());
+                       success = false;
+               }
+       }
+
+       private void createConfigurationNode(Vertex serviceInstance, String bandwidthTotal) throws UnsupportedEncodingException, AAIException {
+               // create new vertex
+               Vertex configurationNode = serializer.createNewVertex(configObj);
+
+               // configuration-id: UUID format
+               String configurationUUID = UUID.randomUUID().toString();
+               configObj.setValue("configuration-id", configurationUUID);
+
+               // configuration-type: DHV
+               configObj.setValue("configuration-type", "DHV");
+
+               // migrate the bandwidth-total property from the service-instance to the 
+               // tunnel-bandwidth property of the related configuration object
+               configObj.setValue("tunnel-bandwidth", bandwidthTotal);
+
+               // create edge between service instance and configuration: cousinEdge(out, in)
+               createCousinEdge(serviceInstance, configurationNode);
+
+               // serialize edge & vertex, takes care of everything
+               serializer.serializeSingleVertex(configurationNode, configObj, "migrations");
+               logger.info("Created configuration node with uuid=" + configurationUUID + ", tunnel-bandwidth=" + bandwidthTotal);
+       }
+
+       @Override
+       public Status getStatus() {
+               if (success) {
+                       return Status.SUCCESS;
+               } else {
+                       return Status.FAILURE;
+               }
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.of(new String[] {SERVICE_INSTANCE_NODE_TYPE});
+       }
+
+       @Override
+       public String getMigrationName() {
+               return "service-instance-to-configuration";
+       }
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/SDWANSpeedChangeMigration.java b/src/main/java/org/onap/aai/migration/v12/SDWANSpeedChangeMigration.java
new file mode 100644 (file)
index 0000000..b420c57
--- /dev/null
@@ -0,0 +1,258 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.*;
+import org.onap.aai.edges.enums.EdgeType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+import java.util.*;
+
+@MigrationPriority(1)
+@MigrationDangerRating(1)
+//@Enabled
+public class SDWANSpeedChangeMigration extends Migrator {
+
+    private final String PARENT_NODE_TYPE = "alloted-resource";
+    private boolean success = true;
+
+    Vertex allottedRsrcVertex;
+
+    Map<String, String> bandwidthMap = new HashMap<>();
+    Set<String> bandWidthSet = new HashSet<>();
+
+    GraphTraversal<Vertex, Vertex> allottedRsrcTraversal;
+    GraphTraversal<Vertex, Vertex> tunnelXConnectTraversal;
+    GraphTraversal<Vertex, Vertex> pinterfaceTraversal;
+    GraphTraversal<Vertex, Vertex> plinkTraversal;
+
+    public SDWANSpeedChangeMigration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        bandWidthSet.add("bandwidth-up-wan1");
+        bandWidthSet.add("bandwidth-down-wan1");
+        bandWidthSet.add("bandwidth-up-wan2");
+        bandWidthSet.add("bandwidth-down-wan2");
+    }
+
+
+    @Override
+    public void run() {
+
+        logger.info("Started the migration "+ getMigrationName());
+
+        try {
+
+            allottedRsrcTraversal = this.engine.asAdmin().getTraversalSource().V()
+                    .has("aai-node-type", "service-subscription")
+                    .has("service-type", "DHV")
+                    .in("org.onap.relationships.inventory.BelongsTo")
+                    .has("aai-node-type", "service-instance")
+                    .out("org.onap.relationships.inventory.Uses")
+                    .has("aai-node-type", "allotted-resource")
+                    .where(
+                            this.engine.getQueryBuilder()
+                                    .createEdgeTraversal(EdgeType.TREE, "allotted-resource", "service-instance")
+                                    .createEdgeTraversal(EdgeType.TREE, "service-instance", "service-subscription")
+                                    .<GraphTraversal<Vertex, Vertex>>getQuery()
+                                    .has("service-type", "VVIG")
+                    );
+
+            if(!(allottedRsrcTraversal.hasNext())){
+
+                logger.info("unable to find allotted resource to DHV as cousin and child of VVIG");
+            }
+
+            while (allottedRsrcTraversal.hasNext()) {
+                bandwidthMap.clear();
+
+                allottedRsrcVertex = allottedRsrcTraversal.next();
+                String allottedResourceId = allottedRsrcVertex.property("id").value().toString();
+                logger.info("Found an allotted resource with id " + allottedResourceId);
+
+                tunnelXConnectTraversal = this.engine.asAdmin().getTraversalSource()
+                        .V(allottedRsrcVertex)
+                        .in("org.onap.relationships.inventory.BelongsTo")
+                        .has("aai-node-type", "tunnel-xconnect");
+
+                if (tunnelXConnectTraversal != null && tunnelXConnectTraversal.hasNext()) {
+                    Vertex xConnect = tunnelXConnectTraversal.next();
+                    String tunnelId = xConnect.property("id").value().toString();
+                    logger.info("Found an tunnelxconnect object with id " + tunnelId);
+                    extractBandwidthProps(xConnect);
+                    modifyPlink(allottedRsrcVertex);
+                } else {
+                    logger.info("Unable to find the tunnel connect for the current allotted resource traversal");
+                }
+
+            }
+        } catch (AAIException e) {
+            e.printStackTrace();
+            success = false;
+        }
+
+        logger.info("Successfully finished the " + getMigrationName());
+    }
+
+    public void extractBandwidthProps(Vertex vertex) {
+        logger.info("Trying to extract bandwith props");
+        bandWidthSet.stream().forEach((key) -> {
+            if (vertex.property(key).isPresent()) {
+                bandwidthMap.put(key, vertex.property(key).value().toString());
+            }
+        });
+        logger.info("Extracted bandwith props for tunnelXConnect " +vertex.value("id"));
+    }
+
+    public void modifyPlink(Vertex v) {
+
+        try {
+            pinterfaceTraversal = this.engine.asAdmin().getTraversalSource().V(v)
+                    .in("org.onap.relationships.inventory.Uses").has("aai-node-type", "service-instance")
+                    .where(
+                            __.out("org.onap.relationships.inventory.BelongsTo")
+                                    .has("aai-node-type", "service-subscription")
+                                    .has("service-type", "DHV")
+                    )
+                    .out("org.onap.relationships.inventory.ComposedOf").has("aai-node-type", "generic-vnf")
+                    .out("tosca.relationships.HostedOn").has("aai-node-type", "vserver")
+                    .out("tosca.relationships.HostedOn").has("aai-node-type", "pserver")
+                    .in("tosca.relationships.network.BindsTo").has("aai-node-type", "p-interface");
+        } catch (Exception e) {
+            logger.info("error trying to find p interfaces");
+        }
+
+
+        while (pinterfaceTraversal.hasNext()) {
+
+            Vertex pInterfaceVertex = pinterfaceTraversal.next();
+
+            String pinterfaceName = pInterfaceVertex.property("interface-name").value().toString();
+            logger.info("p-interface "+ pinterfaceName + " found from traversal from allotted-resource " +v.value("id"));
+            String[] parts = pinterfaceName.split("/");
+
+            if (parts[parts.length - 1].equals("10")) {
+
+                logger.info("Found the pinterface with the interface name ending with /10");
+
+                try {
+                    plinkTraversal = this.engine.asAdmin().getTraversalSource()
+                            .V(pInterfaceVertex)
+                            .out("tosca.relationships.network.LinksTo")
+                            .has("aai-node-type", "physical-link");
+                } catch (Exception e) {
+                    logger.info("error trying to find the p Link for /10");
+                }
+                if (plinkTraversal != null && plinkTraversal.hasNext()) {
+                    Vertex pLink = plinkTraversal.next();
+
+
+                    if ( bandwidthMap.containsKey("bandwidth-up-wan1")
+                            && bandwidthMap.containsKey("bandwidth-down-wan1")
+                            && !(("").equals(bandwidthMap.get("bandwidth-up-wan1").replaceAll("[^0-9]", "").trim()))
+                            && !(("").equals(bandwidthMap.get("bandwidth-down-wan1").replaceAll("[^0-9]", "").trim())))
+                    {
+
+                        pLink.property("service-provider-bandwidth-up-value", Integer.valueOf(bandwidthMap.get("bandwidth-up-wan1").replaceAll("[^0-9]", "").trim()));
+                        pLink.property("service-provider-bandwidth-up-units", "Mbps");
+                        pLink.property("service-provider-bandwidth-down-value", Integer.valueOf(bandwidthMap.get("bandwidth-down-wan1").replaceAll("[^0-9]", "").trim()));
+                        pLink.property("service-provider-bandwidth-down-units", "Mbps");
+                        logger.info("Successfully modified the plink with link name ", pLink.property("link-name").value().toString());
+                        this.touchVertexProperties(pLink, false);
+                    } else {
+                        logger.info("missing up and down vals for the plink with link name ", pLink.property("link-name").value().toString());
+                    }
+
+
+                } else {
+                    logger.info("missing plink for p interface" + pinterfaceName);
+                }
+
+            }
+
+            if (parts[parts.length - 1].equals("11")) {
+
+                logger.info("Found the pinterface with the interface name ending with /11");
+                try {
+                    plinkTraversal = this.engine.asAdmin()
+                            .getTraversalSource()
+                            .V(pInterfaceVertex)
+                            .out("tosca.relationships.network.LinksTo")
+                            .has("aai-node-type", "physical-link");
+                } catch (Exception e) {
+                    logger.info("error trying to find the p Link for /11");
+                }
+
+                if (plinkTraversal != null && plinkTraversal.hasNext()) {
+                    Vertex pLink = plinkTraversal.next();
+
+
+                    if ( bandwidthMap.containsKey("bandwidth-up-wan2")
+                            && bandwidthMap.containsKey("bandwidth-down-wan2")
+                            && !(("").equals(bandwidthMap.get("bandwidth-up-wan2").replaceAll("[^0-9]", "").trim()))
+                            && !(("").equals(bandwidthMap.get("bandwidth-down-wan2").replaceAll("[^0-9]", "").trim())))
+                    {
+                        pLink.property("service-provider-bandwidth-up-value", Integer.valueOf(bandwidthMap.get("bandwidth-up-wan2").replaceAll("[^0-9]", "").trim()));
+                        pLink.property("service-provider-bandwidth-up-units", "Mbps");
+                        pLink.property("service-provider-bandwidth-down-value", Integer.valueOf(bandwidthMap.get("bandwidth-down-wan2").replaceAll("[^0-9]", "").trim()));
+                        pLink.property("service-provider-bandwidth-down-units", "Mbps");
+                        logger.info("Successfully modified the plink with link name ", pLink.property("link-name").value().toString());
+                        this.touchVertexProperties(pLink, false);
+                    } else {
+                       logger.error("missing up and down vals for the plink with link name ", pLink.property("link-name").value().toString());
+                    }
+
+                } else {
+                    logger.info("missing plink for p interface" + pinterfaceName);
+                }
+            }
+        }
+    }
+
+
+    @Override
+    public Status getStatus() {
+        if (success) {
+            return Status.SUCCESS;
+        } else {
+            return Status.FAILURE;
+        }
+    }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+
+        return Optional.of(new String[]{PARENT_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "SDWANSpeedChangeMigration";
+    }
+
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/UpdateAaiUriIndexMigration.java b/src/main/java/org/onap/aai/migration/v12/UpdateAaiUriIndexMigration.java
new file mode 100644 (file)
index 0000000..33689b5
--- /dev/null
@@ -0,0 +1,328 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.janusgraph.core.Cardinality;
+import org.janusgraph.core.PropertyKey;
+import org.janusgraph.core.schema.SchemaAction;
+import org.janusgraph.core.schema.SchemaStatus;
+import org.janusgraph.core.schema.JanusGraphIndex;
+import org.janusgraph.core.schema.JanusGraphManagement;
+import org.janusgraph.graphdb.database.management.ManagementSystem;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.migration.*;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+
+import java.time.temporal.ChronoUnit;
+import java.util.*;
+
+/**
+ * Remove old aai-uri index per
+ * https://github.com/JanusGraph/janusgraph/wiki/Indexing
+ */
+
+@Enabled
+
+@MigrationPriority(500)
+@MigrationDangerRating(1000)
+public class UpdateAaiUriIndexMigration extends Migrator {
+
+       private final SchemaVersion version;
+       private final ModelType introspectorFactoryType;
+       private GraphTraversalSource g;
+       private JanusGraphManagement graphMgmt;
+       private Status status = Status.SUCCESS;
+
+       private String retiredName = AAIProperties.AAI_URI + "-RETIRED-" + System.currentTimeMillis();
+
+       /**
+        * Instantiates a new migrator.
+        *
+        * @param engine
+        */
+       public UpdateAaiUriIndexMigration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) throws AAIException {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               version = schemaVersions.getDefaultVersion();
+               introspectorFactoryType = ModelType.MOXY;
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);
+               g = this.engine.asAdmin().getTraversalSource();
+               this.engine.rollback();
+               graphMgmt = engine.asAdmin().getManagementSystem();
+
+       }
+
+       @Override
+       public Status getStatus() {
+               return status;
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.empty();
+       }
+
+       @Override
+       public String getMigrationName() {
+               return "UpdateAaiUriIndex";
+       }
+
+       @Override
+       public void run() {
+
+               // close all but current open titan instances
+               closeAllButCurrentInstances();
+
+               // get all indexes containing aai-uri
+               Set<IndexDetails> indexes = getIndexesWithAaiUri();
+               logger.info("Found " + indexes.size() + " aai uri index.");
+               indexes.stream().map(s -> "\t" + s.getIndexName() + " : " + s.getPropertyName() + " : " + s.getStatus() ).forEach(System.out::println);
+
+               renameAaiUriIndex(indexes);
+
+               // remove all of the aai-uri indexes that are in the list
+               removeIndexes(indexes);
+
+               //retire old property
+               verifyGraphManagementIsOpen();
+               PropertyKey aaiUri = graphMgmt.getPropertyKey(AAIProperties.AAI_URI);
+               if (aaiUri != null) {
+                       graphMgmt.changeName(aaiUri, retiredName);
+               }
+               graphMgmt.commit();
+
+               //remove all aai uri keys
+               logger.info("Remove old keys.");
+               dropAllKeyProperties(indexes);
+
+               // add aai-uri unique index
+               logger.info("Create new unique aai-uri index");
+               createUniqueAaiUriIndex();
+
+
+               // change index status to ENABLED STATE
+               logger.info("Enable index");
+               enableIndex();
+
+               this.engine.startTransaction();
+
+               logger.info("Checking and dropping retired properties.");
+               g = this.engine.asAdmin().getTraversalSource();
+               g.V().has(retiredName).properties(retiredName).drop().iterate();
+               logger.info("Done.");
+       }
+
+
+       protected void createUniqueAaiUriIndex() {
+               verifyGraphManagementIsOpen();
+               // create new aaiuri property
+               PropertyKey aaiUriProperty = graphMgmt.getPropertyKey(AAIProperties.AAI_URI);
+               if (aaiUriProperty == null) {
+                       logger.info("Creating new aai-uri property.");
+                       aaiUriProperty = graphMgmt.makePropertyKey(AAIProperties.AAI_URI).dataType(String.class)
+                                       .cardinality(Cardinality.SINGLE).make();
+               }
+               logger.info("Creating new aai-uri index.");
+               graphMgmt.buildIndex(AAIProperties.AAI_URI, Vertex.class).addKey(aaiUriProperty).unique().buildCompositeIndex();
+               graphMgmt.commit();
+       }
+
+       private void dropAllKeyProperties(Set<IndexDetails> indexes) {
+               indexes.stream().map(e -> e.getPropertyName()).distinct().forEach(p -> {
+                       verifyGraphManagementIsOpen();
+                       if (graphMgmt.getPropertyKey(p) != null) {
+                               graphMgmt.getPropertyKey(p).remove();
+                       }
+                       graphMgmt.commit();
+               });
+       }
+
+       private void renameAaiUriIndex(Set<IndexDetails> indexes) {
+               verifyGraphManagementIsOpen();
+               indexes.stream().filter(s -> s.getIndexName().equals(AAIProperties.AAI_URI)).forEach( s -> {
+                       JanusGraphIndex index = graphMgmt.getGraphIndex(s.getIndexName());
+                       graphMgmt.changeName(index, retiredName);
+                       s.setIndexName(retiredName);
+               });
+               graphMgmt.commit();
+       }
+
+       private void removeIndexes(Set<IndexDetails> indexes) {
+
+               for (IndexDetails index : indexes) {
+                       verifyGraphManagementIsOpen();
+
+                       JanusGraphIndex aaiUriIndex = graphMgmt.getGraphIndex(index.getIndexName());
+
+                       if (!index.getStatus().equals(SchemaStatus.DISABLED)) {
+                               logger.info("Disabling index: " + index.getIndexName());
+                               logger.info("\tCurrent state: " + aaiUriIndex.getIndexStatus(graphMgmt.getPropertyKey(index.getPropertyName())));
+
+                               graphMgmt.updateIndex(aaiUriIndex, SchemaAction.DISABLE_INDEX);
+                               graphMgmt.commit();
+                               try {
+                                       ManagementSystem.awaitGraphIndexStatus(AAIGraph.getInstance().getGraph(), index.getIndexName())
+                                                       .timeout(10, ChronoUnit.MINUTES)
+                                                       .status(SchemaStatus.DISABLED)
+                                                       .call();
+                               } catch (Exception e) {
+                                       e.printStackTrace();
+                               }
+                       }
+
+                       verifyGraphManagementIsOpen();
+                       aaiUriIndex = graphMgmt.getGraphIndex(index.getIndexName());
+                       if (aaiUriIndex.getIndexStatus(graphMgmt.getPropertyKey(index.getPropertyName())).equals(SchemaStatus.DISABLED)) {
+                               logger.info("Removing index: " + index.getIndexName());
+                               graphMgmt.updateIndex(aaiUriIndex, SchemaAction.REMOVE_INDEX);
+                               graphMgmt.commit();
+                       }
+                       if(graphMgmt.isOpen()) {
+                               graphMgmt.commit();
+                       }
+               }
+
+       }
+
+       protected Set<IndexDetails> getIndexesWithAaiUri() {
+               verifyGraphManagementIsOpen();
+               Set<IndexDetails> aaiUriIndexName = new HashSet<>();
+
+               Iterator<JanusGraphIndex> titanIndexes = graphMgmt.getGraphIndexes(Vertex.class).iterator();
+               JanusGraphIndex titanIndex;
+               while (titanIndexes.hasNext()) {
+                       titanIndex = titanIndexes.next();
+                       if (titanIndex.name().contains(AAIProperties.AAI_URI) && titanIndex.getFieldKeys().length > 0) {
+                               logger.info("Found aai-uri index: " + titanIndex.name());
+                               aaiUriIndexName.add(new IndexDetails(titanIndex.name(), titanIndex.getIndexStatus(titanIndex.getFieldKeys()[0]), titanIndex.getFieldKeys()[0].name()));
+                       }
+               }
+               graphMgmt.rollback();
+               return aaiUriIndexName;
+       }
+
+       private void closeAllButCurrentInstances() {
+               verifyGraphManagementIsOpen();
+               logger.info("Closing all but current titan instances.");
+               graphMgmt.getOpenInstances().stream().filter(s -> !s.contains("(current)")).forEach(s -> {
+                       logger.info("\t"+s);
+                       graphMgmt.forceCloseInstance(s);
+               });
+               graphMgmt.commit();
+       }
+
+
+       private void verifyGraphManagementIsOpen() {
+               if (!graphMgmt.isOpen()) {
+                       graphMgmt = this.engine.asAdmin().getManagementSystem();
+               }
+       }
+
+       private void enableIndex() {
+               verifyGraphManagementIsOpen();
+               JanusGraphIndex aaiUriIndex = graphMgmt.getGraphIndex(AAIProperties.AAI_URI);
+               SchemaStatus schemaStatus = aaiUriIndex.getIndexStatus(graphMgmt.getPropertyKey(AAIProperties.AAI_URI));
+               if (schemaStatus.equals(SchemaStatus.INSTALLED)) {
+                       logger.info("Registering index: " + AAIProperties.AAI_URI);
+                       logger.info("\tCurrent state: " + schemaStatus);
+
+                       graphMgmt.updateIndex(aaiUriIndex, SchemaAction.REGISTER_INDEX);
+                       graphMgmt.commit();
+                       try {
+                               ManagementSystem.awaitGraphIndexStatus(AAIGraph.getInstance().getGraph(), AAIProperties.AAI_URI)
+                                               .timeout(10, ChronoUnit.MINUTES)
+                                               .status(SchemaStatus.REGISTERED)
+                                               .call();
+                       } catch (Exception e) {
+                               e.printStackTrace();
+                       }
+               }
+
+               verifyGraphManagementIsOpen();
+               aaiUriIndex = graphMgmt.getGraphIndex(AAIProperties.AAI_URI);
+               schemaStatus = aaiUriIndex.getIndexStatus(graphMgmt.getPropertyKey(AAIProperties.AAI_URI));
+               if (schemaStatus.equals(SchemaStatus.REGISTERED)) {
+                       logger.info("Enabling index: " + AAIProperties.AAI_URI);
+                       logger.info("\tCurrent state: " + schemaStatus);
+
+                       graphMgmt.updateIndex(aaiUriIndex, SchemaAction.ENABLE_INDEX);
+                       graphMgmt.commit();
+                       try {
+                               ManagementSystem.awaitGraphIndexStatus(AAIGraph.getInstance().getGraph(), AAIProperties.AAI_URI)
+                                               .timeout(10, ChronoUnit.MINUTES)
+                                               .status(SchemaStatus.ENABLED)
+                                               .call();
+                       } catch (Exception e) {
+                               e.printStackTrace();
+                       }
+               }
+
+               verifyGraphManagementIsOpen();
+               aaiUriIndex = graphMgmt.getGraphIndex(AAIProperties.AAI_URI);
+               schemaStatus = aaiUriIndex.getIndexStatus(graphMgmt.getPropertyKey(AAIProperties.AAI_URI));
+               logger.info("Final state: " + schemaStatus);
+               graphMgmt.rollback();
+       }
+
+       private class IndexDetails {
+               private String indexName;
+               private SchemaStatus status;
+               private String propertyName;
+
+               public IndexDetails(String indexName, SchemaStatus status, String propertyName) {
+                       this.indexName = indexName;
+                       this.status = status;
+                       this.propertyName = propertyName;
+               }
+
+               public String getIndexName() {
+                       return indexName;
+               }
+
+               public SchemaStatus getStatus() {
+                       return status;
+               }
+
+               public String getPropertyName() {
+                       return propertyName;
+               }
+
+               public void setIndexName(String indexName) {
+                       this.indexName = indexName;
+               }
+
+               public void setStatus(SchemaStatus status) {
+                       this.status = status;
+               }
+
+               public void setPropertyName(String propertyName) {
+                       this.propertyName = propertyName;
+               }
+       }
+}
diff --git a/src/main/java/org/onap/aai/migration/v12/UriMigration.java b/src/main/java/org/onap/aai/migration/v12/UriMigration.java
new file mode 100644 (file)
index 0000000..cb0926e
--- /dev/null
@@ -0,0 +1,180 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.enums.EdgeProperty;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.introspection.exceptions.AAIUnknownObjectException;
+import org.onap.aai.migration.*;
+import org.onap.aai.edges.enums.AAIDirection;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+import org.springframework.web.util.UriUtils;
+
+import javax.ws.rs.core.UriBuilder;
+import java.io.UnsupportedEncodingException;
+import java.util.*;
+import java.util.stream.Collectors;
+
+@Enabled
+
+@MigrationPriority(1000)
+@MigrationDangerRating(1000)
+public class UriMigration extends Migrator {
+
+       private final SchemaVersion version;
+       private final ModelType introspectorFactoryType;
+       private GraphTraversalSource g;
+
+       private Map<String, UriBuilder> nodeTypeToUri;
+       private Map<String, Set<String>> nodeTypeToKeys;
+
+       protected Set<Object> seen = new HashSet<>();
+
+       /**
+        * Instantiates a new migrator.
+        *
+        * @param engine
+        */
+       public UriMigration(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) throws AAIException {
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               version = schemaVersions.getDefaultVersion();
+               introspectorFactoryType = ModelType.MOXY;
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);
+               g = this.engine.asAdmin().getTraversalSource();
+               this.serializer = new DBSerializer(version, this.engine, introspectorFactoryType, this.getMigrationName());
+
+       }
+
+       @Override
+       public void run() {
+               long start = System.currentTimeMillis();
+               nodeTypeToUri = loader.getAllObjects().entrySet().stream().filter(e -> e.getValue().getGenericURI().contains("{")).collect(
+                               Collectors.toMap(
+                                               e -> e.getKey(),
+                                               e -> UriBuilder.fromPath(e.getValue().getFullGenericURI().replaceAll("\\{"+ e.getKey() + "-", "{"))
+                               ));
+
+               nodeTypeToKeys = loader.getAllObjects().entrySet().stream().filter(e -> e.getValue().getGenericURI().contains("{")).collect(
+                               Collectors.toMap(
+                                               e -> e.getKey(),
+                                               e -> e.getValue().getKeys()
+                               ));
+
+               Set<String> topLevelNodeTypes = loader.getAllObjects().entrySet().stream()
+                               .filter(e -> e.getValue().isTopLevel()).map(Map.Entry::getKey)
+                               .collect(Collectors.toSet());
+
+               logger.info("Top level count : " + topLevelNodeTypes.size());
+               topLevelNodeTypes.stream().forEach(topLevelNodeType -> {
+                       Set<Vertex> parentSet = g.V().has(AAIProperties.NODE_TYPE, topLevelNodeType).toSet();
+                       logger.info(topLevelNodeType + " : " + parentSet.size());
+                       try {
+                               this.verifyOrAddUri("", parentSet);
+                       } catch (AAIUnknownObjectException e) {
+                               e.printStackTrace();
+                       } catch (AAIException e) {
+                               e.printStackTrace();
+                       } catch (UnsupportedEncodingException e) {
+                               e.printStackTrace();
+                       }
+               });
+               logger.info("RUNTIME: " + (System.currentTimeMillis() - start));
+               logger.info("NO URI: " + g.V().hasNot(AAIProperties.AAI_URI).count().next());
+               logger.info("NUM VERTEXES SEEN: " + seen.size());
+               seen = new HashSet<>();
+
+       }
+
+       protected void verifyOrAddUri(String parentUri, Set<Vertex> vertexSet) throws UnsupportedEncodingException, AAIException {
+               String correctUri;
+               for (Vertex v : vertexSet) {
+                       seen.add(v.id());
+                       //if there is an issue generating the uri catch, log and move on;
+                       try {
+                               correctUri = parentUri + this.getUriForVertex(v);
+                       } catch (Exception e) {
+                               logger.error("Vertex has issue generating uri " + e.getMessage() + "\n\t" + this.asString(v));
+                               continue;
+                       }
+                       try {
+                               v.property(AAIProperties.AAI_URI, correctUri);
+                       } catch (Exception e) {
+                               logger.info(e.getMessage() + "\n\t" + this.asString(v));
+                       }
+                       if (!v.property(AAIProperties.AAI_UUID).isPresent()) {
+                               v.property(AAIProperties.AAI_UUID, UUID.randomUUID().toString());
+                       }
+                       this.verifyOrAddUri(correctUri, getChildren(v));
+               }
+       }
+
+       protected Set<Vertex> getChildren(Vertex v) {
+
+               Set<Vertex> children = g.V(v).bothE().not(__.has(EdgeProperty.CONTAINS.toString(), AAIDirection.NONE.toString())).otherV().toSet();
+
+               return children.stream().filter(child -> !seen.contains(child.id())).collect(Collectors.toSet());
+       }
+
+       protected String getUriForVertex(Vertex v) {
+               String aaiNodeType = v.property(AAIProperties.NODE_TYPE).value().toString();
+
+
+               Map<String, String> parameters = this.nodeTypeToKeys.get(aaiNodeType).stream().collect(Collectors.toMap(
+                               key -> key,
+                               key -> encodeProp(v.property(key).value().toString())
+               ));
+
+               return this.nodeTypeToUri.get(aaiNodeType).buildFromEncodedMap(parameters).toString();
+       }
+
+       private static String encodeProp(String s) {
+               try {
+                       return UriUtils.encode(s, "UTF-8");
+               } catch (UnsupportedEncodingException e) {
+                       return "";
+               }
+       }
+
+       @Override
+       public Status getStatus() {
+               return Status.SUCCESS;
+       }
+
+       @Override
+       public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.empty();
+       }
+
+       @Override
+       public String getMigrationName() {
+               return UriMigration.class.getSimpleName();
+       }
+}
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateBooleanDefaultsToFalse.java b/src/main/java/org/onap/aai/migration/v13/MigrateBooleanDefaultsToFalse.java
new file mode 100644 (file)
index 0000000..89a9459
--- /dev/null
@@ -0,0 +1,114 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v13;\r
+\r
+import java.util.HashMap;\r
+import java.util.Map;\r
+import java.util.Optional;\r
+\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+import org.onap.aai.migration.Enabled;\r
+import org.onap.aai.migration.MigrationDangerRating;\r
+import org.onap.aai.migration.MigrationPriority;\r
+import org.onap.aai.migration.Status;\r
+import org.onap.aai.migration.ValueMigrator;\r
+import org.onap.aai.setup.SchemaVersions;\r
+\r
+\r
+@MigrationPriority(1)\r
+@MigrationDangerRating(1)\r
+public class MigrateBooleanDefaultsToFalse extends ValueMigrator {\r
+       protected static final String VNF_NODE_TYPE = "generic-vnf";\r
+       protected static final String VSERVER_NODE_TYPE = "vserver";\r
+       protected static final String VNFC_NODE_TYPE = "vnfc";\r
+       protected static final String L3NETWORK_NODE_TYPE = "l3-network";\r
+       protected static final String SUBNET_NODE_TYPE = "subnet";\r
+       protected static final String LINTERFACE_NODE_TYPE = "l-interface";\r
+       protected static final String VFMODULE_NODE_TYPE = "vf-module";\r
+       \r
+       private static Map<String, Map> map;\r
+    private static Map<String, Boolean> pair1;\r
+    private static Map<String, Boolean> pair2;\r
+    private static Map<String, Boolean> pair3;\r
+    private static Map<String, Boolean> pair4;\r
+    private static Map<String, Boolean> pair5;\r
+    private static Map<String, Boolean> pair6;\r
\r
+       public MigrateBooleanDefaultsToFalse(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {\r
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, setBooleanDefaultsToFalse(), false);\r
+               \r
+       }\r
+       \r
+       private static Map<String, Map> setBooleanDefaultsToFalse(){\r
+               map = new HashMap<>();\r
+        pair1 = new HashMap<>();\r
+        pair2 = new HashMap<>();\r
+        pair3 = new HashMap<>();\r
+        pair4 = new HashMap<>();\r
+        pair5 = new HashMap<>();\r
+        pair6 = new HashMap<>();\r
+\r
+\r
+               pair1.put("is-closed-loop-disabled", false);            \r
+               map.put("generic-vnf", pair1);\r
+               map.put("vnfc", pair1);\r
+               map.put("vserver", pair1);\r
+               \r
+               pair2.put("is-bound-to-vpn", false);\r
+               pair2.put("is-provider-network", false);\r
+               pair2.put("is-shared-network", false);\r
+               pair2.put("is-external-network", false);\r
+               map.put("l3-network", pair2);\r
+               \r
+               pair3.put("dhcp-enabled", false);\r
+               map.put("subnet", pair3);\r
+               \r
+               pair4.put("is-port-mirrored", false);\r
+               pair4.put("is-ip-unnumbered", false);\r
+               map.put("l-interface", pair4);\r
+               \r
+               pair5.put("is-base-vf-module", false);\r
+               map.put("vf-module", pair5);\r
+               \r
+               pair6.put("is-ip-unnumbered", false);\r
+               map.put("vlan", pair6);\r
+        \r
+        return map;\r
+       }\r
+\r
+       @Override\r
+       public Status getStatus() {\r
+               return Status.SUCCESS;\r
+       }\r
+\r
+       @Override\r
+       public Optional<String[]> getAffectedNodeTypes() {\r
+               return Optional.of(new String[]{VNF_NODE_TYPE,VSERVER_NODE_TYPE,VNFC_NODE_TYPE,L3NETWORK_NODE_TYPE,SUBNET_NODE_TYPE,LINTERFACE_NODE_TYPE,VFMODULE_NODE_TYPE});\r
+       }\r
+\r
+       @Override\r
+       public String getMigrationName() {\r
+               return "MigrateBooleanDefaultsToFalse";\r
+       }\r
+\r
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateInMaintDefaultToFalse.java b/src/main/java/org/onap/aai/migration/v13/MigrateInMaintDefaultToFalse.java
new file mode 100644 (file)
index 0000000..1773038
--- /dev/null
@@ -0,0 +1,98 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v13;\r
+\r
+import java.util.HashMap;\r
+import java.util.List;\r
+import java.util.Map;\r
+import java.util.Optional;\r
+\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+import org.onap.aai.migration.Enabled;\r
+import org.onap.aai.migration.MigrationDangerRating;\r
+import org.onap.aai.migration.MigrationPriority;\r
+import org.onap.aai.migration.Status;\r
+import org.onap.aai.migration.ValueMigrator;\r
+import org.onap.aai.setup.SchemaVersions;\r
+\r
+\r
+@MigrationPriority(1)\r
+@MigrationDangerRating(1)\r
+public class MigrateInMaintDefaultToFalse extends ValueMigrator {\r
+       \r
+       protected static final String VNF_NODE_TYPE = "generic-vnf";\r
+       protected static final String LINTERFACE_NODE_TYPE = "l-interface";\r
+       protected static final String LAG_INTERFACE_NODE_TYPE = "lag-interface";\r
+       protected static final String LOGICAL_LINK_NODE_TYPE = "logical-link";\r
+       protected static final String PINTERFACE_NODE_TYPE = "p-interface";\r
+       protected static final String VLAN_NODE_TYPE = "vlan";\r
+       protected static final String VNFC_NODE_TYPE = "vnfc";\r
+       protected static final String VSERVER_NODE_TYPE = "vserver";\r
+       protected static final String PSERVER_NODE_TYPE = "pserver";\r
+       protected static final String PNF_NODE_TYPE = "pnf";\r
+       protected static final String NOS_SERVER_NODE_TYPE = "nos-server";\r
+               \r
+       private static Map<String, Map> map;\r
+    private static Map<String, Boolean> pair;\r
\r
+       public MigrateInMaintDefaultToFalse(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {\r
+               super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, setInMaintToFalse(), false);\r
+       }       \r
+               \r
+       private static Map<String, Map> setInMaintToFalse(){\r
+               map = new HashMap<>();\r
+        pair = new HashMap<>();\r
+\r
+               pair.put("in-maint", false);\r
+               \r
+               map.put("generic-vnf", pair);\r
+               map.put("l-interface", pair);\r
+               map.put("lag-interface", pair);\r
+               map.put("logical-link", pair);\r
+               map.put("p-interface", pair);\r
+               map.put("vlan", pair);\r
+               map.put("vnfc", pair);\r
+               map.put("vserver", pair);\r
+               map.put("pserver", pair);\r
+        map.put("pnf", pair);\r
+        map.put("nos-server", pair);\r
+        \r
+        return map;\r
+       }       \r
+\r
+       @Override\r
+       public Status getStatus() {\r
+               return Status.SUCCESS;\r
+       }\r
+\r
+       @Override\r
+       public Optional<String[]> getAffectedNodeTypes() {\r
+               return Optional.of(new String[]{VNF_NODE_TYPE,LINTERFACE_NODE_TYPE,LAG_INTERFACE_NODE_TYPE,LOGICAL_LINK_NODE_TYPE,PINTERFACE_NODE_TYPE,VLAN_NODE_TYPE,VNFC_NODE_TYPE,VSERVER_NODE_TYPE,PSERVER_NODE_TYPE,PNF_NODE_TYPE,NOS_SERVER_NODE_TYPE});\r
+       }\r
+\r
+       @Override\r
+       public String getMigrationName() {\r
+               return "MigrateInMaintDefaultToFalse";\r
+       }\r
+\r
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelInvariantId.java b/src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelInvariantId.java
new file mode 100644 (file)
index 0000000..1244c59
--- /dev/null
@@ -0,0 +1,85 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+/*-
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+import org.janusgraph.core.Cardinality;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.PropertyMigrator;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+import java.util.Optional;
+
+
+@MigrationPriority(19)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateInstanceGroupModelInvariantId extends PropertyMigrator {
+
+    private static final String INSTANCE_GROUP_NODE_TYPE = "instance-group";
+    private static final String INSTANCE_GROUP_MODEL_INVARIANT_ID_PROPERTY = "model-invariant-id";
+    private static final String INSTANCE_GROUP_MODEL_INVARIANT_ID_LOCAL_PROPERTY = "model-invariant-id-local";
+
+    public MigrateInstanceGroupModelInvariantId(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.initialize(INSTANCE_GROUP_MODEL_INVARIANT_ID_PROPERTY, INSTANCE_GROUP_MODEL_INVARIANT_ID_LOCAL_PROPERTY,
+                String.class, Cardinality.SINGLE);
+    }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{this.INSTANCE_GROUP_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateInstanceGroupModelInvariantId";
+    }
+
+    @Override
+    public boolean isIndexed() {
+        return true;
+    }
+}
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelVersionId.java b/src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelVersionId.java
new file mode 100644 (file)
index 0000000..64341ba
--- /dev/null
@@ -0,0 +1,84 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+/*-
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+import java.util.Optional;
+import org.janusgraph.core.Cardinality;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.PropertyMigrator;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+
+@MigrationPriority(19)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateInstanceGroupModelVersionId extends PropertyMigrator {
+
+    private static final String INSTANCE_GROUP_NODE_TYPE = "instance-group";
+    private static final String INSTANCE_GROUP_MODEL_VERSION_ID_PROPERTY = "model-version-id";
+    private static final String INSTANCE_GROUP_MODEL_VERSION_ID_LOCAL_PROPERTY = "model-version-id-local";
+
+    public MigrateInstanceGroupModelVersionId(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.initialize(INSTANCE_GROUP_MODEL_VERSION_ID_PROPERTY, INSTANCE_GROUP_MODEL_VERSION_ID_LOCAL_PROPERTY,
+                String.class, Cardinality.SINGLE);
+    }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{this.INSTANCE_GROUP_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateInstanceGroupModelVersionId";
+    }
+
+    @Override
+    public boolean isIndexed() {
+        return true;
+    }
+}
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupSubType.java b/src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupSubType.java
new file mode 100644 (file)
index 0000000..6823da8
--- /dev/null
@@ -0,0 +1,64 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+import java.util.Optional;
+
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.PropertyMigrator;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.Cardinality;
+import org.onap.aai.setup.SchemaVersions;
+
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateInstanceGroupSubType extends PropertyMigrator{
+
+    protected static final String SUB_TYPE_PROPERTY = "sub-type";
+    protected static final String INSTANCE_GROUP_ROLE_PROPERTY = "instance-group-role";
+    protected static final String INSTANCE_GROUP_NODE_TYPE = "instance-group";
+
+    public MigrateInstanceGroupSubType(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.initialize(SUB_TYPE_PROPERTY , INSTANCE_GROUP_ROLE_PROPERTY, String.class,Cardinality.SINGLE);
+    }
+    
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+       return Optional.of(new String[]{INSTANCE_GROUP_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateInstanceGroupSubType";
+    }
+
+       @Override
+       public boolean isIndexed() {
+               return true;
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupType.java b/src/main/java/org/onap/aai/migration/v13/MigrateInstanceGroupType.java
new file mode 100644 (file)
index 0000000..f3cd669
--- /dev/null
@@ -0,0 +1,64 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+import java.util.Optional;
+
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.PropertyMigrator;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.Cardinality;
+import org.onap.aai.setup.SchemaVersions;
+
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateInstanceGroupType extends PropertyMigrator{
+
+    protected static final String TYPE_PROPERTY = "type";
+    protected static final String INSTANCE_GROUP_TYPE_PROPERTY = "instance-group-type";
+    protected static final String INSTANCE_GROUP_NODE_TYPE = "instance-group";
+
+    public MigrateInstanceGroupType(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.initialize(TYPE_PROPERTY , INSTANCE_GROUP_TYPE_PROPERTY, String.class,Cardinality.SINGLE);
+    }
+    
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+       return Optional.of(new String[]{INSTANCE_GROUP_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateInstanceGroupType";
+    }
+
+       @Override
+       public boolean isIndexed() {
+               return true;
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateModelVer.java b/src/main/java/org/onap/aai/migration/v13/MigrateModelVer.java
new file mode 100644 (file)
index 0000000..7bc9a7d
--- /dev/null
@@ -0,0 +1,229 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.Migrator;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+@MigrationPriority(20)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateModelVer extends Migrator{
+
+    protected static final String MODELINVARIANTID = "model-invariant-id";
+    protected static final String MODELVERSIONID = "model-version-id";
+    protected static final String MODELINVARIANTIDLOCAL = "model-invariant-id-local";
+    protected static final String MODELVERSIONIDLOCAL = "model-version-id-local";
+    
+    protected static final String MODELVER = "model-ver";
+    protected static final String MODEL = "model";
+
+    protected static final String CONNECTOR_NODETYPE = "connector";
+    protected static final String SERVICEINSTANCE_NODETYPE = "service-instance";
+    protected static final String CONFIGURATION_NODETYPE = "configuration";
+    protected static final String LOGICALLINK_NODETYPE = "logical-link";
+    protected static final String VNFC_NODETYPE = "vnfc";
+    protected static final String L3NETWORK_NODETYPE = "l3-network";
+    protected static final String GENERICVNF_NODETYPE = "generic-vnf";
+    protected static final String PNF_NODETYPE = "pnf";
+    protected static final String VFMODULE_NODETYPE = "vf-module";
+    protected static final String INSTANCEGROUP_NODETYPE = "instance-group";
+    protected static final String ALLOTTEDRESOURCE_NODETYPE = "allotted-resource";
+    protected static final String COLLECTION_NODETYPE = "collection";
+
+    private boolean success = true;
+    
+    private static Map<String, String> NODETYPEKEYMAP = new HashMap<String, String>();
+    
+    static {
+       NODETYPEKEYMAP.put(CONNECTOR_NODETYPE,"resource-instance-id");
+       NODETYPEKEYMAP.put(SERVICEINSTANCE_NODETYPE,"service-instance-id");
+       NODETYPEKEYMAP.put(CONFIGURATION_NODETYPE, "configuration-id");
+       NODETYPEKEYMAP.put(LOGICALLINK_NODETYPE,"link-name");
+       NODETYPEKEYMAP.put(VNFC_NODETYPE, "vnfc-name");
+       NODETYPEKEYMAP.put(L3NETWORK_NODETYPE, "network-id");
+       NODETYPEKEYMAP.put(GENERICVNF_NODETYPE,"vnf-id");
+       NODETYPEKEYMAP.put(PNF_NODETYPE,"pnf-name");
+       NODETYPEKEYMAP.put(VFMODULE_NODETYPE,"vf-module-id");
+       NODETYPEKEYMAP.put(INSTANCEGROUP_NODETYPE,"id");
+       NODETYPEKEYMAP.put(ALLOTTEDRESOURCE_NODETYPE,"id");
+       NODETYPEKEYMAP.put(COLLECTION_NODETYPE,"collection-id");
+    }
+
+    public MigrateModelVer(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+    }
+
+    @Override
+    public void run() {
+
+        List<Vertex> vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, CONNECTOR_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, CONNECTOR_NODETYPE);
+
+        vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, SERVICEINSTANCE_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, SERVICEINSTANCE_NODETYPE);
+
+        vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, CONFIGURATION_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, CONFIGURATION_NODETYPE);
+
+        vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, LOGICALLINK_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, LOGICALLINK_NODETYPE);
+
+        vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VNFC_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, VNFC_NODETYPE);
+
+        vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, L3NETWORK_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, L3NETWORK_NODETYPE);
+
+        vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, GENERICVNF_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, GENERICVNF_NODETYPE);
+
+        vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, PNF_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, PNF_NODETYPE);
+
+        vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, VFMODULE_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, VFMODULE_NODETYPE);
+
+        vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, INSTANCEGROUP_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, INSTANCEGROUP_NODETYPE);
+
+        vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, ALLOTTEDRESOURCE_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, ALLOTTEDRESOURCE_NODETYPE);
+
+        vertextList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, COLLECTION_NODETYPE).has(MODELINVARIANTIDLOCAL).has(MODELVERSIONIDLOCAL).toList();
+        createEdges(vertextList, COLLECTION_NODETYPE);
+    }
+
+    private void createEdges(List<Vertex> sourceVertexList, String nodeTypeString)
+    {
+        int modelVerEdgeCount = 0;
+        int modelVerEdgeErrorCount = 0;
+
+        logger.info("---------- Start Creating an Edge for " + nodeTypeString + " nodes with Model Invariant Id and Model Version Id to the model-ver  ----------");
+        Map<String, Vertex> modelVerUriVtxIdMap = new HashMap<String, Vertex>();
+        for (Vertex vertex : sourceVertexList) {
+            String currentValueModelVersionID = null;
+            String currrentValueModelInvariantID = null;
+            try {
+                currentValueModelVersionID = getModelVersionIdNodeValue(vertex);
+                currrentValueModelInvariantID = getModelInvariantIdNodeValue(vertex);
+                
+                String uri = String.format("/service-design-and-creation/models/model/%s/model-vers/model-ver/%s", currrentValueModelInvariantID, currentValueModelVersionID);
+                String propertyKey = NODETYPEKEYMAP.get(nodeTypeString);
+                String propertyValue = vertex.value(propertyKey).toString();
+                logger.info("Processing "+nodeTypeString+ " vertex with key "+vertex.value(propertyKey).toString());
+                Vertex modelVerVertex = null;
+                
+                if (modelVerUriVtxIdMap.containsKey(uri)){
+                       modelVerVertex = modelVerUriVtxIdMap.get(uri);
+                } else {
+                       List<Vertex> modelverList = this.engine.asAdmin().getTraversalSource().V().has(MODELINVARIANTID,currrentValueModelInvariantID).has(AAIProperties.NODE_TYPE, MODEL).in()
+                               .has(AAIProperties.NODE_TYPE, "model-ver" ).has("aai-uri", uri).toList();
+                       if (modelverList != null && !modelverList.isEmpty()) {
+                        modelVerVertex = modelverList.get(0);
+                        modelVerUriVtxIdMap.put(uri, modelVerVertex);
+                       }
+                }
+                
+                if (modelVerVertex != null && modelVerVertex.property("model-version-id").isPresent() ) {
+                    boolean edgePresent = false;
+                    //Check if edge already exists for each of the source vertex
+                    List<Vertex> outVertexList = this.engine.asAdmin().getTraversalSource().V(modelVerVertex).in().has("aai-node-type", nodeTypeString).has(propertyKey, propertyValue).toList();
+                    Iterator<Vertex> vertexItr = outVertexList.iterator();
+                    if (outVertexList != null &&  !outVertexList.isEmpty()  && vertexItr.hasNext()){
+                       logger.info("\t Edge already exists from " + nodeTypeString + " node to models-ver with model-invariant-id :" + currrentValueModelInvariantID + " and model-version-id :" + currentValueModelVersionID);
+                               edgePresent = true;
+                               continue;
+                    }
+                    // Build edge from vertex to modelVerVertex
+                    if (!edgePresent) {
+                       this.createPrivateEdge(vertex, modelVerVertex);
+                       modelVerEdgeCount++;
+                    }
+                } else
+                {
+                    modelVerEdgeErrorCount++;
+                    logger.info("\t" + MIGRATION_ERROR + "Unable to create edge. No model-ver vertex found with model-invariant-id :" + currrentValueModelInvariantID + " and model-version-id :" + currentValueModelVersionID);
+
+                }
+            } catch (Exception e) {
+                success = false;
+                modelVerEdgeErrorCount++;
+                logger.error("\t" + MIGRATION_ERROR + "encountered exception from " + nodeTypeString + " node when trying to create edge to models-ver with model-invariant-id :" + currrentValueModelInvariantID + " and model-version-id :" + currentValueModelVersionID, e);
+            }
+        }
+
+        logger.info ("\n \n ******* Summary " + nodeTypeString + " Nodes: Finished creating an Edge for " + nodeTypeString + " nodes with Model Invariant Id and Model Version Id to the model-ver Migration ********* \n");
+        logger.info(MIGRATION_SUMMARY_COUNT+"Number of ModelVer edge created from " + nodeTypeString + " nodes: " + modelVerEdgeCount +"\n");
+        logger.info(MIGRATION_SUMMARY_COUNT+"Number of ModelVer edge failed to create the edge from the " + nodeTypeString + " nodes due to error : "+ modelVerEdgeErrorCount  +"\n");
+
+
+    }
+    private String getModelInvariantIdNodeValue(Vertex vertex) {
+        String propertyValue = "";
+        if(vertex != null && vertex.property(MODELINVARIANTIDLOCAL).isPresent()){
+            propertyValue = vertex.value(MODELINVARIANTIDLOCAL).toString();
+        }
+        return propertyValue;
+    }
+
+    private String getModelVersionIdNodeValue(Vertex vertex) {
+        String propertyValue = "";
+        if(vertex != null && vertex.property(MODELVERSIONIDLOCAL).isPresent()){
+            propertyValue = vertex.value(MODELVERSIONIDLOCAL).toString();
+        }
+        return propertyValue;
+    }
+
+    @Override
+    public Status getStatus() {
+        if (success) {
+            return Status.SUCCESS;
+        } else {
+            return Status.FAILURE;
+        }
+    }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{MODELVER});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateModelVer";
+    }
+
+}
diff --git a/src/main/java/org/onap/aai/migration/v13/MigratePserverAndPnfEquipType.java b/src/main/java/org/onap/aai/migration/v13/MigratePserverAndPnfEquipType.java
new file mode 100644 (file)
index 0000000..6788d7f
--- /dev/null
@@ -0,0 +1,157 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v13;\r
+import java.util.List;\r
+import java.util.Optional;\r
+\r
+import org.apache.tinkerpop.gremlin.structure.Vertex;\r
+import org.onap.aai.db.props.AAIProperties;\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.migration.Enabled;\r
+import org.onap.aai.migration.MigrationDangerRating;\r
+import org.onap.aai.migration.MigrationPriority;\r
+import org.onap.aai.migration.Migrator;\r
+import org.onap.aai.migration.Status;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+import org.onap.aai.setup.SchemaVersions;\r
+\r
+@MigrationPriority(20)\r
+@MigrationDangerRating(2)\r
+@Enabled\r
+public class MigratePserverAndPnfEquipType extends Migrator{\r
+\r
+    protected static final String EQUIP_TYPE_PROPERTY = "equip-type";\r
+    protected static final String HOSTNAME_PROPERTY = "hostname";\r
+    protected static final String PNF_NAME_PROPERTY = "pnf-name";\r
+    protected static final String PNF_NODE_TYPE = "pnf";\r
+    protected static final String PSERVER_NODE_TYPE = "pserver";\r
+       private boolean success = true;\r
+\r
+    public MigratePserverAndPnfEquipType(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {\r
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+    }\r
+\r
+\r
+\r
+    @Override\r
+    public void run() {\r
+       int pserverCount = 0;\r
+       int pnfCount = 0;\r
+               int pserverErrorCount = 0;\r
+               int pnfErrorCount  = 0;\r
+               logger.info("---------- Start Updating equip-type for Pserver and Pnf  ----------");\r
+\r
+       List<Vertex> pserverList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, PSERVER_NODE_TYPE).toList();\r
+       List<Vertex> pnfList = this.engine.asAdmin().getTraversalSource().V().has(AAIProperties.NODE_TYPE, PNF_NODE_TYPE).toList();\r
+\r
+       for (Vertex vertex : pserverList) {\r
+               String currentValueOfEquipType = null;\r
+               String hostName = null;\r
+               try {\r
+                       currentValueOfEquipType = getEquipTypeNodeValue(vertex);\r
+                       hostName = getHostNameNodeValue(vertex);\r
+                       if("Server".equals(currentValueOfEquipType) ||"server".equals(currentValueOfEquipType) ){\r
+                               vertex.property(EQUIP_TYPE_PROPERTY, "SERVER");\r
+                               this.touchVertexProperties(vertex, false);\r
+                               logger.info("changed Pserver equip-type from " + currentValueOfEquipType + " to SERVER having hostname : " + hostName);\r
+                               pserverCount++;\r
+                       }\r
+               } catch (Exception e) {\r
+                       success = false;\r
+                       pserverErrorCount++;\r
+                       logger.error(MIGRATION_ERROR + "encountered exception for equip-type:" + currentValueOfEquipType + " having hostName :" + hostName, e);\r
+               }\r
+       }\r
+        \r
+       for (Vertex vertex : pnfList) {\r
+               String currentValueOfEquipType = null;\r
+               String pnfName = null;\r
+               try {\r
+                       currentValueOfEquipType = getEquipTypeNodeValue(vertex);\r
+                       pnfName = getPnfNameNodeValue(vertex);\r
+                       if("Switch".equals(currentValueOfEquipType)||"switch".equals(currentValueOfEquipType)){\r
+                               vertex.property(EQUIP_TYPE_PROPERTY, "SWITCH");\r
+                               this.touchVertexProperties(vertex, false);\r
+                               logger.info("changed Pnf equip-type from "+ currentValueOfEquipType +" to SWITCH having pnf-name :" + pnfName);\r
+                               pnfCount++;\r
+                       }\r
+\r
+               } catch (Exception e) {\r
+                       success = false;\r
+                       pnfErrorCount++;\r
+                       logger.error(MIGRATION_ERROR + "encountered exception for equip-type:" + currentValueOfEquipType +" having pnf-name : "+ pnfName , e);\r
+               }\r
+       }\r
+       \r
+       logger.info ("\n \n ******* Final Summary Updated equip-type for Pserver and Pnf  Migration ********* \n");\r
+        logger.info(MIGRATION_SUMMARY_COUNT+"Number of Pservers updated: "+ pserverCount +"\n");\r
+        logger.info(MIGRATION_SUMMARY_COUNT+"Number of Pservers failed to update due to error : "+ pserverErrorCount  +"\n");\r
+        \r
+        logger.info(MIGRATION_SUMMARY_COUNT+"Number of Pnf updated: "+ pnfCount +"\n");\r
+        logger.info(MIGRATION_SUMMARY_COUNT+"Number of Pnf failed to update due to error : "+ pnfErrorCount  +"\n");\r
+\r
+    }\r
+\r
+       private String getEquipTypeNodeValue(Vertex vertex) {\r
+               String propertyValue = "";\r
+               if(vertex != null && vertex.property(EQUIP_TYPE_PROPERTY).isPresent()){\r
+                       propertyValue = vertex.property(EQUIP_TYPE_PROPERTY).value().toString();\r
+               }\r
+               return propertyValue;\r
+       }\r
+       \r
+       private String getHostNameNodeValue(Vertex vertex) {\r
+               String propertyValue = "";\r
+               if(vertex != null && vertex.property(HOSTNAME_PROPERTY).isPresent()){\r
+                       propertyValue = vertex.property(HOSTNAME_PROPERTY).value().toString();\r
+               }\r
+               return propertyValue;\r
+       }\r
+       \r
+       private String getPnfNameNodeValue(Vertex vertex) {\r
+               String propertyValue = "";\r
+               if(vertex != null && vertex.property(PNF_NAME_PROPERTY).isPresent()){\r
+                       propertyValue = vertex.property(PNF_NAME_PROPERTY).value().toString();\r
+               }\r
+               return propertyValue;\r
+       }\r
+    \r
+    @Override\r
+    public Status getStatus() {\r
+        if (success) {\r
+            return Status.SUCCESS;\r
+        } else {\r
+            return Status.FAILURE;\r
+        }\r
+    }\r
+    \r
+    @Override\r
+    public Optional<String[]> getAffectedNodeTypes() {\r
+       return Optional.of(new String[]{PSERVER_NODE_TYPE,PNF_NODE_TYPE});\r
+    }\r
+\r
+    @Override\r
+    public String getMigrationName() {\r
+        return "MigratePserverAndPnfEquipType";\r
+    }\r
+\r
+}\r
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateVnfcModelInvariantId.java b/src/main/java/org/onap/aai/migration/v13/MigrateVnfcModelInvariantId.java
new file mode 100644 (file)
index 0000000..a643842
--- /dev/null
@@ -0,0 +1,84 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+/*-
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+import org.janusgraph.core.Cardinality;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.PropertyMigrator;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+import java.util.Optional;
+
+
+@MigrationPriority(19)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateVnfcModelInvariantId extends PropertyMigrator {
+
+    private static final String VNFC_NODE_TYPE = "vnfc";
+    private static final String VNFC_MODEL_INVARIANT_ID_PROPERTY = "model-invariant-id";
+    private static final String VNFC_MODEL_INVARIANT_ID_LOCAL_PROPERTY = "model-invariant-id-local";
+
+    public MigrateVnfcModelInvariantId(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.initialize(VNFC_MODEL_INVARIANT_ID_PROPERTY, VNFC_MODEL_INVARIANT_ID_LOCAL_PROPERTY, String.class, Cardinality.SINGLE);
+    }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{this.VNFC_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateVnfcModelInvariantId";
+    }
+
+    @Override
+    public boolean isIndexed() {
+        return true;
+    }
+}
diff --git a/src/main/java/org/onap/aai/migration/v13/MigrateVnfcModelVersionId.java b/src/main/java/org/onap/aai/migration/v13/MigrateVnfcModelVersionId.java
new file mode 100644 (file)
index 0000000..13cdb80
--- /dev/null
@@ -0,0 +1,83 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+/*-
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+
+import java.util.Optional;
+import org.janusgraph.core.Cardinality;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.migration.Enabled;
+import org.onap.aai.migration.MigrationDangerRating;
+import org.onap.aai.migration.MigrationPriority;
+import org.onap.aai.migration.PropertyMigrator;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+
+
+@MigrationPriority(19)
+@MigrationDangerRating(2)
+@Enabled
+public class MigrateVnfcModelVersionId extends PropertyMigrator {
+
+    private static final String VNFC_NODE_TYPE = "vnfc";
+    private static final String VNFC_MODEL_VERSION_ID_PROPERTY = "model-version-id";
+    private static final String VNFC_MODEL_VERSION_ID_LOCAL_PROPERTY = "model-version-id-local";
+
+    public MigrateVnfcModelVersionId(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
+        super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        this.initialize(VNFC_MODEL_VERSION_ID_PROPERTY, VNFC_MODEL_VERSION_ID_LOCAL_PROPERTY, String.class, Cardinality.SINGLE);
+    }
+
+    @Override
+    public Optional<String[]> getAffectedNodeTypes() {
+        return Optional.of(new String[]{this.VNFC_NODE_TYPE});
+    }
+
+    @Override
+    public String getMigrationName() {
+        return "MigrateVnfcModelVersionId";
+    }
+
+    @Override
+    public boolean isIndexed() {
+        return true;
+    }
+}
diff --git a/src/main/java/org/onap/aai/rest/ExceptionHandler.java b/src/main/java/org/onap/aai/rest/ExceptionHandler.java
new file mode 100644 (file)
index 0000000..14c45da
--- /dev/null
@@ -0,0 +1,127 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest;
+
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.sun.istack.SAXParseException2;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * The Class ExceptionHandler.
+ */
+@Provider
+public class ExceptionHandler implements ExceptionMapper<Exception> {
+
+    @Context
+    private HttpServletRequest request;
+    
+    @Context
+    private HttpHeaders headers;
+    
+    /**
+        * @{inheritDoc}
+        */
+    @Override
+    public Response toResponse(Exception exception) {
+
+       Response response = null;
+       ArrayList<String> templateVars = new ArrayList<String>();
+
+       //the general case is that cxf will give us a WebApplicationException
+       //with a linked exception
+       if (exception instanceof WebApplicationException) { 
+               WebApplicationException e = (WebApplicationException) exception;
+               if (e.getCause() != null) {
+                       if (e.getCause() instanceof SAXParseException2) {
+                               templateVars.add("UnmarshalException");
+                               AAIException ex = new AAIException("AAI_4007", exception);
+                               response = Response
+                                               .status(400)
+                                               .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+                                               .build();
+                       }
+               }
+       } else if (exception instanceof JsonParseException) {
+               //jackson does it differently so we get the direct JsonParseException
+               templateVars.add("JsonParseException");
+               AAIException ex = new AAIException("AAI_4007", exception);
+               response = Response
+                               .status(400)
+                               .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+                               .build();
+        } else if (exception instanceof JsonMappingException) {
+               //jackson does it differently so we get the direct JsonParseException
+               templateVars.add("JsonMappingException");
+               AAIException ex = new AAIException("AAI_4007", exception);
+               response = Response
+                               .status(400)
+                               .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+                               .build();
+        } 
+       
+       // it didn't get set above, we wrap a general fault here
+       if (response == null) { 
+               
+               Exception actual_e = exception;
+               if (exception instanceof WebApplicationException) { 
+                       WebApplicationException e = (WebApplicationException) exception;
+                       response = e.getResponse();
+               } else { 
+                       templateVars.add(request.getMethod());
+                       templateVars.add("unknown");
+                       AAIException ex = new AAIException("AAI_4000", actual_e);
+                       List<MediaType> mediaTypes = headers.getAcceptableMediaTypes();
+                       int setError = 0;
+
+                       for (MediaType mediaType : mediaTypes) { 
+                               if (MediaType.APPLICATION_XML_TYPE.isCompatible(mediaType)) {
+                                       response = Response
+                                                       .status(400)
+                                                       .type(MediaType.APPLICATION_XML_TYPE)
+                                                       .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+                                                       .build();       
+                                       setError = 1;
+                               } 
+                       }
+                       if (setError == 0) { 
+                               response = Response
+                                               .status(400)
+                                               .type(MediaType.APPLICATION_JSON_TYPE)
+                                               .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), ex, templateVars))
+                                               .build();       
+                       }
+               }
+       }               
+       return response;
+    }
+}
diff --git a/src/main/java/org/onap/aai/rest/QueryConsumer.java b/src/main/java/org/onap/aai/rest/QueryConsumer.java
new file mode 100644 (file)
index 0000000..85665da
--- /dev/null
@@ -0,0 +1,217 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import org.onap.aai.concurrent.AaiCallable;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.rest.dsl.DslQueryProcessor;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.StopWatch;
+import org.onap.aai.rest.db.HttpEntry;
+import org.onap.aai.rest.search.GenericQueryProcessor;
+import org.onap.aai.rest.search.QueryProcessorType;
+import org.onap.aai.restcore.HttpMethod;
+import org.onap.aai.restcore.RESTAPI;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.serialization.queryformats.Format;
+import org.onap.aai.serialization.queryformats.FormatFactory;
+import org.onap.aai.serialization.queryformats.Formatter;
+import org.onap.aai.serialization.queryformats.SubGraphStyle;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.util.AAIConstants;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Component;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.*;
+import javax.ws.rs.core.*;
+import javax.ws.rs.core.Response.Status;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * REST endpoint that executes ad-hoc gremlin or DSL queries against the graph
+ * and renders the resulting vertices in the requested serialization format.
+ */
+@Component
+@Path("{version: v[1-9][0-9]*|latest}/dbquery")
+public class QueryConsumer extends RESTAPI {
+	
+	/** The introspector factory type. */
+	private ModelType introspectorFactoryType = ModelType.MOXY;
+	
+	// Default processor; a request may override it via the QueryProcessor header.
+	private QueryProcessorType processorType = QueryProcessorType.LOCAL_GROOVY;
+
+	private static final String TARGET_ENTITY = "DB";
+	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(QueryConsumer.class);
+
+	private HttpEntry traversalUriHttpEntry;
+
+	private DslQueryProcessor dslQueryProcessor;
+
+	private SchemaVersions schemaVersions;
+
+	private String basePath;
+
+	/**
+	 * Wires the endpoint's collaborators.
+	 *
+	 * @param traversalUriHttpEntry entry point for graph transactions
+	 * @param dslQueryProcessor     translates DSL text into a gremlin query
+	 * @param schemaVersions        known schema versions
+	 * @param basePath              configured REST base path passed to formatters
+	 */
+	@Autowired
+	public QueryConsumer(
+		HttpEntry traversalUriHttpEntry,
+		DslQueryProcessor dslQueryProcessor,
+		SchemaVersions schemaVersions,
+		@Value("${schema.uri.base.path}") String basePath
+	){
+		this.traversalUriHttpEntry = traversalUriHttpEntry;
+		this.dslQueryProcessor     = dslQueryProcessor;
+		this.basePath                           = basePath;
+		this.schemaVersions                 = schemaVersions;
+	}
+
+
+	/**
+	 * PUT entry point; wraps the actual query execution in the standard
+	 * graphadmin timeout runner.
+	 */
+	@PUT
+	@Consumes({ MediaType.APPLICATION_JSON})
+	@Produces({ MediaType.APPLICATION_JSON})
+	public Response executeQuery(String content, @PathParam("version")String versionParam, @PathParam("uri") @Encoded String uri, @DefaultValue("graphson") @QueryParam("format") String queryFormat,@DefaultValue("no_op") @QueryParam("subgraph") String subgraph, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req){
+		return runner(AAIConstants.AAI_GRAPHADMIN_TIMEOUT_ENABLED,
+				AAIConstants.AAI_GRAPHADMIN_TIMEOUT_APP,
+				AAIConstants.AAI_GRAPHADMIN_TIMEOUT_LIMIT,
+				headers,
+				info,
+				HttpMethod.GET,
+				new AaiCallable<Response>() {
+					@Override
+					public Response process() {
+						return processExecuteQuery(content, versionParam, uri, queryFormat, subgraph, headers, info, req);
+					}
+				}
+		);
+	}
+
+	/**
+	 * Parses the request body (a JSON object carrying either a "gremlin" or a
+	 * "dsl" query string), executes the query, and formats the result.  The
+	 * graph transaction is always rolled back in the finally block.
+	 */
+	public Response processExecuteQuery(String content, @PathParam("version")String versionParam, @PathParam("uri") @Encoded String uri, @DefaultValue("graphson") @QueryParam("format") String queryFormat,@DefaultValue("no_op") @QueryParam("subgraph") String subgraph, @Context HttpHeaders headers, @Context UriInfo info, @Context HttpServletRequest req) {
+
+		String methodName = "executeQuery";
+		String sourceOfTruth = headers.getRequestHeaders().getFirst("X-FromAppId");
+		String realTime = headers.getRequestHeaders().getFirst("Real-Time");
+		String queryProcessor = headers.getRequestHeaders().getFirst("QueryProcessor");
+		QueryProcessorType processorType = this.processorType;
+		Response response = null;
+		TransactionalGraphEngine dbEngine = null;
+		try {
+			LoggingContext.save();
+			this.checkQueryParams(info.getQueryParameters());
+			Format format = Format.getFormat(queryFormat);
+			if (queryProcessor != null) {
+				// Header overrides the default processor type.
+				processorType = QueryProcessorType.valueOf(queryProcessor);
+			}
+			SubGraphStyle subGraphStyle = SubGraphStyle.valueOf(subgraph);
+			JsonParser parser = new JsonParser();
+			
+			JsonObject input = parser.parse(content).getAsJsonObject();
+			
+			JsonElement gremlinElement = input.get("gremlin");
+			JsonElement dslElement = input.get("dsl");
+			String queryURI = "";
+			String gremlin = "";
+			String dsl = "";
+			
+			SchemaVersion version = new SchemaVersion(versionParam);
+			DBConnectionType type = this.determineConnectionType(sourceOfTruth, realTime);
+			traversalUriHttpEntry.setHttpEntryProperties(version, type);
+			dbEngine = traversalUriHttpEntry.getDbEngine();
+
+			if (gremlinElement != null) {
+				gremlin = gremlinElement.getAsString();
+			}
+			if (dslElement != null) {
+				dsl = dslElement.getAsString();
+			}
+			GenericQueryProcessor processor = null;
+			
+			LoggingContext.targetEntity(TARGET_ENTITY);
+			LoggingContext.targetServiceName(methodName);
+			LoggingContext.startTime();
+			StopWatch.conditionalStart();
+			
+			// Prefer the DSL query when present; otherwise run the raw gremlin.
+			if(!dsl.equals("")){
+				processor =  new GenericQueryProcessor.Builder(dbEngine)
+						.queryFrom(dsl, "dsl")
+						.queryProcessor(dslQueryProcessor)
+						.processWith(processorType).create();
+			}else {
+				processor =  new GenericQueryProcessor.Builder(dbEngine)
+						.queryFrom(gremlin, "gremlin")
+						.processWith(processorType).create();
+			}
+
+			String result = "";
+			List<Object> vertices = processor.execute(subGraphStyle);
+		
+			DBSerializer serializer = new DBSerializer(version, dbEngine, introspectorFactoryType, sourceOfTruth);
+			FormatFactory ff = new FormatFactory(traversalUriHttpEntry.getLoader(), serializer, schemaVersions, basePath);
+			
+			Formatter formater =  ff.get(format, info.getQueryParameters());
+		
+			result = formater.output(vertices).toString();
+
+			double msecs = StopWatch.stopIfStarted();
+			LoggingContext.elapsedTime((long)msecs,TimeUnit.MILLISECONDS);
+			LoggingContext.successStatusFields();
+			LOGGER.info ("Completed");
+			
+			response = Response.status(Status.OK)
+					.type(MediaType.APPLICATION_JSON)
+					.entity(result).build();
+		
+		} catch (AAIException e) {
+			response = consumerExceptionResponseGenerator(headers, info, HttpMethod.GET, e);
+		} catch (Exception e ) {
+			AAIException ex = new AAIException("AAI_4000", e);
+			response = consumerExceptionResponseGenerator(headers, info, HttpMethod.GET, ex);
+		} finally {
+			LoggingContext.restoreIfPossible();
+			LoggingContext.successStatusFields();
+			if (dbEngine != null) {
+				// Read-only endpoint: always roll back rather than commit.
+				dbEngine.rollback();
+			}
+			
+		}
+		
+		return response;
+	}
+	
+	/**
+	 * Rejects unsupported query parameters.
+	 *
+	 * @throws AAIException AAI_3303 when a numeric depth parameter exceeds 1
+	 */
+	public void checkQueryParams(MultivaluedMap<String, String> params) throws AAIException {
+		
+		if (params.containsKey("depth") && params.getFirst("depth").matches("\\d+")) {
+			String depth = params.getFirst("depth");
+			Integer i = Integer.parseInt(depth);
+			if (i > 1) {
+				throw new AAIException("AAI_3303");
+			}
+		}
+		
+		
+	}
+
+}
diff --git a/src/main/java/org/onap/aai/rest/dsl/DslListener.java b/src/main/java/org/onap/aai/rest/dsl/DslListener.java
new file mode 100644 (file)
index 0000000..e41a946
--- /dev/null
@@ -0,0 +1,314 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest.dsl;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.antlr.v4.runtime.tree.TerminalNode;
+import org.onap.aai.AAIDslBaseListener;
+import org.onap.aai.AAIDslParser;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.edges.EdgeRule;
+import org.onap.aai.edges.EdgeRuleQuery;
+import org.onap.aai.edges.exceptions.AmbiguousRuleChoiceException;
+import org.onap.aai.edges.exceptions.EdgeRuleNotFoundException;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * The Class DslListener.
+ */
+public class DslListener extends AAIDslBaseListener {
+
+	private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DslQueryProcessor.class);
+	// Edge rules used to decide between COUSIN and TREE traversals.
+	private final EdgeIngestor edgeRules;
+
+	//TODO Use StringBuilder to build the query than concat
+	String query = "";
+
+	// unionKey -> node the union was opened on; node -> pending flag (e.g. "store").
+	Map<Integer, String> unionMap = new HashMap<>();
+	Map<String, String> flags = new HashMap<>();
+
+	// Cursor over the nodes seen while walking the parse tree.
+	String currentNode = "";
+	String prevsNode = "";
+	int commas = 0;
+
+	// Union bookkeeping: current union id and how many members remain to open.
+	int unionKey = 0;
+	int unionMembers = 0;
+	boolean isUnionBeg = false;
+	boolean isUnionTraversal = false;
+
+	boolean isTraversal = false;
+	boolean isWhereTraversal = false;
+	String whereTraversalNode = "";
+
+	// Suffix appended at query exit (e.g. a .limit(...) step), if any.
+	String limitQuery = "";
+	boolean isNot = false;
+
+	/**
+	 * Instantiates a new DslListener.
+	 *
+	 * @param edgeIngestor supplies the edge rules consulted while generating traversals
+	 */
+	@Autowired
+	public DslListener(EdgeIngestor edgeIngestor) {
+		this.edgeRules = edgeIngestor;
+	}
+
+	/** Seeds the generated query with the root "builder" object when parsing begins. */
+	@Override
+	public void enterAaiquery(AAIDslParser.AaiqueryContext ctx) {
+		query += "builder";
+	}
+
+	/**
+	 * Called for every query statement; maintains union bookkeeping so each
+	 * additional union member starts from a fresh builder instance.
+	 */
+	@Override
+	public void enterDslStatement(AAIDslParser.DslStatementContext ctx) {
+		// LOGGER.info("Statement Enter"+ctx.getText());
+		/*
+		 * This block of code is entered for every query statement
+		 */
+		if (isUnionBeg) {
+			// First member of a union: the opening builder instance was already
+			// emitted by enterUnionQueryStep.
+			isUnionBeg = false;
+			isUnionTraversal = true;
+
+		} else if (unionMembers > 0) {
+			// Subsequent union member: open another builder instance.
+			unionMembers--;
+			query += ",builder.newInstance()";
+			isUnionTraversal = true;
+		}
+
+	}
+
+	/** No per-statement cleanup is required on exit. */
+	@Override
+	public void exitDslStatement(AAIDslParser.DslStatementContext ctx) {
+		/*
+		 * Nothing to be done here for now
+		 * LOGGER.info("Statement Exit"+ctx.getText());
+		 */
+	}
+
+	/**
+	 * Finishes the query: de-duplicates the stored results (dedup is the
+	 * default for all queries) and appends any limit suffix collected earlier.
+	 */
+	@Override
+	public void exitAaiquery(AAIDslParser.AaiqueryContext ctx) {
+		/*
+		 * dedup is by default for all queries If the query has limit in it
+		 * include this as well LOGGER.info("Statement Exit"+ctx.getText());
+		 */
+
+		query += ".cap('x').unfold().dedup()" + limitQuery;
+	}
+
+       /*
+        * TODO: The contexts are not inherited from a single parent in AAIDslParser
+        * Need to find a way to do that
+        */
+	/**
+	 * Advances the node cursor, emits the step for this node, and records a
+	 * store flag ("*") so the matching exit callback can append .store('x').
+	 */
+	@Override
+	public void enterSingleNodeStep(AAIDslParser.SingleNodeStepContext ctx) {
+		
+		prevsNode = currentNode;
+		currentNode = ctx.NODE().getText();
+
+		this.generateQuery();
+		if (ctx.STORE() != null && ctx.STORE().getText().equals("*")) {
+			flags.put(currentNode, "store");
+		}
+
+	}
+
+	/**
+	 * Same handling as a single node step: advance the cursor, emit the step,
+	 * and record a store flag when the node is marked with "*".
+	 */
+	@Override
+	public void enterSingleQueryStep(AAIDslParser.SingleQueryStepContext ctx) {
+		
+		prevsNode = currentNode;
+		currentNode = ctx.NODE().getText();
+		this.generateQuery();
+
+		if (ctx.STORE() != null && ctx.STORE().getText().equals("*")) {
+			flags.put(currentNode, "store");
+		}
+	}
+
+	/**
+	 * Same handling as a single query step: advance the cursor, emit the step,
+	 * and record a store flag when the node is marked with "*".
+	 */
+	@Override
+	public void enterMultiQueryStep(AAIDslParser.MultiQueryStepContext ctx) {
+		
+		prevsNode = currentNode;
+		currentNode = ctx.NODE().getText();
+		this.generateQuery();
+		
+		if (ctx.STORE() != null && ctx.STORE().getText().equals("*")) {
+			flags.put(currentNode, "store");
+		}
+
+	}
+
+	/**
+	 * Appends the QueryBuilder step for the current node: an edge traversal
+	 * from the previous node when inside a traversal, or a vertex lookup by
+	 * aai-node-type otherwise.
+	 */
+	private void generateQuery() {
+		String edgeType = "";
+
+		if (isUnionTraversal || isTraversal || isWhereTraversal) {
+			String previousNode = prevsNode;
+			if (isUnionTraversal) {
+				// Every union member starts from the node the union was opened
+				// on, not from the member parsed just before it.
+				previousNode = unionMap.get(unionKey);
+				isUnionTraversal = false;
+			}
+
+			EdgeRuleQuery edgeRuleQuery = new EdgeRuleQuery.Builder(previousNode, currentNode).build();
+			EdgeRule edgeRule = null;
+
+			try {
+				edgeRule = edgeRules.getRule(edgeRuleQuery);
+			} catch (EdgeRuleNotFoundException | AmbiguousRuleChoiceException e) {
+				// No single rule could be resolved; log it and fall through to
+				// treating the relationship as a COUSIN edge (previously this
+				// was swallowed silently).
+				LOGGER.debug("Unable to resolve an edge rule between " + previousNode + " and " + currentNode, e);
+			}
+
+			// A missing rule or a containment of "none" both mean COUSIN.
+			if (edgeRule == null || "none".equalsIgnoreCase(edgeRule.getContains())) {
+				edgeType = "EdgeType.COUSIN";
+			} else {
+				edgeType = "EdgeType.TREE";
+			}
+
+			query += ".createEdgeTraversal(" + edgeType + ", '" + previousNode + "','" + currentNode + "')";
+
+		} else {
+			query += ".getVerticesByProperty('aai-node-type', '" + currentNode + "')";
+		}
+	}
+
	/** Exit hook for a single node step: appends the store step if this node was flagged. */
	@Override
	public void exitSingleNodeStep(AAIDslParser.SingleNodeStepContext ctx) {

		generateExitStep();
	}
+
	/** Exit hook for a single query step: appends the store step if this node was flagged. */
	@Override
	public void exitSingleQueryStep(AAIDslParser.SingleQueryStepContext ctx) {
		generateExitStep();
	}
+
	/** Exit hook for a multi query step: appends the store step if this node was flagged. */
	@Override
	public void exitMultiQueryStep(AAIDslParser.MultiQueryStepContext ctx) {
		generateExitStep();

	}
+
+       private void generateExitStep() {
+               if (flags.containsKey(currentNode)) {
+                       String storeFlag = flags.get(currentNode);
+                       if (storeFlag != null && storeFlag.equals("store"))
+                               query += ".store('x')";
+                       flags.remove(currentNode);
+               }
+       }
+
+       @Override
+       public void enterUnionQueryStep(AAIDslParser.UnionQueryStepContext ctx) {
+               isUnionBeg = true;
+
+               unionKey++;
+               unionMap.put(unionKey, currentNode);
+               query += ".union(builder.newInstance()";
+
+               List<TerminalNode> commaNodes = ctx.COMMA();
+
+               for (TerminalNode node : commaNodes) {
+                       unionMembers++;
+               }
+       }
+
+       @Override
+       public void exitUnionQueryStep(AAIDslParser.UnionQueryStepContext ctx) {
+               isUnionBeg = false;
+               unionMap.remove(unionKey);
+
+               query += ")";
+               unionKey--;
+
+       }
+
	/**
	 * Entry hook for a where-style filter traversal: remembers the node the
	 * traversal started from and opens a nested where(...) clause.
	 */
	@Override
	public void enterFilterTraverseStep(AAIDslParser.FilterTraverseStepContext ctx) {
		isWhereTraversal = true;
		whereTraversalNode = currentNode;
		query += ".where(builder.newInstance()";
	}
+
	/**
	 * Exit hook for a where-style filter traversal: closes the where(...)
	 * clause and restores the node pointer to the traversal's start node.
	 */
	@Override
	public void exitFilterTraverseStep(AAIDslParser.FilterTraverseStepContext ctx) {
		query += ")";
		isWhereTraversal = false;
		currentNode = whereTraversalNode;
	}
+
	/**
	 * Entry hook for a filter step such as ('prop','value') or
	 * !('prop','value'): emits a getVerticesByProperty /
	 * getVerticesExcludeByProperty fragment using the raw KEY tokens.
	 */
	@Override
	public void enterFilterStep(AAIDslParser.FilterStepContext ctx) {
		// A '!' prefix inverts the filter into an exclude lookup.
		if (ctx.NOT() != null && ctx.NOT().getText().equals("!"))
			isNot = true;

		List<TerminalNode> nodes = ctx.KEY();
		// First KEY token is the property name; the remaining ones are values.
		String key = ctx.KEY(0).getText();

		if (isNot) {
			query += ".getVerticesExcludeByProperty(";
			isNot = false;
		} else
			query += ".getVerticesByProperty(";

		// Exactly one value: emit "key,value)".
		if (nodes.size() == 2) {
			query += key + "," + ctx.KEY(1).getText();
			query += ")";
		}

		// NOTE(review): for more than one value only the single '(' above was
		// emitted, yet "key,value)" is appended per value, producing e.g.
		// "(key,v1)key,v2)" — verify this is what the QueryBuilder expects.
		if (nodes.size() > 2) {

			for (TerminalNode node : nodes) {
				// NOTE(review): values are skipped by *text* equality with the
				// key, so a value that happens to equal the property name is
				// silently dropped — confirm this is intended.
				if (node.getText().equals(key))
					continue;

				query += key + "," + node.getText();
				query += ")";
			}

		}

	}
+
	/** Exit hook for a filter step; nothing needs to be emitted on exit. */
	@Override
	public void exitFilterStep(AAIDslParser.FilterStepContext ctx) {
		// For now do nothing
	}
+
	/** Marks that the next query step should be generated as an edge traversal. */
	@Override
	public void enterTraverseStep(AAIDslParser.TraverseStepContext ctx) {
		isTraversal = true;
	}
+
	/** Clears the traversal marker once the traverse step has been consumed. */
	@Override
	public void exitTraverseStep(AAIDslParser.TraverseStepContext ctx) {
		isTraversal = false;
	}
+
+       @Override
+       public void enterLimitStep(AAIDslParser.LimitStepContext ctx) {
+               String value = ctx.NODE().getText();
+               limitQuery += ".limit(" + value + ")";
+       }
+}
diff --git a/src/main/java/org/onap/aai/rest/dsl/DslQueryProcessor.java b/src/main/java/org/onap/aai/rest/dsl/DslQueryProcessor.java
new file mode 100644 (file)
index 0000000..582f0ea
--- /dev/null
@@ -0,0 +1,85 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest.dsl;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.antlr.v4.runtime.CharStreams;
+import org.antlr.v4.runtime.CommonTokenStream;
+import org.antlr.v4.runtime.tree.ParseTree;
+import org.antlr.v4.runtime.tree.ParseTreeWalker;
+import org.onap.aai.AAIDslLexer;
+import org.onap.aai.AAIDslParser;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * The Class DslQueryProcessor.
+ */
+public class DslQueryProcessor {
+
+       private static final EELFLogger LOGGER = EELFManager.getInstance().getLogger(DslQueryProcessor.class);
+
+       private DslListener dslListener;
+
+       @Autowired
+       public DslQueryProcessor(DslListener dslListener){
+               this.dslListener = dslListener;
+       }
+
+       public String parseAaiQuery(String aaiQuery) {
+               try {
+                       // Create a input stream that reads our string
+                       InputStream stream = new ByteArrayInputStream(aaiQuery.getBytes(StandardCharsets.UTF_8));
+
+                       // Create a lexer from the input CharStream
+                       AAIDslLexer lexer = new AAIDslLexer(CharStreams.fromStream(stream, StandardCharsets.UTF_8));
+
+                       // Get a list of tokens pulled from the lexer
+                       CommonTokenStream tokens = new CommonTokenStream(lexer);
+
+                       
+                       // Parser that feeds off of the tokens buffer
+                       AAIDslParser parser = new AAIDslParser(tokens);
+
+                       // Specify our entry point
+                       ParseTree ptree = parser.aaiquery();
+                       LOGGER.info("QUERY-interim" + ptree.toStringTree(parser));
+
+                       // Walk it and attach our listener
+                       ParseTreeWalker walker = new ParseTreeWalker();
+                       walker.walk(dslListener, ptree);
+                       LOGGER.info("Final QUERY" + dslListener.query);
+
+                       /*
+                        * TODO - Visitor patternQueryDslVisitor visitor = new
+                        * QueryDslVisitor(); String query = visitor.visit(ptree);
+                        * 
+                        */
+                       return dslListener.query;
+               } catch (Exception e) {
+                       LOGGER.error("Error while processing the query"+e.getMessage());
+               }
+               return "";
+       }
+}
diff --git a/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java b/src/main/java/org/onap/aai/rest/search/GenericQueryProcessor.java
new file mode 100644 (file)
index 0000000..2431d11
--- /dev/null
@@ -0,0 +1,226 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest.search;
+
+import org.apache.tinkerpop.gremlin.process.traversal.P;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.javatuples.Pair;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.rest.dsl.DslQueryProcessor;
+import org.onap.aai.restcore.search.GroovyQueryBuilderSingleton;
+import org.onap.aai.restcore.util.URITools;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.serialization.queryformats.SubGraphStyle;
+
+import javax.ws.rs.core.MultivaluedHashMap;
+import javax.ws.rs.core.MultivaluedMap;
+import java.io.FileNotFoundException;
+import java.net.URI;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public abstract class GenericQueryProcessor {
+
+       protected final Optional<URI> uri;
+       protected final MultivaluedMap<String, String> queryParams;
+       protected final Optional<Collection<Vertex>> vertices;
+       protected static Pattern p = Pattern.compile("query/(.*+)");
+       protected Optional<String> gremlin;
+       protected final TransactionalGraphEngine dbEngine;
+       protected static GroovyQueryBuilderSingleton queryBuilderSingleton = GroovyQueryBuilderSingleton.getInstance();
+       protected final boolean isGremlin;
+       protected Optional<DslQueryProcessor> dslQueryProcessorOptional;
+       /* dsl parameters to store dsl query and to check
+        * if this is a DSL request
+        */
+       protected Optional<String> dsl;
+       protected final boolean isDsl ;
+       
+       protected GenericQueryProcessor(Builder builder) {
+               this.uri = builder.getUri();
+               this.dbEngine = builder.getDbEngine();
+               this.vertices = builder.getVertices();
+               this.gremlin = builder.getGremlin();
+               this.isGremlin = builder.isGremlin();
+               this.dsl = builder.getDsl();
+               this.isDsl = builder.isDsl();
+               this.dslQueryProcessorOptional = builder.getDslQueryProcessor();
+               
+               if (uri.isPresent()) {
+                       queryParams = URITools.getQueryMap(uri.get());
+               } else {
+                       queryParams = new MultivaluedHashMap<>();
+               }
+       }
+       
+       protected abstract GraphTraversal<?,?> runQuery(String query, Map<String, Object> params);
+       
+       protected List<Object> processSubGraph(SubGraphStyle style, GraphTraversal<?,?> g) {
+               final List<Object> resultVertices = new Vector<>();
+               g.store("y");
+               
+               if (SubGraphStyle.prune.equals(style) || SubGraphStyle.star.equals(style)) {
+                       g.barrier().bothE();
+                       if (SubGraphStyle.prune.equals(style)) {
+                               g.where(__.otherV().where(P.within("y")));
+                       }
+                       g.dedup().subgraph("subGraph").cap("subGraph").map(x -> (Graph)x.get()).next().traversal().V().forEachRemaining(x -> {
+                               resultVertices.add(x);
+                       });
+               } else {
+                       resultVertices.addAll(g.toList());
+               }
+               return resultVertices;
+       }
+       
+       public List<Object> execute(SubGraphStyle style) throws FileNotFoundException, AAIException {
+               final List<Object> resultVertices;
+
+               Pair<String, Map<String, Object>> tuple = this.createQuery();
+               String query = tuple.getValue0();
+               Map<String, Object> params = tuple.getValue1();
+
+               if (query.equals("") && (vertices.isPresent() && vertices.get().isEmpty())) {
+                       //nothing to do, just exit
+                       return new ArrayList<>();
+               }
+               GraphTraversal<?,?> g = this.runQuery(query, params);
+               
+               resultVertices = this.processSubGraph(style, g);
+               
+               return resultVertices;
+       }
+       
+       protected Pair<String, Map<String, Object>> createQuery() throws AAIException {
+               Map<String, Object> params = new HashMap<>();
+               String query = "";
+                if (this.isGremlin) {
+                       query = gremlin.get();
+                       
+               }else if (this.isDsl) {
+                       String dslUserQuery = dsl.get();
+                       if(dslQueryProcessorOptional.isPresent()){
+                               String dslQuery = dslQueryProcessorOptional.get().parseAaiQuery(dslUserQuery);
+                               query = queryBuilderSingleton.executeTraversal(dbEngine, dslQuery, params);
+                               String startPrefix = "g.V()";
+                               query = startPrefix + query;
+                       }
+               }
+               
+               return new Pair<>(query, params);
+       }
+       
+       public static class Builder {
+
+               private final TransactionalGraphEngine dbEngine;
+               private Optional<URI> uri = Optional.empty();
+               private Optional<String> gremlin = Optional.empty();
+               private boolean isGremlin = false;
+               private Optional<Collection<Vertex>> vertices = Optional.empty();
+               private QueryProcessorType processorType = QueryProcessorType.GREMLIN_SERVER;
+               
+               private Optional<String> dsl = Optional.empty();
+               private boolean isDsl = false;
+               private DslQueryProcessor dslQueryProcessor;
+
+               public Builder(TransactionalGraphEngine dbEngine) {
+                       this.dbEngine = dbEngine;
+               }
+               
+               public Builder queryFrom(URI uri) {
+                       this.uri = Optional.of(uri);
+                       this.isGremlin = false;
+                       return this;
+               }
+               
+               public Builder startFrom(Collection<Vertex> vertices) {
+                       this.vertices = Optional.of(vertices);
+                       return this;
+               }
+               
+               public Builder queryFrom( String query, String queryType) {
+                       
+                       if(queryType.equals("gremlin")){
+                               this.gremlin = Optional.of(query);
+                               this.isGremlin = true;
+                       }
+                       if(queryType.equals("dsl")){
+                               this.dsl = Optional.of(query);
+                               this.isDsl = true;
+                       }
+                       return this;
+               }
+               
+               public Builder processWith(QueryProcessorType type) {
+                       this.processorType = type;
+                       return this;
+               }
+
+               public Builder queryProcessor(DslQueryProcessor dslQueryProcessor){
+                       this.dslQueryProcessor = dslQueryProcessor;
+                       return this;
+               }
+
+               public Optional<DslQueryProcessor> getDslQueryProcessor(){
+                       return Optional.ofNullable(this.dslQueryProcessor);
+               }
+
+               public TransactionalGraphEngine getDbEngine() {
+                       return dbEngine;
+               }
+
+               public Optional<URI> getUri() {
+                       return uri;
+               }
+
+               public Optional<String> getGremlin() {
+                       return gremlin;
+               }
+
+               public boolean isGremlin() {
+                       return isGremlin;
+               }
+               
+               public Optional<String> getDsl() {
+                       return dsl;
+               }
+
+               public boolean isDsl() {
+                       return isDsl;
+               }
+
+               public Optional<Collection<Vertex>> getVertices() {
+                       return vertices;
+               }
+               
+               public QueryProcessorType getProcessorType() {
+                       return processorType;
+               }
+               
+               public GenericQueryProcessor create() {
+                       return new GroovyShellImpl(this);
+               }
+               
+       }
+}
diff --git a/src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java b/src/main/java/org/onap/aai/rest/search/GroovyShellImpl.java
new file mode 100644 (file)
index 0000000..3db4301
--- /dev/null
@@ -0,0 +1,45 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest.search;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.onap.aai.restcore.search.GremlinGroovyShellSingleton;
+
+import java.util.Map;
+
/**
 * Query processor that runs the gremlin-groovy query text locally through
 * the shared {@link GremlinGroovyShellSingleton}.
 */
public class GroovyShellImpl extends GenericQueryProcessor {

	protected GroovyShellImpl(Builder builder) {
		super(builder);
	}
	
	/**
	 * Runs the query in the groovy shell, binding the engine's traversal
	 * source as "g" so the query text can reference it.
	 */
	@Override
	protected GraphTraversal<?,?> runQuery(String query, Map<String, Object> params) {

		params.put("g", this.dbEngine.asAdmin().getTraversalSource());
		
		GremlinGroovyShellSingleton shell = GremlinGroovyShellSingleton.getInstance();
		
		return shell.executeTraversal(query, params);
	}
		
}
+
+
diff --git a/src/main/java/org/onap/aai/rest/search/QueryProcessorType.java b/src/main/java/org/onap/aai/rest/search/QueryProcessorType.java
new file mode 100644 (file)
index 0000000..c8e1d14
--- /dev/null
@@ -0,0 +1,26 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest.search;
+
/** The supported query execution mechanisms for the search processors. */
public enum QueryProcessorType {

	/** Execute via a gremlin server. */
	GREMLIN_SERVER,
	/** Execute locally through the groovy shell. */
	LOCAL_GROOVY
}
diff --git a/src/main/java/org/onap/aai/rest/util/EchoResponse.java b/src/main/java/org/onap/aai/rest/util/EchoResponse.java
new file mode 100644 (file)
index 0000000..05ff38e
--- /dev/null
@@ -0,0 +1,122 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.rest.util;
+
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.restcore.RESTAPI;
+import org.springframework.stereotype.Component;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+/**
+ * The Class EchoResponse.
+ */
+@Component
+@Path("/util")
+public class EchoResponse extends RESTAPI {
+       
+       protected static String authPolicyFunctionName = "util";
+               
+       public static final String echoPath = "/util/echo";
+
+       /**
+        * Simple health-check API that echos back the X-FromAppId and X-TransactionId to clients.
+        * If there is a query string, a transaction gets logged into hbase, proving the application is connected to the data store.
+        * If there is no query string, no transacction logging is done to hbase.
+        *
+        * @param headers the headers
+        * @param req the req
+        * @param myAction if exists will cause transaction to be logged to hbase
+        * @return the response
+        */
+       @GET
+       @Produces( { MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
+       @Path("/echo")
+       public Response echoResult(@Context HttpHeaders headers, @Context HttpServletRequest req,
+                       @QueryParam("action") String myAction) {
+               Response response = null;
+               
+               AAIException ex = null;
+               String fromAppId = null;
+               String transId = null;
+               
+               try { 
+                       fromAppId = getFromAppId(headers );
+                       transId = getTransId(headers);
+               } catch (AAIException e) { 
+                       ArrayList<String> templateVars = new ArrayList<String>();
+                       templateVars.add("PUT uebProvider");
+                       templateVars.add("addTopic");
+                       return Response
+                                       .status(e.getErrorObject().getHTTPResponseCode())
+                                       .entity(ErrorLogHelper.getRESTAPIErrorResponse(headers.getAcceptableMediaTypes(), e, templateVars))
+                                       .build();
+               }
+               
+               try {
+                       
+                       HashMap<AAIException, ArrayList<String>> exceptionList = new HashMap<AAIException, ArrayList<String>>();
+                                       
+                       ArrayList<String> templateVars = new ArrayList<String>();
+                       templateVars.add(fromAppId);
+                       templateVars.add(transId);
+               
+                       exceptionList.put(new AAIException("AAI_0002", "OK"), templateVars);
+                               
+                       response = Response.status(Status.OK)
+                                       .entity(ErrorLogHelper.getRESTAPIInfoResponse(
+                                                       headers.getAcceptableMediaTypes(), exceptionList))
+                                                       .build();
+                       
+               } catch (Exception e) {
+                       ex = new AAIException("AAI_4000", e);
+                       ArrayList<String> templateVars = new ArrayList<String>();
+                       templateVars.add(Action.GET.name());
+                       templateVars.add(fromAppId +" "+transId);
+
+                       response = Response
+                                       .status(Status.INTERNAL_SERVER_ERROR)
+                                       .entity(ErrorLogHelper.getRESTAPIErrorResponse(
+                                                       headers.getAcceptableMediaTypes(), ex,
+                                                       templateVars)).build();
+
+               } finally {
+                       if (ex != null) {
+                               ErrorLogHelper.logException(ex);
+                       }
+
+               }
+               
+               return response;
+       }
+
+}
diff --git a/src/main/java/org/onap/aai/schema/GenTester.java b/src/main/java/org/onap/aai/schema/GenTester.java
new file mode 100644 (file)
index 0000000..812c7b0
--- /dev/null
@@ -0,0 +1,162 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.schema;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.schema.JanusGraphManagement;
+import org.onap.aai.dbgen.SchemaGenerator;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.logging.ErrorLogHelper;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.logging.LoggingContext.StatusCode;
+import org.onap.aai.util.AAIConfig;
+import org.onap.aai.util.AAIConstants;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+import java.util.Properties;
+import java.util.UUID;
+
+
/**
 * Command-line tool that opens the JanusGraph graph and loads the A&amp;AI
 * schema (property keys, indexes, edge labels) into it. Supported
 * arguments: none (normal schema load), "GEN_DB_WITH_NO_SCHEMA",
 * "GEN_DB_WITH_NO_DEFAULT_CR"; "genDbRulesOnly" is rejected as obsolete.
 */
public class GenTester {

	private static EELFLogger LOGGER;
	
	/**
	 * The main method.
	 *
	 * @param args the arguments
	 */
	public static void main(String[] args) {
	   
		JanusGraph graph = null;
		System.setProperty("aai.service.name", GenTester.class.getSimpleName());
		// Set the logging file properties to be used by EELFManager
		Properties props = System.getProperties();
		props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS);
		props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);
		LOGGER = EELFManager.getInstance().getLogger(GenTester.class);
		boolean addDefaultCR = true;
		
		LoggingContext.init();
		LoggingContext.component("DBGenTester");
		LoggingContext.partnerName("AAI-TOOLS");
		LoggingContext.targetEntity("AAI");
		LoggingContext.requestId(UUID.randomUUID().toString());
		LoggingContext.serviceName("AAI");
		LoggingContext.targetServiceName("main");
		LoggingContext.statusCode(StatusCode.COMPLETE);
		LoggingContext.responseCode(LoggingContext.SUCCESS);

		// NOTE(review): this Spring context appears to be created only for
		// the side effects of scanning org.onap.aai.config/setup beans; the
		// reference is never used afterwards and never closed — confirm.
		AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
				"org.onap.aai.config",
				"org.onap.aai.setup"
		);

		try {
			AAIConfig.init();
		if (args != null && args.length > 0 ){
			// Obsolete option: DbRules are now derived from the OXM files.
			if( "genDbRulesOnly".equals(args[0]) ){
				ErrorLogHelper.logError("AAI_3100", 
						" This option is no longer supported. What was in DbRules is now derived from the OXM files. ");
				return;
			}
			else if ( "GEN_DB_WITH_NO_SCHEMA".equals(args[0]) ){
				// Note this is done to create an empty DB with no Schema so that
					// an HBase copyTable can be used to set up a copy of the db.
					String imsg = "    ---- NOTE --- about to load a graph without doing any schema processing (takes a little while) --------   ";
			System.out.println(imsg);
			LOGGER.info(imsg);
					graph = AAIGraph.getInstance().getGraph();
				
			       if( graph == null ){
					   ErrorLogHelper.logError("AAI_5102", "Error creating JanusGraph graph.");
				   return;
			       }
			       else {
				   String amsg = "Successfully loaded a JanusGraph graph without doing any schema work.  ";
				   System.out.println(amsg);
				   LOGGER.auditEvent(amsg);
				   // Early return: no schema load and no commit is wanted here.
				   return;
			       }
			} else if ("GEN_DB_WITH_NO_DEFAULT_CR".equals(args[0])) {
				addDefaultCR = false;
			}
			else {
				// Unknown argument: log, explain the valid options, and exit.
				ErrorLogHelper.logError("AAI_3000", "Unrecognized argument passed to GenTester.java: [" + args[0] + "]. ");
				
				String emsg = "Unrecognized argument passed to GenTester.java: [" + args[0] + "]. ";
				System.out.println(emsg);
				LoggingContext.statusCode(StatusCode.ERROR);
				LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);
				LOGGER.error(emsg);
				
				emsg = "Either pass no argument for normal processing, or use 'GEN_DB_WITH_NO_SCHEMA'.";
				System.out.println(emsg);
				LOGGER.error(emsg);
				
				return;
			}
		}
		
			//AAIConfig.init();
			ErrorLogHelper.loadProperties();
			String imsg = "    ---- NOTE --- about to open graph (takes a little while)--------;";
		System.out.println(imsg);
		LOGGER.info(imsg);
			graph = AAIGraph.getInstance().getGraph();
		
			if( graph == null ){
				ErrorLogHelper.logError("AAI_5102", "Error creating JanusGraph graph. ");
				return;
			}

			// Load the propertyKeys, indexes and edge-Labels into the DB
			JanusGraphManagement graphMgt = graph.openManagement();

		imsg = "-- Loading new schema elements into JanusGraph --";
			System.out.println(imsg);
			LOGGER.info(imsg);
			SchemaGenerator.loadSchemaIntoJanusGraph(graph, graphMgt, null);
	    } catch(Exception ex) {
		// NOTE(review): only the message is logged; the stack trace is lost
		// and execution still falls through to the commit/close block below.
		ErrorLogHelper.logError("AAI_4000", ex.getMessage());
	    }
	    

	    if( graph != null ){
		    String imsg = "-- graph commit";
		System.out.println(imsg);
		LOGGER.info(imsg);
		graph.tx().commit();

			imsg = "-- graph shutdown ";
		System.out.println(imsg);
		LOGGER.info(imsg);
		graph.close();
	    }
	    
	    LOGGER.auditEvent("-- all done, if program does not exit, please kill.");
	    System.exit(0);
    }

}
+
diff --git a/src/main/java/org/onap/aai/service/AuthorizationService.java b/src/main/java/org/onap/aai/service/AuthorizationService.java
new file mode 100644 (file)
index 0000000..d2597d0
--- /dev/null
@@ -0,0 +1,109 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
package org.onap.aai.service;

import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import org.eclipse.jetty.util.security.Password;
import org.onap.aai.Profiles;
import org.onap.aai.util.AAIConstants;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Base64;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Stream;

/**
 * Loads the basic-auth users from realm.properties at startup and answers
 * whether a given Authorization header value belongs to an "admin" user.
 * Active only under the one-way-SSL profile.
 */
@Profile(Profiles.ONE_WAY_SSL)
@Service
public class AuthorizationService {

    private static final EELFLogger logger = EELFManager.getInstance().getLogger(AuthorizationService.class);

    // Key: base64("username:password") as sent in a Basic auth header; value: access type
    private final Map<String, String> authorizedUsers = new HashMap<>();

    private static final Base64.Encoder ENCODER = Base64.getEncoder();

    /**
     * Reads realm.properties and caches each user entry. Expected line shape:
     * {@code username:OBF:<part1>:<part2>,accessType} (Jetty-obfuscated password).
     * Lines starting with '#' are ignored; malformed lines abort initialization
     * with a RuntimeException so a bad file is caught early.
     */
    @PostConstruct
    public void init(){

        String basicAuthFile = getBasicAuthFilePath();

        try(Stream<String> stream = Files.lines(Paths.get(basicAuthFile))){
            stream.filter(line -> !line.startsWith("#")).forEach(str -> {

                // split() never returns null, so only the length needs checking
                String [] userAccessType = str.split(",");

                if(userAccessType.length != 2){
                    throw new RuntimeException("Please check the realm.properties file as it is not conforming to the basic auth");
                }

                String usernamePassword = userAccessType[0];
                String accessType       = userAccessType[1];

                String[] usernamePasswordArray = usernamePassword.split(":");

                if(usernamePasswordArray.length != 3){
                    throw new RuntimeException("Not a valid entry for the realm.properties entry: " + usernamePassword);
                }

                String username = usernamePasswordArray[0];
                String password = null;

                if(str.contains("OBF:")){
                    // Rejoin the "OBF:<value>" pieces split apart above, then deobfuscate
                    password = usernamePasswordArray[1] + ":" + usernamePasswordArray[2];
                    password = Password.deobfuscate(password);
                }

                // StandardCharsets.UTF_8 avoids the checked UnsupportedEncodingException
                // that getBytes("UTF-8") forced the original code to catch.
                byte[] bytes = ENCODER.encode((username + ":" + password).getBytes(StandardCharsets.UTF_8));

                // BUG FIX: the original additionally stored ENCODER.encode(bytes) — a
                // double-base64-encoded key that no Basic auth header would ever match,
                // and an NPE source when encoding had failed. Store the key exactly once.
                authorizedUsers.put(new String(bytes, StandardCharsets.UTF_8), accessType);
            });
        } catch (IOException e) {
            logger.error("IO Exception occurred during the reading of realm.properties", e);
        }
    }

    /**
     * @param authorization base64("username:password") credential from a Basic auth header
     * @return true only when the credential is known AND its access type is "admin"
     */
    public boolean checkIfUserAuthorized(String authorization){
        return authorizedUsers.containsKey(authorization) && "admin".equals(authorizedUsers.get(authorization));
    }

    /** @return the expected location of realm.properties under the AAI auth directory */
    public String getBasicAuthFilePath(){
        return AAIConstants.AAI_HOME_ETC_AUTH + AAIConstants.AAI_FILESEP + "realm.properties";
    }
}
diff --git a/src/main/java/org/onap/aai/service/RetiredService.java b/src/main/java/org/onap/aai/service/RetiredService.java
new file mode 100644 (file)
index 0000000..5989e31
--- /dev/null
@@ -0,0 +1,67 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.service;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.PropertySource;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import java.util.Arrays;
+import java.util.List;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+@Service
+@PropertySource("classpath:retired.properties")
+@PropertySource(value = "file:${server.local.startpath}/retired.properties")
+public class RetiredService {
+
+    private String retiredPatterns;
+
+    private String retiredAllVersions;
+
+    private List<Pattern> retiredPatternsList;
+    private List<Pattern> retiredAllVersionList;
+
+    @PostConstruct
+    public void initialize(){
+        this.retiredPatternsList = Arrays.stream(retiredPatterns.split(",")).map(Pattern::compile).collect(Collectors.toList());
+        this.retiredAllVersionList = Arrays.stream(retiredAllVersions.split(",")).map(Pattern::compile).collect(Collectors.toList());
+    }
+
+    @Value("${retired.api.pattern.list}")
+    public void setRetiredPatterns(String retiredPatterns){
+        this.retiredPatterns = retiredPatterns;
+    }
+
+    public List<Pattern> getRetiredPatterns(){
+        return retiredPatternsList;
+    }
+
+    @Value("${retired.api.all.versions}")
+    public void setRetiredAllVersions(String retiredPatterns){
+        this.retiredAllVersions = retiredPatterns;
+    }
+
+    public List<Pattern> getRetiredAllVersionList(){
+        return retiredAllVersionList;
+    }
+}
diff --git a/src/main/java/org/onap/aai/util/PositiveNumValidator.java b/src/main/java/org/onap/aai/util/PositiveNumValidator.java
new file mode 100644 (file)
index 0000000..ee58f55
--- /dev/null
@@ -0,0 +1,35 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+import com.beust.jcommander.IParameterValidator;
+import com.beust.jcommander.ParameterException;
+
+public class PositiveNumValidator implements IParameterValidator {
+
+       @Override
+       public void validate(String name, String value) throws ParameterException {
+               int num = Integer.parseInt(value);
+
+               if(num < 0) {
+                       throw new ParameterException("Parameter " + name + " should be >= 0");
+               }
+       }
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/util/SendDeleteMigrationNotifications.java b/src/main/java/org/onap/aai/util/SendDeleteMigrationNotifications.java
new file mode 100644 (file)
index 0000000..d9615b0
--- /dev/null
@@ -0,0 +1,183 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.*;
+import org.onap.aai.migration.EventAction;
+import org.onap.aai.migration.NotificationHelper;
+import org.onap.aai.rest.ueb.UEBNotification;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.slf4j.MDC;
+
+import java.io.IOException;
+import java.net.URI;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.*;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import javax.ws.rs.core.Response.Status;
+
+public class SendDeleteMigrationNotifications {
+
+       protected EELFLogger logger = EELFManager.getInstance().getLogger(SendDeleteMigrationNotifications.class.getSimpleName());
+
+       private String config;
+       private String path;
+       private Set<String> notifyOn;
+       long sleepInMilliSecs;
+       int numToBatch;
+       private String requestId;
+       private EventAction eventAction;
+       private String eventSource;
+
+       protected QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       protected ModelType introspectorFactoryType = ModelType.MOXY;
+       protected Loader loader = null;
+       protected TransactionalGraphEngine engine = null;
+       protected NotificationHelper notificationHelper = null;
+       protected DBSerializer serializer = null;
+       protected final LoaderFactory loaderFactory;
+       protected final SchemaVersions schemaVersions;
+       protected final SchemaVersion version;
+       
+       public SendDeleteMigrationNotifications(LoaderFactory loaderFactory, SchemaVersions schemaVersions, String config, String path, Set<String> notifyOn, int sleepInMilliSecs, int numToBatch, String requestId, EventAction eventAction, String eventSource) {
+               System.setProperty("aai.service.name", SendDeleteMigrationNotifications.class.getSimpleName());
+               Properties props = System.getProperties();
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "migration-logback.xml");
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES);
+
+               MDC.put("logFilenameAppender", SendDeleteMigrationNotifications.class.getSimpleName());
+
+               this.config = config;
+               this.path = path;
+               this.notifyOn = notifyOn;
+               this.sleepInMilliSecs = sleepInMilliSecs;
+               this.numToBatch = numToBatch;
+               this.requestId = requestId;
+               this.eventAction = eventAction;
+               this.eventSource = eventSource;
+               this.loaderFactory = loaderFactory;
+               this.schemaVersions = schemaVersions;
+               this.version  = schemaVersions.getDefaultVersion();
+               initGraph();
+
+               initFields();
+               
+
+       }
+
+       public void process(String basePath) throws Exception {
+
+               try {
+                       Map<Integer, String> deleteDataMap = processFile();
+                       int count = 0;
+                       for (Map.Entry<Integer, String> entry : deleteDataMap.entrySet()) {
+                               logger.info("Processing " + entry.getKey() + " :: Data :: " + entry.getValue());
+                               String data = entry.getValue();
+                               Introspector obj = null;
+                               if (data.contains("#@#")) {
+                                       String[] splitLine = data.split("#@#");
+                                       if (splitLine.length == 3) {
+                                               obj = loader.unmarshal(splitLine[0], splitLine[2]);
+                                               this.notificationHelper.addDeleteEvent(UUID.randomUUID().toString(), splitLine[0], eventAction,
+                                                               URI.create(splitLine[1]), obj, new HashMap(), basePath);
+                                       }
+                               }
+                               count++;
+                               if (count >= this.numToBatch) {
+                                       trigger();
+                                       logger.info("Triggered " + entry.getKey());
+                                       count = 0;
+                                       Thread.sleep(this.sleepInMilliSecs);
+                               }
+                       }
+                       if (count > 0) {
+                               trigger();
+                       }
+                       cleanup();
+               } catch (Exception e) {
+                       // TODO Auto-generated catch block
+                       e.printStackTrace();
+               }
+       }
+
+       protected void trigger() throws AAIException {
+               this.notificationHelper.triggerEvents();
+       }
+
+       private Map<Integer,String> processFile() throws IOException {
+               List<String> lines = Files.readAllLines(Paths.get(path));
+               final Map<Integer,String> data = new LinkedHashMap<>();
+               AtomicInteger counter = new AtomicInteger(0);
+               lines.stream().forEach(line -> {
+                       if (line.contains("#@#")) {
+                               data.put(counter.incrementAndGet(), line);
+                       }
+               });
+               return data;
+       }
+
+       protected void cleanup() {
+               logAndPrint("Events sent, closing graph connections");
+               engine.rollback();
+               AAIGraph.getInstance().graphShutdown();
+               logAndPrint("---------- Done ----------");
+       }
+
+       private void initFields() {
+               this.loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);
+               this.engine = new JanusGraphDBEngine(queryStyle, DBConnectionType.REALTIME, loader);
+               try {
+                       this.serializer = new DBSerializer(version, this.engine, introspectorFactoryType, this.eventSource);
+               } catch (AAIException e) {
+                       throw new RuntimeException("could not create serializer", e);
+               }
+               this.notificationHelper = new NotificationHelper(loader, serializer, loaderFactory, schemaVersions, engine, requestId, this.eventSource);
+       }
+
+       protected void initGraph() {
+               System.setProperty("realtime.db.config", this.config);
+               logAndPrint("\n\n---------- Connecting to Graph ----------");
+               AAIGraph.getInstance();
+               logAndPrint("---------- Connection Established ----------");
+       }
+
+       protected void logAndPrint(String msg) {
+               System.out.println(msg);
+               logger.info(msg);
+       }
+
+
+}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java b/src/main/java/org/onap/aai/util/SendDeleteMigrationNotificationsMain.java
new file mode 100644 (file)
index 0000000..ad96efe
--- /dev/null
@@ -0,0 +1,105 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.migration.EventAction;
+import org.onap.aai.setup.SchemaVersions;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+import java.util.*;
+
+public class SendDeleteMigrationNotificationsMain {
+
+       public static void main(String[] args) {
+
+               Arrays.asList(args).stream().forEach(System.out::println);
+
+               String requestId = UUID.randomUUID().toString();
+               LoggingContext.init();
+               LoggingContext.partnerName("Migration");
+               LoggingContext.serviceName(AAIConstants.AAI_RESOURCES_MS);
+               LoggingContext.component("SendMigrationNotifications");
+               LoggingContext.targetEntity(AAIConstants.AAI_RESOURCES_MS);
+               LoggingContext.targetServiceName("main");
+               LoggingContext.requestId(requestId);
+               LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
+               LoggingContext.responseCode(LoggingContext.SUCCESS);
+
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
+                               "org.onap.aai.config",
+                               "org.onap.aai.setup"
+               );
+
+               LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
+               SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
+               String basePath = ctx.getEnvironment().getProperty("schema.uri.base.path");
+
+               CommandLineDeleteArgs cArgs = new CommandLineDeleteArgs();
+
+               JCommander jCommander = new JCommander(cArgs, args);
+               jCommander.setProgramName(SendDeleteMigrationNotificationsMain.class.getSimpleName());
+
+               EventAction action = EventAction.valueOf(cArgs.eventAction.toUpperCase());
+
+               SendDeleteMigrationNotifications internal = new SendDeleteMigrationNotifications(loaderFactory, schemaVersions, cArgs.config, cArgs.file, new HashSet<>(cArgs.notifyOn), cArgs.sleepInMilliSecs, cArgs.numToBatch, requestId, action, cArgs.eventSource);
+
+               try {
+                       internal.process(basePath);
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+               AAIGraph.getInstance().graphShutdown();
+               System.exit(0);
+       }
+}
+
+class CommandLineDeleteArgs {
+
+       @Parameter(names = "--help", help = true)
+       public boolean help;
+
+       @Parameter(names = "-c", description = "location of configuration file", required = true)
+       public String config;
+
+       @Parameter(names = "--inputFile", description = "path to input file", required = true)
+       public String file;
+
+       @Parameter (names = "--notifyOn", description = "path to input file")
+       public List<String> notifyOn = new ArrayList<>();
+
+       @Parameter (names = "--sleepInMilliSecs", description = "how long to sleep between sending in seconds", validateWith = PositiveNumValidator.class)
+       public Integer sleepInMilliSecs = 0;
+
+       @Parameter (names = "--numToBatch", description = "how many to batch before sending", validateWith = PositiveNumValidator.class)
+       public Integer numToBatch = 1;
+
+       @Parameter (names = "-a", description = "event action type for dmaap event: CREATE, UPDATE, or DELETE")
+       public String eventAction = EventAction.DELETE.toString();
+
+       @Parameter (names = "--eventSource", description = "source of truth for notification, defaults to DMAAP-LOAD")
+       public String eventSource = "DMAAP-LOAD";
+}
+
+
diff --git a/src/main/java/org/onap/aai/util/SendMigrationNotifications.java b/src/main/java/org/onap/aai/util/SendMigrationNotifications.java
new file mode 100644 (file)
index 0000000..577f577
--- /dev/null
@@ -0,0 +1,189 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+import com.att.eelf.configuration.Configuration;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.*;
+import org.onap.aai.migration.EventAction;
+import org.onap.aai.migration.NotificationHelper;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.slf4j.MDC;
+
+import java.io.IOException;
+import java.net.URI;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.*;
+
/**
 * Reads a file of {@code <vertexId>_<resourceVersion>} lines and, for each
 * vertex that still exists in the graph with a matching resource-version,
 * publishes a notification event. Events are sent in batches with a throttle
 * sleep between batches.
 */
public class SendMigrationNotifications {

    protected EELFLogger logger = EELFManager.getInstance().getLogger(SendMigrationNotifications.class.getSimpleName());

    // Caller-supplied configuration (see constructor javadoc)
    private String config;
    private String path;
    private Set<String> notifyOn;
    long sleepInMilliSecs;
    int numToBatch;
    private String requestId;
    private EventAction eventAction;
    private String eventSource;

    // Graph / serialization plumbing, set up by initGraph() and initFields()
    protected QueryStyle queryStyle = QueryStyle.TRAVERSAL;
    protected ModelType introspectorFactoryType = ModelType.MOXY;
    protected Loader loader = null;
    protected TransactionalGraphEngine engine = null;
    protected NotificationHelper notificationHelper = null;
    protected DBSerializer serializer = null;
    protected final LoaderFactory loaderFactory;
    protected final SchemaVersions schemaVersions;
    protected final SchemaVersion version;

    /**
     * Stores the supplied settings, points the logging system at the migration
     * logback configuration, opens the graph connection and builds the
     * loader/serializer/notification helpers.
     *
     * @param loaderFactory    factory used to build the model loader
     * @param schemaVersions   schema version registry; the default version is used
     * @param config           path to the realtime graph configuration file
     * @param path             input file of {@code <vertexId>_<resourceVersion>} lines
     * @param notifyOn         node types to notify on; an empty set means all types
     * @param sleepInMilliSecs pause between batches of events
     * @param numToBatch       number of events to accumulate before sending
     * @param requestId        transaction id used for the emitted events
     * @param eventAction      event action to publish (CREATE, UPDATE or DELETE)
     * @param eventSource      source-of-truth tag for the events
     */
    public SendMigrationNotifications(LoaderFactory loaderFactory, SchemaVersions schemaVersions, String config, String path, Set<String> notifyOn, int sleepInMilliSecs, int numToBatch, String requestId, EventAction eventAction, String eventSource) {
        System.setProperty("aai.service.name", SendMigrationNotifications.class.getSimpleName());
        Properties props = System.getProperties();
        props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, "migration-logback.xml");
        props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES);

        MDC.put("logFilenameAppender", SendMigrationNotifications.class.getSimpleName());

        this.config = config;
        this.path = path;
        this.notifyOn = notifyOn;
        this.sleepInMilliSecs = sleepInMilliSecs;
        this.numToBatch = numToBatch;
        this.requestId = requestId;
        this.eventAction = eventAction;
        this.eventSource = eventSource;
        this.loaderFactory = loaderFactory;
        this.schemaVersions = schemaVersions;
        this.version  = schemaVersions.getDefaultVersion();

        initGraph();

        initFields();
    }

    /**
     * Looks up each vertex listed in the input file and, when it still exists
     * and its resource-version matches the recorded one, adds a notification
     * event for it. Events are triggered in batches of {@code numToBatch},
     * sleeping {@code sleepInMilliSecs} between batches; leftovers are flushed
     * at the end, then the graph connection is closed.
     *
     * @param basePath REST base path used when building event URIs
     * @throws Exception on serialization, notification or interruption errors
     */
    public void process(String basePath) throws Exception {

        Map<String, String> vertexIds = processFile();
        engine.startTransaction();
        // Read-only traversal: this job never mutates the graph (see cleanup()'s rollback)
        GraphTraversalSource g = engine.asAdmin().getReadOnlyTraversalSource();
        List<Vertex> vertexes;
        URI uri;
        Vertex v;
        int count = 0;
        for (Map.Entry<String, String> entry : vertexIds.entrySet()) {
            // entry key = vertex id from the file, entry value = expected resource-version
            vertexes = g.V(entry.getKey()).toList();
            if (vertexes == null || vertexes.isEmpty()) {
                logAndPrint("Vertex " + entry.getKey() + " no longer exists." );
                continue;
            } else if (vertexes.size() > 1) {
                logAndPrint("Vertex " + entry.getKey() + " query returned " + vertexes.size() + " vertexes." );
                continue;
            } else {
                // NOTE(review): this log message is missing a separator between the
                // key and "resource-version"
                logger.info("Processing " + entry.getKey() + "resource-version " + entry.getValue());
                v = vertexes.get(0);
                // Only notify for requested node types; an empty set means all types
                if (notifyOn.isEmpty() || notifyOn.contains(v.value(AAIProperties.NODE_TYPE).toString())) {
                    // Skip vertexes whose resource-version changed since the file was produced
                    if (entry.getValue().equals(v.value(AAIProperties.RESOURCE_VERSION).toString())) {
                        Introspector introspector = serializer.getLatestVersionView(v);
                        uri = this.serializer.getURIForVertex(v, false);
                        this.notificationHelper.addEvent(v, introspector, eventAction, uri, basePath);
                        count++;
                        if (count >= this.numToBatch) {
                            trigger();
                            logger.info("Triggered " + entry.getKey());
                            count = 0;
                            Thread.sleep(this.sleepInMilliSecs);
                        }
                    }
                }
            }
        }

        // Flush any events left over from a partial batch
        if (count > 0) {
            trigger();
        }

        cleanup();
    }

    /** Flushes any events accumulated in the notification helper. */
    protected void trigger() throws AAIException {
        this.notificationHelper.triggerEvents();
    }

    /**
     * Reads the input file into an insertion-ordered map of vertex id to
     * resource-version. Only lines of the exact shape {@code id_version}
     * (one underscore) are kept; everything else is silently skipped.
     */
    private Map<String, String> processFile() throws IOException {
        List<String> lines = Files.readAllLines(Paths.get(path));
        final Map<String,String> vertexIds = new LinkedHashMap<>();
        lines.stream().forEach(line -> {
            if (line.contains("_")) {
                String[] splitLine = line.split("_");
                if (splitLine.length == 2) {
                    vertexIds.put(splitLine[0], splitLine[1]);
                }
            }
        });
        return vertexIds;
    }

    /** Rolls back the read-only transaction and shuts the graph down. */
    protected void cleanup() {
        logAndPrint("Events sent, closing graph connections");
        engine.rollback();
        AAIGraph.getInstance().graphShutdown();
        logAndPrint("---------- Done ----------");
    }

    /** Builds the loader, DB engine, serializer and notification helper. */
    private void initFields() {
        this.loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);
        this.engine = new JanusGraphDBEngine(queryStyle, DBConnectionType.REALTIME, loader);
        try {
            this.serializer = new DBSerializer(version, this.engine, introspectorFactoryType, this.eventSource);
        } catch (AAIException e) {
            throw new RuntimeException("could not create serializer", e);
        }
        this.notificationHelper = new NotificationHelper(loader, serializer, loaderFactory, schemaVersions, engine, requestId, this.eventSource);
    }

    /** Points the realtime DB config at the supplied file and opens the graph. */
    protected void initGraph() {
        System.setProperty("realtime.db.config", this.config);
        logAndPrint("\n\n---------- Connecting to Graph ----------");
        AAIGraph.getInstance();
        logAndPrint("---------- Connection Established ----------");
    }

    /** Writes the message to both stdout and the logger. */
    protected void logAndPrint(String msg) {
        System.out.println(msg);
        logger.info(msg);
    }


}
\ No newline at end of file
diff --git a/src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java b/src/main/java/org/onap/aai/util/SendMigrationNotificationsMain.java
new file mode 100644 (file)
index 0000000..29eb1da
--- /dev/null
@@ -0,0 +1,105 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+import com.beust.jcommander.JCommander;
+import com.beust.jcommander.Parameter;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.logging.LoggingContext;
+import org.onap.aai.migration.EventAction;
+import org.onap.aai.setup.SchemaVersions;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+import java.util.*;
+
+public class SendMigrationNotificationsMain {
+
+       public static void main(String[] args) {
+
+               Arrays.asList(args).stream().forEach(System.out::println);
+
+               String requestId = UUID.randomUUID().toString();
+               LoggingContext.init();
+               LoggingContext.partnerName("Migration");
+               LoggingContext.serviceName(AAIConstants.AAI_RESOURCES_MS);
+               LoggingContext.component("SendMigrationNotifications");
+               LoggingContext.targetEntity(AAIConstants.AAI_RESOURCES_MS);
+               LoggingContext.targetServiceName("main");
+               LoggingContext.requestId(requestId);
+               LoggingContext.statusCode(LoggingContext.StatusCode.COMPLETE);
+               LoggingContext.responseCode(LoggingContext.SUCCESS);
+
+               AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(
+                               "org.onap.aai.config",
+                               "org.onap.aai.setup"
+               );
+
+               LoaderFactory loaderFactory = ctx.getBean(LoaderFactory.class);
+               SchemaVersions schemaVersions = ctx.getBean(SchemaVersions.class);
+               String basePath = ctx.getEnvironment().getProperty("schema.uri.base.path");
+
+               CommandLineArgs cArgs = new CommandLineArgs();
+
+               JCommander jCommander = new JCommander(cArgs, args);
+               jCommander.setProgramName(SendMigrationNotificationsMain.class.getSimpleName());
+
+               EventAction action = EventAction.valueOf(cArgs.eventAction.toUpperCase());
+
+               SendMigrationNotifications internal = new SendMigrationNotifications(loaderFactory, schemaVersions, cArgs.config, cArgs.file, new HashSet<>(cArgs.notifyOn), cArgs.sleepInMilliSecs, cArgs.numToBatch, requestId, action, cArgs.eventSource);
+
+               try {
+                       internal.process(basePath);
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+               AAIGraph.getInstance().graphShutdown();
+               System.exit(0);
+       }
+}
+
+/**
+ * Command-line options for SendMigrationNotificationsMain, parsed by
+ * JCommander.  Fields are public because JCommander injects them directly.
+ */
+class CommandLineArgs {
+
+       // Print usage and exit.
+       @Parameter(names = "--help", help = true)
+       public boolean help;
+
+       @Parameter(names = "-c", description = "location of configuration file", required = true)
+       public String config;
+
+       @Parameter(names = "--inputFile", description = "path to input file", required = true)
+       public String file;
+
+       // Fixed: the description was a copy-paste of --inputFile ("path to input file").
+       @Parameter (names = "--notifyOn", description = "restrict notifications to the given set of values")
+       public List<String> notifyOn = new ArrayList<>();
+
+       // Fixed: the option is in milliseconds; the old description said "in seconds".
+       @Parameter (names = "--sleepInMilliSecs", description = "how long to sleep between sending in milliseconds", validateWith = PositiveNumValidator.class)
+       public Integer sleepInMilliSecs = 0;
+
+       @Parameter (names = "--numToBatch", description = "how many to batch before sending", validateWith = PositiveNumValidator.class)
+       public Integer numToBatch = 1;
+
+       @Parameter (names = "-a", description = "event action type for dmaap event: CREATE, UPDATE, or DELETE")
+       public String eventAction = EventAction.CREATE.toString();
+
+       @Parameter (names = "--eventSource", description = "source of truth for notification, defaults to DMAAP-LOAD")
+       public String eventSource = "DMAAP-LOAD";
+}
+
+
diff --git a/src/main/java/org/onap/aai/util/UniquePropertyCheck.java b/src/main/java/org/onap/aai/util/UniquePropertyCheck.java
new file mode 100644 (file)
index 0000000..e96c252
--- /dev/null
@@ -0,0 +1,288 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.util;\r
+import java.util.HashMap;\r
+import java.util.Iterator;\r
+import java.util.Map;\r
+import java.util.Properties;\r
+import java.util.UUID;\r
+\r
+import org.apache.tinkerpop.gremlin.structure.Direction;\r
+import org.apache.tinkerpop.gremlin.structure.Edge;\r
+import org.apache.tinkerpop.gremlin.structure.Graph;\r
+import org.apache.tinkerpop.gremlin.structure.Vertex;\r
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;\r
+import org.onap.aai.GraphAdminApp;\r
+import org.onap.aai.exceptions.AAIException;\r
+import org.onap.aai.logging.LoggingContext;\r
+import org.onap.aai.logging.LoggingContext.StatusCode;\r
+import org.slf4j.MDC;\r
+\r
+import com.att.eelf.configuration.Configuration;\r
+import com.att.eelf.configuration.EELFLogger;\r
+import com.att.eelf.configuration.EELFManager;\r
+\r
+import org.janusgraph.core.JanusGraphFactory;\r
+import org.janusgraph.core.JanusGraph;\r
+import org.onap.aai.dbmap.AAIGraphConfig;\r
+\r
+public class UniquePropertyCheck {\r
+\r
+\r
+       private static  final  String    FROMAPPID = "AAI-UTILS";\r
+       private static  final  String    TRANSID   = UUID.randomUUID().toString();\r
+       private static  final  String    COMPONENT = "UniquePropertyCheck";\r
+       \r
+       /**\r
+        * The main method.\r
+        *\r
+        * @param args the arguments\r
+        */\r
+       public static void main(String[] args) {\r
+               \r
+               \r
+               Properties props = System.getProperties();\r
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_LOGBACK_PROPS);\r
+               props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_BUNDLECONFIG);\r
+               EELFLogger logger = EELFManager.getInstance().getLogger(UniquePropertyCheck.class.getSimpleName());\r
+               \r
+               LoggingContext.init();\r
+               LoggingContext.partnerName(FROMAPPID);\r
+               LoggingContext.serviceName(GraphAdminApp.APP_NAME);\r
+               LoggingContext.component(COMPONENT);\r
+               LoggingContext.targetEntity(GraphAdminApp.APP_NAME);\r
+               LoggingContext.targetServiceName("main");\r
+               LoggingContext.requestId(TRANSID);\r
+               LoggingContext.statusCode(StatusCode.COMPLETE);\r
+               LoggingContext.responseCode(LoggingContext.SUCCESS);\r
+               \r
+               MDC.put("logFilenameAppender", UniquePropertyCheck.class.getSimpleName());\r
+               \r
+               if( args == null || args.length != 1 ){\r
+                               String msg = "usage:  UniquePropertyCheck propertyName \n";\r
+                               System.out.println(msg);\r
+                               LoggingContext.statusCode(StatusCode.ERROR);\r
+                       LoggingContext.responseCode(LoggingContext.BUSINESS_PROCESS_ERROR);\r
+                               logAndPrint(logger, msg );\r
+                               System.exit(1);\r
+               }\r
+               String propertyName = args[0];\r
+               Graph graph = null;\r
+               \r
+               try {   \r
+               AAIConfig.init();\r
+               System.out.println("    ---- NOTE --- about to open graph (takes a little while)--------\n");\r
+               JanusGraph tGraph = JanusGraphFactory.open(new AAIGraphConfig.Builder(AAIConstants.REALTIME_DB_CONFIG).forService(UniquePropertyCheck.class.getSimpleName()).withGraphType("realtime").buildConfiguration());\r
+               \r
+               if( tGraph == null ) {\r
+                       LoggingContext.statusCode(StatusCode.ERROR);\r
+                       LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);\r
+                       logAndPrint(logger, " Error:  Could not get JanusGraph ");\r
+                       System.exit(1);\r
+               }\r
+               \r
+               graph = tGraph.newTransaction();\r
+               if( graph == null ){\r
+                       LoggingContext.statusCode(StatusCode.ERROR);\r
+                       LoggingContext.responseCode(LoggingContext.AVAILABILITY_TIMEOUT_ERROR);\r
+                       logAndPrint(logger, "could not get graph object in UniquePropertyCheck() \n");\r
+                       System.exit(0);\r
+               }\r
+       }\r
+           catch (AAIException e1) {\r
+                       String msg =  "Threw Exception: [" + e1.toString() + "]";\r
+                       LoggingContext.statusCode(StatusCode.ERROR);\r
+                       LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);\r
+                       logAndPrint(logger, msg);\r
+                       System.exit(0);\r
+        }\r
+        catch (Exception e2) {\r
+                       String msg =  "Threw Exception: [" + e2.toString() + "]";\r
+                       LoggingContext.statusCode(StatusCode.ERROR);\r
+                       LoggingContext.responseCode(LoggingContext.UNKNOWN_ERROR);\r
+                       logAndPrint(logger, msg);\r
+                       System.exit(0);\r
+        }\r
+               \r
+               runTheCheckForUniqueness( TRANSID, FROMAPPID, graph, propertyName, logger );\r
+               System.exit(0);\r
+               \r
+       }// End main()\r
+       \r
+       \r
+       /**\r
+        * Run the check for uniqueness.\r
+        *\r
+        * @param transId the trans id\r
+        * @param fromAppId the from app id\r
+        * @param graph the graph\r
+        * @param propertyName the property name\r
+        * @param logger the logger\r
+        * @return the boolean\r
+        */\r
+       public static Boolean runTheCheckForUniqueness( String transId, String fromAppId, Graph graph, \r
+                       String propertyName, EELFLogger logger ){\r
+               \r
+               // Note - property can be found in more than one nodetype \r
+               //    our uniqueness constraints are always across the entire db - so this \r
+               //   tool looks across all nodeTypes that the property is found in.\r
+               Boolean foundDupesFlag = false;\r
+               \r
+               HashMap <String,String> valuesAndVidHash = new HashMap <String, String> ();\r
+               HashMap <String,String> dupeHash = new HashMap <String, String> ();\r
+       \r
+               int propCount = 0;\r
+               int dupeCount = 0;\r
+               Iterator<Vertex> vertItor = graph.traversal().V().has(propertyName);\r
+               while( vertItor.hasNext() ){\r
+                       propCount++;\r
+               Vertex v = vertItor.next();\r
+               String thisVid = v.id().toString();\r
+               Object val = (v.<Object>property(propertyName)).orElse(null);\r
+               if( valuesAndVidHash.containsKey(val) ){\r
+                       // We've seen this one before- track it in our  dupe hash\r
+                       dupeCount++;\r
+                       if( dupeHash.containsKey(val) ){\r
+                               // This is not the first one being added to the dupe hash for this value\r
+                               String updatedDupeList = dupeHash.get(val) + "|" + thisVid;\r
+                               dupeHash.put(val.toString(), updatedDupeList);\r
+                       }\r
+                       else {\r
+                               // This is the first time we see this value repeating\r
+                               String firstTwoVids =  valuesAndVidHash.get(val) + "|" + thisVid;\r
+                               dupeHash.put(val.toString(), firstTwoVids);\r
+                       }\r
+               }\r
+               else {\r
+                       valuesAndVidHash.put(val.toString(), thisVid);\r
+               }               \r
+               }\r
+               \r
+       \r
+       String info = "\n Found this property [" + propertyName + "] " + propCount + " times in our db.";\r
+       logAndPrint(logger, info);\r
+       info = " Found " + dupeCount + " cases of duplicate values for this property.\n\n";\r
+       logAndPrint(logger, info);\r
+\r
+       try {\r
+               if( ! dupeHash.isEmpty() ){\r
+                       Iterator <?> dupeItr = dupeHash.entrySet().iterator();\r
+                       while( dupeItr.hasNext() ){\r
+                               foundDupesFlag = true;\r
+                               Map.Entry pair = (Map.Entry) dupeItr.next();\r
+                               String dupeValue = pair.getKey().toString();;\r
+                                                       String vidsStr = pair.getValue().toString();\r
+                               String[] vidArr = vidsStr.split("\\|");\r
+                               logAndPrint(logger, "\n\n -------------- Found " + vidArr.length \r
+                                               + " nodes with " + propertyName + " of this value: [" + dupeValue + "].  Node details: ");\r
+                               \r
+                               for( int i = 0; i < vidArr.length; i++ ){\r
+                                       String vidString = vidArr[i];\r
+                                       Long idLong = Long.valueOf(vidString);\r
+                                       Vertex tvx = graph.traversal().V(idLong).next();\r
+                                       showPropertiesAndEdges( TRANSID, FROMAPPID, tvx, logger );\r
+                               }\r
+                       }\r
+               }\r
+       }\r
+       catch( Exception e2 ){\r
+               LoggingContext.statusCode(StatusCode.ERROR);\r
+                       LoggingContext.responseCode(LoggingContext.DATA_ERROR);\r
+                       logAndPrint(logger, "Threw Exception: [" + e2.toString() + "]");\r
+       } \r
+       \r
+       \r
+       return foundDupesFlag;\r
+       \r
+       }// end of runTheCheckForUniqueness()\r
+       \r
+       \r
+       /**\r
+        * Show properties and edges.\r
+        *\r
+        * @param transId the trans id\r
+        * @param fromAppId the from app id\r
+        * @param tVert the t vert\r
+        * @param logger the logger\r
+        */\r
+       private static void showPropertiesAndEdges( String transId, String fromAppId, Vertex tVert,\r
+                       EELFLogger logger ){ \r
+\r
+               if( tVert == null ){\r
+                       logAndPrint(logger, "Null node passed to showPropertiesAndEdges.");\r
+               }\r
+               else {\r
+                       String nodeType = "";\r
+                       Object ob = tVert.<String>property("aai-node-type").orElse(null);\r
+                       if( ob == null ){\r
+                               nodeType = "null";\r
+                       }\r
+                       else{\r
+                               nodeType = ob.toString();\r
+                       }\r
+                       \r
+                       logAndPrint(logger, " AAINodeType/VtxID for this Node = [" + nodeType + "/" + tVert.id() + "]");\r
+                       logAndPrint(logger, " Property Detail: ");\r
+                       Iterator<VertexProperty<Object>> pI = tVert.properties();\r
+                       while( pI.hasNext() ){\r
+                               VertexProperty<Object> tp = pI.next();\r
+                               Object val = tp.value();\r
+                               logAndPrint(logger, "Prop: [" + tp.key() + "], val = [" + val + "] ");          \r
+                       }\r
+                       \r
+                       Iterator <Edge> eI = tVert.edges(Direction.BOTH);\r
+                       if( ! eI.hasNext() ){\r
+                               logAndPrint(logger, "No edges were found for this vertex. ");\r
+                       }\r
+                       while( eI.hasNext() ){\r
+                               Edge ed = eI.next();\r
+                               String lab = ed.label();\r
+                               Vertex vtx;\r
+                               if (tVert.equals(ed.inVertex())) {\r
+                                       vtx = ed.outVertex();\r
+                               } else {\r
+                                       vtx = ed.inVertex();\r
+                               }\r
+                               if( vtx == null ){\r
+                                       logAndPrint(logger, " >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");\r
+                               }\r
+                               else {\r
+                                       String nType = vtx.<String>property("aai-node-type").orElse(null);\r
+                                       String vid = vtx.id().toString();\r
+                                       logAndPrint(logger, "Found an edge (" + lab + ") from this vertex to a [" + nType + "] node with VtxId = " + vid);\r
+                               }\r
+                       }\r
+               }\r
+       } // End of showPropertiesAndEdges()\r
+\r
+       \r
+       /**\r
+        * Log and print.\r
+        *\r
+        * @param logger the logger\r
+        * @param msg the msg\r
+        */\r
+       protected static void logAndPrint(EELFLogger logger, String msg) {\r
+               System.out.println(msg);\r
+               logger.info(msg);\r
+       }\r
+       \r
+}\r
+\r
diff --git a/src/main/java/org/onap/aai/web/JerseyConfiguration.java b/src/main/java/org/onap/aai/web/JerseyConfiguration.java
new file mode 100644 (file)
index 0000000..436946c
--- /dev/null
@@ -0,0 +1,137 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.web;
+
+import org.glassfish.jersey.filter.LoggingFilter;
+import org.glassfish.jersey.server.ResourceConfig;
+import org.glassfish.jersey.servlet.ServletProperties;
+import org.onap.aai.rest.QueryConsumer;
+import org.onap.aai.rest.util.EchoResponse;
+import org.reflections.Reflections;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Profile;
+import org.springframework.core.env.Environment;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.Priority;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.ContainerResponseFilter;
+import java.util.List;
+import java.util.Set;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
+
+@Component
+public class JerseyConfiguration extends ResourceConfig {
+
+    private static final Logger log = Logger.getLogger(JerseyConfiguration.class.getName());
+
+    private Environment env;
+
+    @Autowired
+    public JerseyConfiguration(Environment env) {
+
+        this.env = env;
+
+        register(QueryConsumer.class);
+
+        register(EchoResponse.class);
+
+        //Request Filters
+        registerFiltersForRequests();
+        // Response Filters
+        registerFiltersForResponses();
+
+        property(ServletProperties.FILTER_FORWARD_ON_404, true);
+
+        // Following registers the request headers and response headers
+        // If the LoggingFilter second argument is set to true, it will print response value as well
+        if ("true".equalsIgnoreCase(env.getProperty("aai.request.logging.enabled"))) {
+            register(new LoggingFilter(log, false));
+        }
+    }
+
+    public void registerFiltersForRequests() {
+
+        // Find all the classes within the interceptors package
+        Reflections reflections = new Reflections("org.onap.aai.interceptors");
+        // Filter them based on the clazz that was passed in
+        Set<Class<? extends ContainerRequestFilter>> filters = reflections.getSubTypesOf(ContainerRequestFilter.class);
+
+
+        // Check to ensure that each of the filter has the @Priority annotation and if not throw exception
+        for (Class filterClass : filters) {
+            if (filterClass.getAnnotation(Priority.class) == null) {
+                throw new RuntimeException("Container filter " + filterClass.getName() + " does not have @Priority annotation");
+            }
+        }
+
+        // Turn the set back into a list
+        List<Class<? extends ContainerRequestFilter>> filtersList = filters
+                .stream()
+                .filter(f -> {
+                    if (f.isAnnotationPresent(Profile.class)
+                            && !env.acceptsProfiles(f.getAnnotation(Profile.class).value())) {
+                        return false;
+                    }
+                    return true;
+                })
+                .collect(Collectors.toList());
+
+        // Sort them by their priority levels value
+        filtersList.sort((c1, c2) -> Integer.valueOf(c1.getAnnotation(Priority.class).value()).compareTo(c2.getAnnotation(Priority.class).value()));
+
+        // Then register this to the jersey application
+        filtersList.forEach(this::register);
+    }
+
+    public void registerFiltersForResponses() {
+
+        // Find all the classes within the interceptors package
+        Reflections reflections = new Reflections("org.onap.aai.interceptors");
+        // Filter them based on the clazz that was passed in
+        Set<Class<? extends ContainerResponseFilter>> filters = reflections.getSubTypesOf(ContainerResponseFilter.class);
+
+
+        // Check to ensure that each of the filter has the @Priority annotation and if not throw exception
+        for (Class filterClass : filters) {
+            if (filterClass.getAnnotation(Priority.class) == null) {
+                throw new RuntimeException("Container filter " + filterClass.getName() + " does not have @Priority annotation");
+            }
+        }
+
+        // Turn the set back into a list
+        List<Class<? extends ContainerResponseFilter>> filtersList = filters.stream()
+                .filter(f -> {
+                    if (f.isAnnotationPresent(Profile.class)
+                            && !env.acceptsProfiles(f.getAnnotation(Profile.class).value())) {
+                        return false;
+                    }
+                    return true;
+                })
+                .collect(Collectors.toList());
+
+        // Sort them by their priority levels value
+        filtersList.sort((c1, c2) -> Integer.valueOf(c1.getAnnotation(Priority.class).value()).compareTo(c2.getAnnotation(Priority.class).value()));
+
+        // Then register this to the jersey application
+        filtersList.forEach(this::register);
+    }
+}
diff --git a/src/main/java/org/onap/aai/web/LocalHostAccessLog.java b/src/main/java/org/onap/aai/web/LocalHostAccessLog.java
new file mode 100644 (file)
index 0000000..4e28562
--- /dev/null
@@ -0,0 +1,67 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.web;
+
+import ch.qos.logback.access.jetty.RequestLogImpl;
+import org.eclipse.jetty.server.handler.HandlerCollection;
+import org.eclipse.jetty.server.handler.RequestLogHandler;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.context.embedded.EmbeddedServletContainerFactory;
+import org.springframework.boot.context.embedded.jetty.JettyEmbeddedServletContainerFactory;
+import org.springframework.boot.context.embedded.jetty.JettyServerCustomizer;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.util.Arrays;
+
+@Configuration
+public class LocalHostAccessLog {
+
+    @Bean
+    public EmbeddedServletContainerFactory jettyConfigBean(
+            @Value("${jetty.threadPool.maxThreads:200}") final String maxThreads,
+            @Value("${jetty.threadPool.minThreads:8}") final String minThreads
+    ){
+               JettyEmbeddedServletContainerFactory jef = new JettyEmbeddedServletContainerFactory();
+               jef.addServerCustomizers((JettyServerCustomizer) server -> {
+
+            HandlerCollection handlers = new HandlerCollection();
+
+            Arrays.stream(server.getHandlers()).forEach(handlers::addHandler);
+
+            RequestLogHandler requestLogHandler = new RequestLogHandler();
+            requestLogHandler.setServer(server);
+
+            RequestLogImpl requestLogImpl = new RequestLogImpl();
+            requestLogImpl.setResource("/localhost-access-logback.xml");
+            requestLogImpl.start();
+
+            requestLogHandler.setRequestLog(requestLogImpl);
+            handlers.addHandler(requestLogHandler);
+            server.setHandler(handlers);
+
+            final QueuedThreadPool threadPool = server.getBean(QueuedThreadPool.class);
+            threadPool.setMaxThreads(Integer.valueOf(maxThreads));
+            threadPool.setMinThreads(Integer.valueOf(minThreads));
+        });
+               return jef;
+       }
+}
diff --git a/src/main/resources/antlr4/org/onap/aai/AAIDsl.g4 b/src/main/resources/antlr4/org/onap/aai/AAIDsl.g4
new file mode 100644 (file)
index 0000000..2713677
--- /dev/null
@@ -0,0 +1,66 @@
+/**
+ * AAIDsl - ANTLR4 grammar for the A&AI DSL query language.
+ *
+ * A query is a chain of node steps joined by the traverse operator '>',
+ * optionally grouped into unions with [ ... ] and bounded by LIMIT clauses.
+ */
+grammar AAIDsl;
+
+
+// Entry rule: a complete DSL query is a single statement.
+aaiquery: dslStatement;
+
+// A statement: a starting query step, then any number of traversals
+// (plain or union), then optional LIMIT steps.
+dslStatement: (queryStep) (traverseStep | unionTraverseStep)* limitStep*;
+
+// One node-selection step, with zero, one, or several filters attached.
+queryStep : (singleNodeStep |singleQueryStep | multiQueryStep);
+
+// Union of sub-statements: [ stmt, stmt, ... ]
+unionQueryStep: LBRACKET dslStatement ( COMMA (dslStatement))* RBRACKET;
+
+// '>' moves from the current step to the next step or union.
+traverseStep: (TRAVERSE (  queryStep | unionQueryStep));
+
+unionTraverseStep: TRAVERSE unionQueryStep;
+
+// A node type name, optionally marked with '*' (STORE) to return it.
+singleNodeStep: NODE STORE? ;
+singleQueryStep: NODE STORE? (filterStep | filterTraverseStep);
+multiQueryStep:  NODE STORE? (filterStep | filterTraverseStep) (filterStep)+;
+
+// Property filter: ('key','value',...); a leading '!' negates it.
+filterStep: NOT? (LPAREN KEY COMMA KEY (COMMA KEY)*RPAREN);
+filterTraverseStep: (LPAREN traverseStep* RPAREN);
+
+limitStep: LIMIT NODE;
+
+// ---- Lexer rules (for equal-length matches, the earlier rule wins) ----
+LIMIT: 'LIMIT';
+NODE: ID;
+
+// Single-quoted identifier, e.g. 'cloud-region'
+KEY: ['] ID ['] ;
+
+AND: [&];
+
+STORE: [*];
+
+OR: [|];
+
+TRAVERSE: [>] ;
+
+LPAREN: [(];
+	
+RPAREN: [)];
+
+COMMA: [,] ;
+
+EQUAL: [=];
+
+LBRACKET: [[];
+	
+RBRACKET: [\]];
+
+NOT: [!];
+
+// NOTE(review): VALUE only matches a single digit, and ID (below) can also
+// match digits with a longer run - confirm VALUE is still reachable/needed.
+VALUE: DIGIT;
+
+fragment LOWERCASE  : [a-z] ;
+fragment UPPERCASE  : [A-Z] ;
+fragment DIGIT      : [0-9] ;
+ID
+   : ( LOWERCASE | UPPERCASE | DIGIT) ( LOWERCASE | UPPERCASE | DIGIT | '-' |'.' |'_')*
+   ;
+
+WS : [ \t\r\n]+ -> skip ; // skip spaces, tabs, newlines
+
+
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
new file mode 100644 (file)
index 0000000..d636bb6
--- /dev/null
@@ -0,0 +1,64 @@
+
+spring.application.name=GraphAdmin
+
+server.contextPath=/
+spring.autoconfigure.exclude=org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration,org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration
+
+spring.profiles.active=production,one-way-ssl
+
+spring.jersey.application-path=${schema.uri.base.path}
+
+#This property is used to set the Tomcat connector attributes. Developers can define multiple attributes separated by commas
+#tomcat.connector.attributes=allowTrace-true
+#The max number of active threads in this pool
+jetty.threadPool.maxThreads=200
+#The minimum number of threads always kept alive
+jetty.threadPool.minThreads=8
+#The number of milliseconds before an idle thread shuts down, unless the number of active threads is less than or equal to minSpareThreads
+server.tomcat.max-idle-time=60000
+
+# If you get an application startup failure, check whether the port is already taken.
+# If that's not it, please check that the key-store file path makes sense
+server.local.startpath=src/main/resources/
+server.basic.auth.location=${server.local.startpath}etc/auth/realm.properties
+
+server.port=8449
+server.ssl.enabled-protocols=TLSv1.1,TLSv1.2
+server.ssl.key-store=${server.local.startpath}etc/auth/aai_keystore
+server.ssl.key-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
+server.ssl.trust-store=${server.local.startpath}etc/auth/aai_keystore
+server.ssl.trust-store-password=password(OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0)
+server.ssl.client-auth=want
+server.ssl.key-store-type=JKS
+
+# JMS bind address host port
+jms.bind.address=tcp://localhost:61450
+dmaap.ribbon.listOfServers=localhost:3904
+
+# Schema related attributes for the oxm and edges
+# Any additional schema related attributes should start with prefix schema
+schema.configuration.location=N/A
+schema.source.name=onap
+schema.nodes.location=${server.local.startpath}/schema/${schema.source.name}/oxm/
+schema.edges.location=${server.local.startpath}/schema/${schema.source.name}/dbedgerules/
+
+schema.ingest.file=${server.local.startpath}/application.properties
+
+# Schema Version Related Attributes
+
+schema.uri.base.path=/aai
+# Lists all of the versions in the schema
+schema.version.list=v8,v9,v10,v11,v12,v13,v14
+# Specifies from which version the depth parameter should default to zero
+schema.version.depth.start=v9
+# Specifies from which version the related link should be displayed in the response payload
+schema.version.related.link.start=v10
+# Specifies from which version the client should see only the URI, excluding host info
+# Before this version the server base will also be included
+schema.version.app.root.start=v11
+
+schema.version.namespace.change.start=v12
+# Specifies from which version the client should start seeing the edge label in the payload
+schema.version.edge.label.start=v12
+# Specifies the version that the application should default to
+schema.version.api.default=v14
diff --git a/src/main/resources/dupeTool-logback.xml b/src/main/resources/dupeTool-logback.xml
new file mode 100644 (file)
index 0000000..ac27e67
--- /dev/null
@@ -0,0 +1,62 @@
+<configuration>
+       <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+       <appender name="dupeToollog" class="ch.qos.logback.classic.sift.SiftingAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <!-- This is MDC value -->
+               <!-- We will assign a value to 'logFilenameAppender' via Java code -->
+               <discriminator>
+                       <key>logFilenameAppender</key>
+                       <defaultValue>console</defaultValue>
+               </discriminator>
+               <sift>
+                       <!-- A standard RollingFileAppender; the log file name is based on the
+                               'logFilenameAppender' MDC value at runtime -->
+                       <appender name="FILE-${logFilenameAppender}"
+                               class="ch.qos.logback.core.rolling.RollingFileAppender">
+                               <file>${logDirectory}/dupeTool/${logFilenameAppender}.log</file>
+                               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                                       <fileNamePattern>${logDirectory}/dupeTool/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+                                       </fileNamePattern>
+                               </rollingPolicy>
+                               <encoder>
+                                       <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+                               </encoder>
+                       </appender>
+               </sift>
+       </appender>
+
+       <logger name="org.reflections" level="ERROR" additivity="false">
+               <appender-ref ref="dupeToollog" />
+       </logger>
+       <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+               <appender-ref ref="dupeToollog" />
+       </logger>
+       <logger name="org.apache.hadoop" level="ERROR" additivity="false">
+               <appender-ref ref="dupeToollog" />
+       </logger>
+       <logger name="org.janusgraph" level="ERROR" additivity="false">
+               <appender-ref ref="dupeToollog" />
+       </logger>
+       <logger name="ch.qos.logback.classic" level="ERROR" additivity="false">
+               <appender-ref ref="dupeToollog" />
+       </logger>
+       <logger name="ch.qos.logback.core" level="ERROR" additivity="false">
+               <appender-ref ref="dupeToollog" />
+       </logger>
+       <logger name="com.att.eelf" level="ERROR" additivity="false">
+               <appender-ref ref="dupeToollog" />
+       </logger>
+       <logger name="org.onap.aai" level="ERROR" additivity="false">
+               <appender-ref ref="dupeToollog" />
+       </logger>
+
+
+       <root level="INFO">
+               <appender-ref ref="dupeToollog" />
+       </root>
+</configuration>
\ No newline at end of file
diff --git a/src/main/resources/dynamicPayloadGenerator-logback.xml b/src/main/resources/dynamicPayloadGenerator-logback.xml
new file mode 100644 (file)
index 0000000..d788a87
--- /dev/null
@@ -0,0 +1,80 @@
+<!--
+
+    ============LICENSE_START=======================================================
+    org.onap.aai
+    ================================================================================
+    Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+    ================================================================================
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+    ============LICENSE_END=========================================================
+
+    ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+       <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+       <appender name="dynamicPayloadGeneratorlog" class="ch.qos.logback.classic.sift.SiftingAppender">
+               <!-- This is MDC value -->
+               <!-- We will assign a value to 'logFilenameAppender' via Java code -->
+               <discriminator>
+                       <key>logFilenameAppender</key>
+                       <defaultValue>undefined</defaultValue>
+               </discriminator>
+               <sift>
+                       <!-- A standard RollingFileAppender; the log file name is based on the
+                               'logFilenameAppender' MDC value at runtime -->
+                       <appender name="FILE-${logFilenameAppender}"
+                               class="ch.qos.logback.core.rolling.RollingFileAppender">
+                               <file>${logDirectory}/dynamicPayloadGenerator/${logFilenameAppender}.log</file>
+                               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                                       <fileNamePattern>${logDirectory}/dynamicPayloadGenerator/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+                                       </fileNamePattern>
+                               </rollingPolicy>
+                               <encoder>
+                                       <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+                               </encoder>
+                       </appender>
+               </sift>
+       </appender>
+
+       <logger name="org.reflections" level="ERROR" additivity="false">
+               <appender-ref ref="dynamicPayloadGeneratorlog" />
+       </logger>
+       <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+               <appender-ref ref="dynamicPayloadGeneratorlog" />
+       </logger>
+       <logger name="org.apache.hadoop" level="ERROR" additivity="false">
+               <appender-ref ref="dynamicPayloadGeneratorlog" />
+       </logger>
+       <logger name="org.janusgraph" level="ERROR" additivity="false">
+               <appender-ref ref="dynamicPayloadGeneratorlog" />
+       </logger>
+       <logger name="ch.qos.logback.classic" level="ERROR" additivity="false">
+               <appender-ref ref="dynamicPayloadGeneratorlog" />
+       </logger>
+       <logger name="ch.qos.logback.core" level="ERROR" additivity="false">
+               <appender-ref ref="dynamicPayloadGeneratorlog" />
+       </logger>
+       <logger name="com.att.eelf" level="ERROR" additivity="false">
+               <appender-ref ref="dynamicPayloadGeneratorlog" />
+       </logger>
+       <logger name="org.onap.aai" level="INFO" additivity="false">
+               <appender-ref ref="dynamicPayloadGeneratorlog" />
+       </logger>
+
+
+       <root level="INFO">
+               <appender-ref ref="dynamicPayloadGeneratorlog" />
+       </root>
+</configuration>
\ No newline at end of file
diff --git a/src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties b/src/main/resources/etc/appprops/aaiEventDMaaPPublisher.properties
new file mode 100644 (file)
index 0000000..a8f5e95
--- /dev/null
@@ -0,0 +1,4 @@
+topic=AAI-EVENT\r
+partition=AAI\r
+maxBatchSize=100\r
+maxAgeMs=250\r
diff --git a/src/main/resources/etc/appprops/aaiconfig.properties b/src/main/resources/etc/appprops/aaiconfig.properties
new file mode 100644 (file)
index 0000000..8613d93
--- /dev/null
@@ -0,0 +1,144 @@
+#\r
+# ============LICENSE_START=======================================================\r
+# org.onap.aai\r
+# ================================================================================\r
+# Copyright © 2017-18 AT&T Intellectual Property. All rights reserved.\r
+# ================================================================================\r
+# Licensed under the Apache License, Version 2.0 (the "License");\r
+# you may not use this file except in compliance with the License.\r
+# You may obtain a copy of the License at\r
+#\r
+#    http://www.apache.org/licenses/LICENSE-2.0\r
+#\r
+# Unless required by applicable law or agreed to in writing, software\r
+# distributed under the License is distributed on an "AS IS" BASIS,\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+# See the License for the specific language governing permissions and\r
+# limitations under the License.\r
+# ============LICENSE_END=========================================================\r
+\r
+####################################################################\r
+#  REMEMBER TO THINK ABOUT ENVIRONMENTAL DIFFERENCES AND CHANGE THE\r
+#  TEMPLATE AND *ALL* DATAFILES\r
+####################################################################\r
+\r
+aai.config.checktime=1000\r
+\r
+# this could come from siteconfig.pl?\r
+aai.config.nodename=AutomaticallyOverwritten\r
+\r
+aai.transaction.logging=true\r
+aai.transaction.logging.get=true\r
+aai.transaction.logging.post=true\r
+\r
+aai.server.url.base=https://localhost:8443/aai/\r
+aai.server.url=https://localhost:8443/aai/v14/\r
+aai.oldserver.url.base=https://localhost:8443/aai/servers/\r
+aai.oldserver.url=https://localhost:8443/aai/servers/v2/\r
+aai.global.callback.url=https://localhost:8443/aai/\r
+\r
+# Start of INTERNAL Specific Properties\r
+\r
+aai.truststore.filename=aai_keystore\r
+aai.truststore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0\r
+aai.keystore.filename=aai-client-cert.p12\r
+aai.keystore.passwd.x=OBF:1vn21ugu1saj1v9i1v941sar1ugw1vo0\r
+\r
+aai.realtime.clients=RO,SDNC,MSO,SO\r
+\r
+# End of INTERNAL Specific Properties\r
+\r
+aai.notification.current.version=v14\r
+aai.notificationEvent.default.status=UNPROCESSED\r
+aai.notificationEvent.default.eventType=AAI-EVENT\r
+aai.notificationEvent.default.domain=devINT1\r
+aai.notificationEvent.default.sourceName=aai\r
+aai.notificationEvent.default.sequenceNumber=0\r
+aai.notificationEvent.default.severity=NORMAL\r
+aai.notificationEvent.default.version=v14\r
+# This one lets us enable/disable resource-version checking on updates/deletes\r
+aai.resourceversion.enableflag=true\r
+aai.logging.maxStackTraceEntries=10\r
+aai.default.api.version=v14\r
+\r
+# Used by Model-processing code\r
+aai.model.delete.sleep.per.vtx.msec=500\r
+aai.model.query.resultset.maxcount=50\r
+aai.model.query.timeout.sec=90\r
\r
+# Used by Data Grooming\r
+aai.grooming.default.max.fix=150\r
+aai.grooming.default.sleep.minutes=7\r
+\r
+# Used by DupeTool\r
+aai.dupeTool.default.max.fix=25\r
+aai.dupeTool.default.sleep.minutes=7\r
+\r
+aai.model.proc.max.levels=50\r
+aai.edgeTag.proc.max.levels=50\r
+\r
+# Used by the ForceDelete tool\r
+aai.forceDel.protected.nt.list=cloud-region\r
+aai.forceDel.protected.edge.count=10\r
+aai.forceDel.protected.descendant.count=10\r
+\r
+# Used for CTAG-Pool generation\r
+aai.ctagPool.rangeString.vplsPe1=2001-2500\r
+aai.ctagPool.rangeString.vplsPe2=2501-3000\r
+\r
+aai.jms.enable=false\r
+\r
+#used by the dataGrooming and dataSnapshot cleanup tasks\r
+aai.cron.enable.datagroomingcleanup=true\r
+aai.cron.enable.datasnapshotcleanup=true\r
+aai.datagrooming.agezip=5\r
+aai.datagrooming.agedelete=30\r
+aai.datasnapshot.agezip=5\r
+aai.datasnapshot.agedelete=30\r
+\r
+#used by the dataSnapshot and dataGrooming tasks\r
+aai.cron.enable.dataSnapshot=true\r
+aai.cron.enable.dataGrooming=true\r
+\r
+#used by the dataGrooming tasks\r
+aai.datagrooming.enableautofix=true\r
+aai.datagrooming.enabledupefixon=true\r
+aai.datagrooming.enabledontfixorphans=true\r
+aai.datagrooming.enabletimewindowminutes=true\r
+aai.datagrooming.enableskiphostcheck=false\r
+aai.datagrooming.enablesleepminutes=false\r
+aai.datagrooming.enableedgesonly=false\r
+aai.datagrooming.enableskipedgechecks=false\r
+aai.datagrooming.enablemaxfix=false\r
+aai.datagrooming.enablesinglecommits=false\r
+aai.datagrooming.enabledupecheckoff=false\r
+aai.datagrooming.enableghost2checkoff=false\r
+aai.datagrooming.enableghost2fixon=false\r
+aai.datagrooming.enablef=false\r
+\r
+# used by the dataGrooming to set values\r
+aai.datagrooming.timewindowminutesvalue=10500\r
+aai.datagrooming.sleepminutesvalue=100\r
+aai.datagrooming.maxfixvalue=10\r
+aai.datagrooming.fvalue=10\r
+\r
+#timeout for traversal enabled flag\r
+aai.graphadmin.timeoutenabled=true\r
+\r
+#timeout app specific -1 to bypass for that app id, a whole number to override the timeout with that value (in ms)\r
+aai.graphadmin.timeout.appspecific=JUNITTESTAPP1,1|JUNITTESTAPP2,-1|DCAE-CCS,-1|DCAES,-1|AAI-FILEGEN-GFPIP,-1|FitNesse-Test-PS2418,-1|FitNesse-Test-jenkins,-1|FitNesse-Test-ps2418,-1|FitNesse-Relationship-Test-PS2418,-1|FitNesse-Relationship-Test-ps2418,-1|FitNesse-Relationship-Test-jenkins,-1|VPESAT,-1|AAIRctFeed,-1|NewvceCreator,-1|IANewvceCreator,-1|AAI-CSIOVALS,-1\r
+\r
+#default timeout limit added for graphadmin if not overridden (in ms)\r
+aai.graphadmin.timeoutlimit=180000\r
+\r
+# Disable the process checks which are oriented towards the Linux OS\r
+# These props should only be true for local development on Windows\r
+aai.disable.check.snapshot.running=true\r
+aai.disable.check.grooming.running=true\r
+\r
+# Specify the params listed here that you would have sent to the dataSnapshot shell script\r
+# JUST_TAKE_SNAPSHOT\r
+# THREADED_SNAPSHOT 2 DEBUG\r
+# THREADED_SNAPSHOT 2\r
+aai.datasnapshot.params=JUST_TAKE_SNAPSHOT\r
+\r
diff --git a/src/main/resources/etc/appprops/datatoolscrons.properties b/src/main/resources/etc/appprops/datatoolscrons.properties
new file mode 100644 (file)
index 0000000..74b3c9e
--- /dev/null
@@ -0,0 +1,12 @@
+#Cron expressions
+#please note these must be in Quartz cron syntax
+#column key: seconds minutes hours dayOfMonth month dayOfWeek
+#note: dayOfWeek is optional, the rest are mandatory
+#for more information refer to http://www.quartz-scheduler.org/documentation/quartz-2.x/tutorials/crontrigger.html
+#this site can generate new expressions for you: http://www.cronmaker.com/
+#BUT you must omit the last (seventh) column when you copy its output (spring expects exactly 6 fields and doesn't allow the seventh optional one)
+datagroomingcleanup.cron=0 06 0 * * ?
+datasnapshotcleanup.cron=0 17 0 * * ?
+datasnapshottasks.cron=0 45 * * * ?
+datagroomingtasks.cron=0 10 1,5,9,13,17,21 * * ?
+dataexporttask.cron=0 02 3 * * ?
\ No newline at end of file
diff --git a/src/main/resources/etc/appprops/dynamic.properties b/src/main/resources/etc/appprops/dynamic.properties
new file mode 100644 (file)
index 0000000..38e1bda
--- /dev/null
@@ -0,0 +1,34 @@
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+query.fast-property=true
+# the following parameters are not reloaded automatically and require a manual bounce
+storage.backend=inmemory
+
+#Kept the below if we need to change from in-memory to dynamic instance
+storage.hostname=localhost
+
+#caching on
+cache.db-cache = true
+cache.db-cache-clean-wait = 20
+cache.db-cache-time = 180000
+cache.db-cache-size = 0.3
+load.snapshot.file=false
diff --git a/src/main/resources/etc/appprops/error.properties b/src/main/resources/etc/appprops/error.properties
new file mode 100644 (file)
index 0000000..708fb1f
--- /dev/null
@@ -0,0 +1,178 @@
+# Adding comment trying to trigger a build
+#-----------------------------------------------------------------------------------------
+#Key=Disposition:Category:Severity:Error Code:HTTP ResponseCode:RESTError Code:Error Message
+#-----------------------------------------------------------------------------------------
+# testing code, please don't change unless error utility source code changes
+AAI_TESTING=5:2:WARN:0000:400:0001:Error code for testing
+
+# General success
+AAI_0000=0:0:INFO:0000:200:0000:Success
+
+# health check success
+AAI_0001=0:0:INFO:0001:200:0001:Success X-FromAppId=%1 X-TransactionId=%2 
+AAI_0002=0:0:INFO:0002:200:0001:Successful health check
+
+# Success with additional info
+AAI_0003=0:3:INFO:0003:202:0003:Success with additional info performing %1 on %2. Added %3 with key %4
+AAI_0004=0:3:INFO:0004:202:0003:Added prerequisite object to db
+
+#--- aairest: 3000-3299
+# svc errors
+AAI_3000=5:2:INFO:3000:400:3000:Invalid input performing %1 on %2
+AAI_3001=5:6:INFO:3001:404:3001:Resource not found for %1 using id %2
+AAI_3002=5:1:WARN:3002:400:3002:Error writing output performing %1 on %2
+AAI_3003=5:1:WARN:3003:400:3003:Failed to make edge to missing target node of type %3 with keys %4 performing %1 on %2
+AAI_3005=5:6:WARN:3005:404:3001:Node cannot be directly accessed for read, must be accessed via ancestor(s)
+AAI_3006=5:6:WARN:3006:404:3001:Node cannot be directly accessed for write, must be accessed via ancestor(s)
+AAI_3007=5:6:INFO:3007:410:3007:This version (%1) of the API is retired, please migrate to %2
+AAI_3008=5:6:WARN:3008:400:3008:URI is not encoded in UTF-8
+AAI_3009=5:6:WARN:3009:400:3002:Malformed URL
+AAI_3010=5:6:WARN:3010:400:3002:Cannot write via this URL
+AAI_3011=5:6:WARN:3011:400:3000:Unknown XML namespace used in payload
+AAI_3012=5:6:WARN:3012:400:3012:Unrecognized AAI function
+AAI_3013=5:6:WARN:3013:400:3013:Query payload missing required parameters %1
+AAI_3014=5:6:WARN:3014:400:3014:Query payload is invalid %1
+# pol errors
+AAI_3100=5:1:WARN:3100:400:3100:Unsupported operation %1
+AAI_3101=5:1:WARN:3101:403:3101:Attempt by client %1 to execute API %2
+AAI_3102=5:1:WARN:3102:400:3102:Error parsing input performing %1 on %2
+AAI_3300=5:1:WARN:3300:403:3300:Unauthorized
+AAI_3301=5:1:WARN:3301:401:3301:Stale credentials
+AAI_3302=5:1:WARN:3302:401:3301:Not authenticated
+AAI_3303=5:1:WARN:3303:403:3300:Too many objects would be returned by this request, please refine your request and retry
+
+#--- aaigen: 4000-4099
+AAI_4000=5:4:ERROR:4000:500:3002:Internal Error
+AAI_4001=5:4:FATAL:4001:500:3002:Configuration file not found
+AAI_4002=5:4:FATAL:4002:500:3002:Error reading Configuration file
+AAI_4003=5:4:ERROR:4003:500:3002:Error writing to log file
+AAI_4004=5:4:FATAL:4004:500:3002:Error reading/parsing the error properties file
+AAI_4005=5:4:FATAL:4005:500:3002:Missing or invalid configuration parameter
+AAI_4006=5:4:FATAL:4006:500:3002:Unexpected error in service
+AAI_4007=5:4:WARN:4007:500:3102:Input parsing error
+AAI_4008=5:4:ERROR:4008:500:3002:Output parsing error
+AAI_4009=4:0:WARN:4009:400:3000:Invalid X-FromAppId in header
+AAI_4010=4:0:WARN:4010:400:3000:Invalid X-TransactionId in header
+AAI_4011=5:4:ERROR:4011:500:3002:Missing data for REST error response
+AAI_4014=4:0:WARN:4014:400:3000:Invalid Accept header
+AAI_4015=4:0:WARN:4015:400:3000:You must provide at least one indexed property
+AAI_4016=4:0:WARN:4016:400:3000:The depth parameter must be a number or the string "all"
+AAI_4017=5:2:INFO:4017:400:3000:Could not set property
+AAI_4018=5:2:WARN:4018:400:3000:Unable to convert the string to integer
+#--- aaidbmap: 5102-5199
+AAI_5102=5:4:FATAL:5102:500:3002:Graph database is null after open
+AAI_5105=5:4:ERROR:5105:500:3002:Unexpected error reading/updating database
+AAI_5106=5:4:WARN:5106:404:3001:Node not found
+AAI_5107=5:2:WARN:5107:400:3000:Required information missing
+AAI_5108=5:2:WARN:5108:200:0:Unexpected information in request being ignored
+
+#--- aaidbgen: 6101-6199
+AAI_6101=5:4:ERROR:6101:500:3002:null JanusGraph object passed
+AAI_6102=5:4:WARN:6102:400:3000:Passed-in property is not valid for this nodeType
+AAI_6103=5:4:WARN:6103:400:3000:Required Node-property not found in input data
+AAI_6104=5:4:WARN:6104:400:3000:Required Node-property was passed with no data
+AAI_6105=5:4:WARN:6105:400:3000:Node-Key-Property not defined in DbMaps
+AAI_6106=5:4:WARN:6106:400:3000:Passed-in property is not valid for this edgeType
+AAI_6107=5:4:WARN:6107:400:3000:Required Edge-property not found in input data
+AAI_6108=5:4:WARN:6108:400:3000:Required Edge-property was passed with no data
+AAI_6109=5:4:WARN:6109:400:3000:Bad dependent Node value
+AAI_6110=5:4:ERROR:6110:400:3100:Node cannot be deleted
+AAI_6111=5:4:WARN:6111:400:3000:JSON processing error
+AAI_6112=5:4:ERROR:6112:400:3000:More than one node found by getUniqueNode()
+AAI_6114=5:4:INFO:6114:404:3001:Node Not Found
+AAI_6115=5:4:ERROR:6115:400:3000:Unrecognized NodeType
+AAI_6116=5:4:ERROR:6116:400:3000:Unrecognized Property
+AAI_6117=5:4:ERROR:6117:400:3000:Uniqueness constraint violated
+AAI_6118=5:4:WARN:6118:400:3000:Required Field not passed.
+AAI_6120=5:4:WARN:6120:400:3000:Bad Parameter Passed
+AAI_6121=5:4:ERROR:6121:400:3000:Problem with internal AAI reference data
+AAI_6122=5:4:ERROR:6122:400:3000:Data Set not complete in DB for this request
+AAI_6123=5:4:ERROR:6123:500:3000:Bad Data found by DataGrooming Tool - Investigate
+AAI_6124=5:4:ERROR:6124:500:3000:File read/write error
+AAI_6125=5:4:WARN:6125:500:3000:Problem Pulling Data Set
+AAI_6126=5:4:ERROR:6126:400:3000:Edge cannot be deleted
+AAI_6127=5:4:INFO:6127:404:3001:Edge Not Found
+AAI_6128=5:4:INFO:6128:500:3000:Unexpected error
+AAI_6129=5:4:INFO:6129:404:3003:Error making edge to target node
+AAI_6130=5:4:WARN:6130:412:3000:Precondition Required
+AAI_6131=5:4:WARN:6131:412:3000:Precondition Failed
+AAI_6132=5:4:WARN:6132:400:3000:Bad Model Definition 
+AAI_6133=5:4:WARN:6133:400:3000:Bad Named Query Definition
+AAI_6134=5:4:ERROR:6134:500:6134:Could not persist transaction to storage back end. Exhausted retry amount
+AAI_6135=5:4:WARN:6135:412:3000:Resource version specified on create
+AAI_6136=5:4:ERROR:6136:400:3000:Object cannot hold multiple items
+AAI_6137=5:4:ERROR:6137:400:3000:Cannot perform writes on multiple vertices
+AAI_6138=5:4:ERROR:6138:400:3000:Cannot delete multiple vertices
+AAI_6139=5:4:ERROR:6139:404:3000:Attempted to add edge to vertex that does not exist
+AAI_6140=5:4:ERROR:6140:400:3000:Edge multiplicity violated
+AAI_6141=5:4:WARN:6141:400:3000:Please Refine Query
+AAI_6142=5:4:INFO:6142:400:3000:Retrying transaction
+AAI_6143=5:4:INFO:6143:400:3000:Ghost vertex found
+AAI_6144=5:4:WARN:6144:400:3000:Cycle found in graph
+AAI_6145=5:4:ERROR:6145:400:3000:Cannot create a nested/containment edge via relationship
+AAI_6146=5:4:ERROR:6146:400:3000:Ambiguous identity map found, use a URI instead
+AAI_6147=5:4:ERROR:6147:400:3000:Payload Limit Reached, reduce payload
+
+#--- aaicsvp: 7101-7199
+AAI_7101=5:4:ERROR:7101:500:3002:Unexpected error in CSV file processing
+AAI_7102=5:4:ERROR:7102:500:3002:Error in cleanup temporary directory
+#AAI_7103=4:2:ERROR:7103:500:3002:Unsupported user
+AAI_7104=5:4:ERROR:7104:500:3002:Failed to create directory
+AAI_7105=5:4:ERROR:7105:500:3002:Temporary directory exists
+AAI_7106=5:4:ERROR:7106:500:3002:Cannot delete
+AAI_7107=5:4:ERROR:7107:500:3002:Input file does not exist
+AAI_7108=5:4:ERROR:7108:500:3002:Output file does not exist
+AAI_7109=5:4:ERROR:7109:500:3002:Error closing file
+AAI_7110=5:4:ERROR:7110:500:3002:Error loading/reading properties file
+AAI_7111=5:4:ERROR:7111:500:3002:Error executing shell script
+AAI_7112=5:4:ERROR:7112:500:3002:Error creating output file
+AAI_7113=5:4:ERROR:7113:500:3002:Trailer record error
+AAI_7114=5:4:ERROR:7114:500:3002:Input file error
+AAI_7115=5:4:ERROR:7115:500:3002:Unexpected error
+AAI_7116=5:4:ERROR:7116:500:3002:Request error 
+AAI_7117=5:4:ERROR:7117:500:3002:Error in get http client object
+AAI_7118=5:4:ERROR:7118:500:3002:Script Error
+AAI_7119=5:4:ERROR:7119:500:3002:Unknown host
+
+#--- aaisdnc: 7201-7299
+AAI_7202=5:4:ERROR:7202:500:3002:Error getting connection to odl
+AAI_7203=5:4:ERROR:7203:500:3002:Unexpected error calling DataChangeNotification API
+AAI_7204=5:4:ERROR:7204:500:3002:Error returned by DataChangeNotification API
+AAI_7205=5:4:ERROR:7205:500:3002:Unexpected error running notifySDNCOnUpdate
+#AAI_7206=5:4:ERROR:7206:500:3002:Invalid data returned from ODL
+
+#--- NotificationEvent, using UEB space
+AAI_7350=5:4:ERROR:7305:500:3002:Notification event creation failed
+
+#--- aairestctlr: 7401-7499
+AAI_7401=5:4:ERROR:7401:500:3002:Error connecting to AAI REST API
+AAI_7402=5:4:ERROR:7402:500:3002:Unexpected error
+AAI_7403=5:4:WARN:7403:400:3001:Request error
+AAI_7404=5:4:INFO:7404:404:3001:Node not found
+AAI_7405=5:4:WARN:7405:200:0:UUID not formatted correctly, generating UUID
+AAI_7406=5:4:ERROR:7406:400:7406:Request Timed Out
+
+#--- aaicsiovals: 7501-7599
+#AAI_7501=5:4:WARN:7501:500:3002:Error getting connection to CSI-OVALS
+AAI_7502=5:4:WARN:7502:500:3002:Bad parameter when trying to build request for CSI-OVALS
+AAI_7503=5:4:WARN:7503:500:3002:Error returned by CSI-OVALS
+
+#-- dataexport: 8001-8099
+AAI_8001=5:4:WARN:8001:500:3002:Unable to find data snapshots
+AAI_8002=5:4:ERROR:8002:500:3002:Script Error
+AAI_8003=5:4:ERROR:8003:500:3002:Dynamic Payload Generator Error
+#--- aaiauth: 9101-9199
+AAI_9101=5:0:WARN:9101:403:3300:User is not authorized to perform function
+#AAI_9102=5:0:WARN:9102:401:3301:Refresh credentials from source
+#AAI_9103=5:0:WARN:9103:403:3300:User not found
+#AAI_9104=5:0:WARN:9104:401:3302:Authentication error
+#AAI_9105=5:0:WARN:9105:403:3300:Authorization error
+#AAI_9106=5:0:WARN:9106:403:3300:Invalid AppId
+#AAI_9107=5:0:WARN:9107:403:3300:No Username in Request
+AAI_9107=5:0:WARN:9107:403:3300:SSL is not provided in request, please contact admin
+AAI_9108=5:0:WARN:9107:403:3300:Basic auth credentials is not provided in the request
+
+#--- aaiinstar: 9201-9299
+#AAI_9201=5:4:ERROR:9201:500:3002:Unable to send notification
+AAI_9202=5:4:ERROR:9202:500:3002:Unable to start a thread
+
diff --git a/src/main/resources/etc/appprops/janusgraph-cached.properties b/src/main/resources/etc/appprops/janusgraph-cached.properties
new file mode 100644 (file)
index 0000000..c90816d
--- /dev/null
@@ -0,0 +1,36 @@
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017-18 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+query.fast-property=true
+# the following parameters are not reloaded automatically and require a manual bounce
+storage.backend=inmemory
+storage.hostname=localhost
+
+#schema.default=none
+storage.lock.wait-time=300
+storage.hbase.table=aaigraph-dev1.dev
+storage.hbase.ext.zookeeper.znode.parent=/hbase-unsecure
+#caching on
+cache.db-cache = true
+cache.db-cache-clean-wait = 20
+cache.db-cache-time = 180000
+cache.db-cache-size = 0.3
+
+#load graphson file on startup
+load.snapshot.file=false
\ No newline at end of file
diff --git a/src/main/resources/etc/appprops/janusgraph-realtime.properties b/src/main/resources/etc/appprops/janusgraph-realtime.properties
new file mode 100644 (file)
index 0000000..ccbe5ba
--- /dev/null
@@ -0,0 +1,33 @@
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+query.fast-property=true
+# the following parameters are not reloaded automatically and require a manual bounce
+storage.backend=inmemory
+storage.hostname=localhost
+
+#schema.default=none
+storage.lock.wait-time=300
+storage.hbase.table=aaigraph-dev1.dev
+storage.hbase.ext.zookeeper.znode.parent=/hbase-unsecure
+# Setting db-cache to false ensure the fastest propagation of changes across servers
+cache.db-cache = false
+
+#load graphson file on startup
+load.snapshot.file=false
\ No newline at end of file
diff --git a/src/main/resources/etc/appprops/logging.properties b/src/main/resources/etc/appprops/logging.properties
new file mode 100644 (file)
index 0000000..e029cc4
--- /dev/null
@@ -0,0 +1,128 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+handlers = 1catalina.org.apache.juli.FileHandler, 2localhost.org.apache.juli.FileHandler, 3manager.org.apache.juli.FileHandler, 4host-manager.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+
+.handlers = 1catalina.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+
+############################################################
+# Handler specific properties.
+# Describes specific configuration info for Handlers.
+############################################################
+
+# this is where we will limit logging on components
+org.apache.hadoop.level=WARNING
+org.apache.zookeeper.level=WARNING
+org.reflections.level=WARNING
+com.thinkaurelius.level=WARNING
+
+1catalina.org.apache.juli.FileHandler.level = FINE
+1catalina.org.apache.juli.FileHandler.directory = ${catalina.base}/logs
+1catalina.org.apache.juli.FileHandler.prefix = catalina.
+
+2localhost.org.apache.juli.FileHandler.level = FINE
+2localhost.org.apache.juli.FileHandler.directory = ${catalina.base}/logs
+2localhost.org.apache.juli.FileHandler.prefix = localhost.
+
+3manager.org.apache.juli.FileHandler.level = FINE
+3manager.org.apache.juli.FileHandler.directory = ${catalina.base}/logs
+3manager.org.apache.juli.FileHandler.prefix = manager.
+
+4host-manager.org.apache.juli.FileHandler.level = FINE
+4host-manager.org.apache.juli.FileHandler.directory = ${catalina.base}/logs
+4host-manager.org.apache.juli.FileHandler.prefix = host-manager.
+
+java.util.logging.ConsoleHandler.level = INFO
+java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
+
+
+
+############################################################
+# Facility specific properties.
+# Provides extra control for each logger.
+############################################################
+
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].level = INFO
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].handlers = 2localhost.org.apache.juli.FileHandler
+
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/manager].level = INFO
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/manager].handlers = 3manager.org.apache.juli.FileHandler
+
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/host-manager].level = INFO
+org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/host-manager].handlers = 4host-manager.org.apache.juli.FileHandler
+
+# For example, set the org.apache.catalina.util.LifecycleBase logger to log
+# each component that extends LifecycleBase changing state:
+#org.apache.catalina.util.LifecycleBase.level = FINE
+
+# To see debug messages in TldLocationsCache, uncomment the following line:
+#org.apache.jasper.compiler.TldLocationsCache.level = FINE
+
+
+################################
+# OpenEJB/TomEE specific loggers
+################################
+#
+# ACTIVATE LEVEL/HANDLERS YOU WANT
+# IF YOU ACTIVATE 5tomee.org.apache.juli.FileHandler
+# ADD IT TO handlers LINE LIKE:
+#
+# handlers = 1catalina.org.apache.juli.FileHandler, 2localhost.org.apache.juli.FileHandler, 3manager.org.apache.juli.FileHandler, 4host-manager.org.apache.juli.FileHandler, 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+#
+# LEVELS:
+# =======
+#
+# OpenEJB.level             = WARNING
+# OpenEJB.options.level     = INFO
+# OpenEJB.server.level      = INFO
+# OpenEJB.startup.level     = INFO
+# OpenEJB.startup.service.level = WARNING
+# OpenEJB.startup.config.level = INFO
+# OpenEJB.hsql.level        = INFO
+# CORBA-Adapter.level       = WARNING
+# Transaction.level         = WARNING
+# org.apache.activemq.level = SEVERE
+# org.apache.geronimo.level = SEVERE
+# openjpa.level             = WARNING
+# OpenEJB.cdi.level         = INFO
+# org.apache.webbeans.level = INFO
+# org.apache.openejb.level = FINE
+#
+# HANDLERS:
+# =========
+#
+# OpenEJB.handlers             = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.options.handlers     = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.server.handlers      = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.startup.handlers     = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.startup.service.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.startup.config.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.hsql.handlers        = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# CORBA-Adapter.handlers       = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# Transaction.handlers         = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# org.apache.activemq.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# org.apache.geronimo.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# openjpa.handlers             = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# OpenEJB.cdi.handlers         = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# org.apache.webbeans.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+# org.apache.openejb.handlers = 5tomee.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler
+#
+# TOMEE HANDLER SAMPLE:
+# =====================
+#
+# 5tomee.org.apache.juli.FileHandler.level = FINEST
+# 5tomee.org.apache.juli.FileHandler.directory = ${catalina.base}/logs
+# 5tomee.org.apache.juli.FileHandler.prefix = tomee.
+
diff --git a/src/main/resources/etc/auth/aai_keystore b/src/main/resources/etc/auth/aai_keystore
new file mode 100644 (file)
index 0000000..16d93a7
Binary files /dev/null and b/src/main/resources/etc/auth/aai_keystore differ
diff --git a/src/main/resources/etc/auth/realm.properties b/src/main/resources/etc/auth/realm.properties
new file mode 100644 (file)
index 0000000..f0e0172
--- /dev/null
@@ -0,0 +1,13 @@
+# format : username: password[,rolename ...]
+# default username/password: AAI/AAI, MSO/MSO, ModelLoader/ModelLoader...
+AAI:OBF:1gfr1ev31gg7,admin
+MSO:OBF:1jzx1lz31k01,admin
+SDNC:OBF:1itr1i0l1i151isv,admin
+DCAE:OBF:1g8u1f9d1f991g8w,admin
+POLICY:OBF:1mk61i171ima1im41i0j1mko,admin
+ASDC:OBF:1f991j0u1j001f9d,admin
+VID:OBF:1jm91i0v1jl9,admin
+APPC:OBF:1f991ksf1ksf1f9d,admin
+ModelLoader:OBF:1qvu1v2h1sov1sar1wfw1j7j1wg21saj1sov1v1x1qxw,admin
+AaiUI:OBF:1gfr1p571unz1p4j1gg7,admin
+OOF:OBF:1img1ke71ily,admin
diff --git a/src/main/resources/etc/scriptdata/addmanualdata/README b/src/main/resources/etc/scriptdata/addmanualdata/README
new file mode 100644 (file)
index 0000000..662f35b
--- /dev/null
@@ -0,0 +1,17 @@
+To add manual data, two files will be populated in the release directory under this folder.
+If the release directory does not exist, create it.
+
+The addManualData.sh script requires the release to be passed
+as a parameter. It finds and applies manual data changes under
+the release folder matching the parameter.
+
+This script is expected to be run for each installation. The script uses
+the PutTool, and flags the put to ignore 412 errors produced
+when the resource already exists.
+
+100-<file>.txt will contain the resource to be put. 
+100-<file>.json will be the json file passed to the PutTool.
+
+
+bundleconfig/etc/scriptdata/addmanualdata/1610/<file>.txt
+bundleconfig/etc/scriptdata/addmanualdata/1610/<file>.json
diff --git a/src/main/resources/etc/scriptdata/addmanualdata/tenant_isolation/README b/src/main/resources/etc/scriptdata/addmanualdata/tenant_isolation/README
new file mode 100644 (file)
index 0000000..16510a0
--- /dev/null
@@ -0,0 +1 @@
+The tenant_isolation directory is used to store the payload files created by the dynamic payload generator.
diff --git a/src/main/resources/etc/scriptdata/tenant_isolation/inputFilters.json b/src/main/resources/etc/scriptdata/tenant_isolation/inputFilters.json
new file mode 100644 (file)
index 0000000..07ee9c4
--- /dev/null
@@ -0,0 +1,104 @@
+{
+  "cloud-region" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "2.5|3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "dyh1b|DHY1A|mtn23a|mtn23b|mdt25b|mtn6|au7tx"
+             }
+        ]
+  },
+  "complex" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "2.5|3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "dyh1b|DHY1A|mtn23a|mtn23b|mdt25b|mtn6|au7tx"
+             }
+        ]
+  },
+  "availability-zone" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "2.5|3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "dyh1b|DHY1A|mtn23a|mtn23b|mdt25b|mtn6|au7tx"
+             }
+        ]
+  },
+  "pserver" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "2.5|3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "dyh1b|DHY1A|mtn23a|mtn23b|mdt25b|mtn6|au7tx"
+             }
+        ]
+  },
+  "zone" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "2.5|3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "dyh1b|DHY1A|mtn23a|mtn23b|mdt25b|mtn6|au7tx"
+             }
+        ]
+  },
+  "tenant" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "2.5|3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "dyh1b|DHY1A|mtn23a|mtn23b|mdt25b|mtn6|au7tx"
+             }
+        ]
+  }
+}
\ No newline at end of file
diff --git a/src/main/resources/etc/scriptdata/tenant_isolation/inputFiltersAllzones.json b/src/main/resources/etc/scriptdata/tenant_isolation/inputFiltersAllzones.json
new file mode 100644 (file)
index 0000000..026759d
--- /dev/null
@@ -0,0 +1,78 @@
+{
+  "cloud-region" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "2.5|3.0"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "m.*"
+             }
+        ]
+  },
+  "complex" : {
+        "filtered-node-type": "complex",
+        "filters": []
+  },
+  "availability-zone" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "2.5|3.0"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "m.*"
+             }
+        ]
+  },
+  "pserver" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "2.5|3.0"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "m.*"
+             }
+        ]
+  },
+  "zone" : {
+        "filtered-node-type": "zone",
+        "filters": []
+  },
+  "tenant" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "2.5|3.0"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "m.*"
+             }
+        ]
+  }
+}
diff --git a/src/main/resources/etc/scriptdata/tenant_isolation/inputFilters_E2E.json b/src/main/resources/etc/scriptdata/tenant_isolation/inputFilters_E2E.json
new file mode 100644 (file)
index 0000000..24ee80d
--- /dev/null
@@ -0,0 +1,78 @@
+{
+  "cloud-region" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "RDM3|RDM5a|RDM5b|RDM6a|RDM6b|DPA2a|DPA2b"
+             }
+        ]
+  },
+  "complex" : {
+        "filtered-node-type": "complex",
+        "filters": []
+  },
+  "availability-zone" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "RDM3|RDM5a|RDM5b|RDM6a|RDM6b|DPA2a|DPA2b"
+             }
+        ]
+  },
+  "pserver" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "RDM3|RDM5a|RDM5b|RDM6a|RDM6b|DPA2a|DPA2b"
+             }
+        ]
+  },
+  "zone" : {
+        "filtered-node-type": "zone",
+        "filters": []
+  },
+  "tenant" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "RDM3|RDM5a|RDM5b|RDM6a|RDM6b|DPA2a|DPA2b"
+             }
+        ]
+  }
+}
diff --git a/src/main/resources/etc/scriptdata/tenant_isolation/inputFilters_IST.json b/src/main/resources/etc/scriptdata/tenant_isolation/inputFilters_IST.json
new file mode 100644 (file)
index 0000000..8ebbf6c
--- /dev/null
@@ -0,0 +1,78 @@
+{
+  "cloud-region" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "dyh1b|DHY1A|mtn23a|mtn23b|mdt25b|mtn6|au7tx"
+             }
+        ]
+  },
+  "complex" : {
+        "filtered-node-type": "complex",
+        "filters": []
+  },
+  "availability-zone" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "dyh1b|DHY1A|mtn23a|mtn23b|mdt25b|mtn6|au7tx"
+             }
+        ]
+  },
+  "pserver" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "dyh1b|DHY1A|mtn23a|mtn23b|mdt25b|mtn6|au7tx"
+             }
+        ]
+  },
+  "zone" : {
+        "filtered-node-type": "zone",
+        "filters": []
+  },
+  "tenant" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "dyh1b|DHY1A|mtn23a|mtn23b|mdt25b|mtn6|au7tx"
+             }
+        ]
+  }
+}
diff --git a/src/main/resources/etc/scriptdata/tenant_isolation/inputFilters_PROD.json b/src/main/resources/etc/scriptdata/tenant_isolation/inputFilters_PROD.json
new file mode 100644 (file)
index 0000000..24ee80d
--- /dev/null
@@ -0,0 +1,78 @@
+{
+  "cloud-region" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "RDM3|RDM5a|RDM5b|RDM6a|RDM6b|DPA2a|DPA2b"
+             }
+        ]
+  },
+  "complex" : {
+        "filtered-node-type": "complex",
+        "filters": []
+  },
+  "availability-zone" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "RDM3|RDM5a|RDM5b|RDM6a|RDM6b|DPA2a|DPA2b"
+             }
+        ]
+  },
+  "pserver" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "RDM3|RDM5a|RDM5b|RDM6a|RDM6b|DPA2a|DPA2b"
+             }
+        ]
+  },
+  "zone" : {
+        "filtered-node-type": "zone",
+        "filters": []
+  },
+  "tenant" : {
+        "filtered-node-type": "cloud-region",
+        "filters": [
+             {
+                "property": "cloud-owner",
+                 "regex": "att-aic"
+             },
+             {
+                "property": "cloud-region-version",
+                 "regex": "3.0|aic3.0|3.6"
+             },
+             {
+                "property": "cloud-region-id",
+                "regex": "RDM3|RDM5a|RDM5b|RDM6a|RDM6b|DPA2a|DPA2b"
+             }
+        ]
+  }
+}
diff --git a/src/main/resources/etc/scriptdata/tenant_isolation/nodes.json b/src/main/resources/etc/scriptdata/tenant_isolation/nodes.json
new file mode 100644 (file)
index 0000000..1bfc62b
--- /dev/null
@@ -0,0 +1,26 @@
+{
+  "cloud-region": {
+             "cousins" : ["complex","zone"],
+             "parents" : []
+  },
+  "availability-zone": {
+             "cousins" : ["complex"],
+             "parents":["cloud-region"]
+  },
+  "pserver" : {
+             "cousins" : ["zone", "complex", "availability-zone","cloud-region"],
+             "parents":[]
+  },
+  "complex" : {
+           "cousins":[],
+           "parents":[]
+  },
+  "tenant" : {
+          "cousins":[],
+          "parents":["cloud-region"]
+  },
+  "zone" : {
+          "cousins":["complex"],
+          "parents":[]
+  }
+}
diff --git a/src/main/resources/etc/scriptdata/tenant_isolation/nodesAZCloud.json b/src/main/resources/etc/scriptdata/tenant_isolation/nodesAZCloud.json
new file mode 100644 (file)
index 0000000..b955757
--- /dev/null
@@ -0,0 +1,22 @@
+{
+  "cloud-region": {
+             "cousins" : [],
+             "parents":[]
+  },
+  "availability-zone": {
+             "cousins" : [],
+             "parents":["cloud-region"]
+  },
+  "pserver" : {
+             "cousins" : ["zone", "complex", "availability-zone"],
+             "parents":[]
+  },
+  "complex" : {
+           "cousins":[],
+           "parents":[]
+  },
+  "zone" : {
+          "cousins":["complex"],
+          "parents":[]
+  }
+}
diff --git a/src/main/resources/etc/scriptdata/tenant_isolation/nodesIncremental.json b/src/main/resources/etc/scriptdata/tenant_isolation/nodesIncremental.json
new file mode 100644 (file)
index 0000000..0816bc4
--- /dev/null
@@ -0,0 +1,10 @@
+{
+  "pserver" : {
+             "cousins" : ["zone", "complex", "availability-zone","cloud-region"],
+             "parents":[]
+  },
+  "tenant" : {
+          "cousins":[],
+          "parents":["cloud-region"]
+  }
+}
diff --git a/src/main/resources/etc/scriptdata/tenant_isolation/nodesNoAZ.json b/src/main/resources/etc/scriptdata/tenant_isolation/nodesNoAZ.json
new file mode 100644 (file)
index 0000000..a0dfae5
--- /dev/null
@@ -0,0 +1,14 @@
+{
+  "pserver" : {
+             "cousins" : ["zone", "complex"],
+             "parents":[]
+  },
+  "complex" : {
+           "cousins":[],
+           "parents":[]
+  },
+  "zone" : {
+          "cousins":["complex"],
+          "parents":[]
+  }
+}
diff --git a/src/main/resources/forceDelete-logback.xml b/src/main/resources/forceDelete-logback.xml
new file mode 100644 (file)
index 0000000..5a3b2e2
--- /dev/null
@@ -0,0 +1,85 @@
+<!--
+
+    ============LICENSE_START=======================================================
+    org.onap.aai
+    ================================================================================
+    Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+    ================================================================================
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+    ============LICENSE_END=========================================================
+
+    ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+       <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+       <appender name="forceDeletelog" class="ch.qos.logback.classic.sift.SiftingAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <!-- This is MDC value -->
+               <!-- We will assign a value to 'logFilenameAppender' via Java code -->
+               <discriminator>
+                       <key>logFilenameAppender</key>
+                       <defaultValue>console</defaultValue>
+               </discriminator>
+               <sift>
+                       <!-- A standard RollingFileAppender, the log file is based on 'logFileName' 
+                               at runtime -->
+                       <appender name="FILE-${logFilenameAppender}"
+                               class="ch.qos.logback.core.rolling.RollingFileAppender">
+                               <file>${logDirectory}/forceDelete/${logFilenameAppender}.log</file>
+                               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                                       <fileNamePattern>${logDirectory}/forceDelete/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+                                       </fileNamePattern>
+                               </rollingPolicy>
+                               <encoder>
+                                       <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+                               </encoder>
+                       </appender>
+               </sift>
+       </appender>
+
+       <logger name="org.reflections" level="ERROR" additivity="false">
+               <appender-ref ref="forceDeletelog" />
+       </logger>
+       <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+               <appender-ref ref="forceDeletelog" />
+       </logger>
+       <logger name="org.apache.hadoop" level="ERROR" additivity="false">
+               <appender-ref ref="forceDeletelog" />
+       </logger>
+       <logger name="org.janusgraph" level="ERROR" additivity="false">
+               <appender-ref ref="forceDeletelog" />
+       </logger>
+       <logger name="ch.qos.logback.classic" level="ERROR" additivity="false">
+               <appender-ref ref="forceDeletelog" />
+       </logger>
+       <logger name="ch.qos.logback.core" level="ERROR" additivity="false">
+               <appender-ref ref="forceDeletelog" />
+       </logger>
+       <logger name="com.att.eelf" level="ERROR" additivity="false">
+               <appender-ref ref="forceDeletelog" />
+       </logger>
+       <logger name="org.onap.aai" level="ERROR" additivity="false">
+               <appender-ref ref="forceDeletelog" />
+       </logger>
+
+
+       <root level="INFO">
+               <appender-ref ref="forceDeletelog" />
+       </root>
+</configuration>
\ No newline at end of file
diff --git a/src/main/resources/localhost-access-logback.xml b/src/main/resources/localhost-access-logback.xml
new file mode 100644 (file)
index 0000000..a318796
--- /dev/null
@@ -0,0 +1,62 @@
+<!--
+
+    ============LICENSE_START=======================================================
+    org.onap.aai
+    ================================================================================
+    Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+    ================================================================================
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+    ============LICENSE_END=========================================================
+
+    ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+       <property name="AJSC_HOME" value="${AJSC_HOME:-.}" />
+       <appender name="ACCESS"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <file>${AJSC_HOME}/logs/ajsc-jetty/localhost_access.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${AJSC_HOME}/logs/ajsc-jetty/localhost_access.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.CustomLogPatternLayoutEncoder">
+                       <Pattern>%a %u %z [%t] "%m %U%q" %s %b %y %i{X-TransactionId} %i{X-FromAppId} %i{X-Forwarded-For} %i{X-AAI-SSL-Client-CN} %i{X-AAI-SSL-Client-OU} %i{X-AAI-SSL-Client-O} %i{X-AAI-SSL-Client-L} %i{X-AAI-SSL-Client-ST} %i{X-AAI-SSL-Client-C} %i{X-AAI-SSL-Client-NotBefore} %i{X-AAI-SSL-Client-NotAfter} %i{X-AAI-SSL-Client-DN} %D</Pattern>
+               </encoder>
+       </appender>
+       <appender-ref ref="ACCESS" />
+</configuration>
+
+<!-- 
+%a - Remote IP address
+%A - Local IP address
+%b - Bytes sent, excluding HTTP headers, or '-' if no bytes were sent
+%B - Bytes sent, excluding HTTP headers
+%h - Remote host name
+%H - Request protocol
+%l - Remote logical username from identd (always returns '-')
+%m - Request method
+%p - Local port
+%q - Query string (prepended with a '?' if it exists, otherwise an empty string)
+%r - First line of the request
+%s - HTTP status code of the response
+%S - User session ID
+%t - Date and time, in Common Log Format
+%u - Remote user that was authenticated
+%U - Requested URL path
+%v - Local server name
+%I - current request thread name (can compare later with stacktraces)
+
+%z - Custom pattern that parses the cert for the subject
+%y - Custom pattern determines rest or dme2
+ -->
\ No newline at end of file
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml
new file mode 100644 (file)
index 0000000..8f40031
--- /dev/null
@@ -0,0 +1,701 @@
+<!--
+
+    ============LICENSE_START=======================================================
+    org.onap.aai
+    ================================================================================
+    Copyright 2017 AT&T Intellectual Property. All rights reserved.
+    ================================================================================
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+    ============LICENSE_END=========================================================
+
+    ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration scan="true" scanPeriod="60 seconds" debug="false">
+       <statusListener class="ch.qos.logback.core.status.NopStatusListener" />
+
+       <property resource="application.properties" />
+
+       <property name="namespace" value="graph-admin"/>
+
+       <property name="AJSC_HOME" value="${AJSC_HOME:-.}" />
+       
+       <property name="logDirectory" value="${AJSC_HOME}/logs" />
+       <property name="eelfLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+       <property name="eelfAuditLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n|\r\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+       <property name="eelfMetricLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{targetVirtualEntity}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+       <!--  <property name="eelfErrorLogPattern" value="%ecompStartTime|%X{requestId}|%-10t|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%ecompErrorCategory|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n|\r\n', '^'}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/> -->
+       <property name="eelfErrorLogPattern" value="%ecompStartTime|%X{requestId}|%-10t|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%ecompErrorCategory|%ecompResponseCode|%ecompResponseDescription|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+    <property name="eelfTransLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%ecompStatusCode|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{partnerName}:%m%n"/>
+       
+    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
+    <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
+    <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
+       <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+               <encoder>
+                       <pattern>
+                               %clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}
+                       </pattern>
+               </encoder>
+       </appender>
+
+       <appender name="SANE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <file>${logDirectory}/rest/sane.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/rest/sane.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder>
+                       <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} - %msg%n
+                       </pattern>
+               </encoder>
+       </appender>
+
+       <appender name="asyncSANE" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="SANE" />
+       </appender>
+
+       <appender name="METRIC"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <file>${logDirectory}/rest/metrics.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/rest/metrics.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfMetricLogPattern}</pattern>
+               </encoder>
+       </appender>
+       <appender name="asyncMETRIC" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="METRIC" />
+       </appender>
+
+       <appender name="DEBUG"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <file>${logDirectory}/rest/debug.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/rest/debug.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="asyncDEBUG" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="DEBUG" />
+       </appender>
+
+	<appender name="ERROR"
+		class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+			<level>WARN</level>
+		</filter>
+		<file>${logDirectory}/rest/error.log</file>
+		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+			<fileNamePattern>${logDirectory}/rest/error.log.%d{yyyy-MM-dd}
+			</fileNamePattern>
+		</rollingPolicy>
+		<encoder class="org.onap.aai.logging.EcompEncoder">
+			<!-- Property references must not quote the name: ${"x"} is not
+			     substituted by logback and would be written out literally. -->
+			<pattern>${eelfErrorLogPattern}</pattern>
+		</encoder>
+	</appender>
+
+       <appender name="asyncERROR" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="ERROR" />
+       </appender>
+
+       <appender name="AUDIT"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <file>${logDirectory}/rest/audit.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/rest/audit.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfAuditLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="asyncAUDIT" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="AUDIT" />
+       </appender>
+
+       <appender name="translog"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <file>${logDirectory}/rest/translog.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/rest/translog.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfTransLogPattern}</pattern>
+               </encoder>
+       </appender>
+       
+       <appender name="asynctranslog" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="translog" />
+       </appender>
+
+	<appender name="dmaapAAIEventConsumer"
+		class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+			<level>WARN</level>
+		</filter>
+		<File>${logDirectory}/dmaapAAIEventConsumer/error.log</File>
+		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+			<fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}
+			</fileNamePattern>
+		</rollingPolicy>
+		<encoder class="org.onap.aai.logging.EcompEncoder">
+			<!-- Property references must not quote the name: ${"x"} is not
+			     substituted by logback and would be written out literally. -->
+			<pattern>${eelfErrorLogPattern}</pattern>
+		</encoder>
+	</appender>
+
+       <appender name="dmaapAAIEventConsumerDebug"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+       <appender name="dmaapAAIEventConsumerMetric"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfMetricLogPattern}</pattern>
+               </encoder>
+       </appender>
+       <appender name="external"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>WARN</level>
+               </filter>
+               <file>${logDirectory}/external/external.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/external/external.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+       
+       <!-- DataGrooming logs started -->
+       <appender name="dataGrooming" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>WARN</level>
+               </filter>
+               <File>${logDirectory}/dataGrooming/error.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dataGrooming/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfErrorLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="dataGroomingdebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/dataGrooming/debug.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dataGrooming/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="dataGroomingmetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/dataGrooming/metrics.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dataGrooming/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfMetricLogPattern}</pattern>
+               </encoder>
+       </appender>
+       
+       <!-- DataGrooming logs ended -->
+       
+       <!-- DataSnapshot logs started -->
+       <appender name="dataSnapshot" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>WARN</level>
+               </filter>
+               <File>${logDirectory}/dataSnapshot/error.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dataSnapshot/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfErrorLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="dataSnapshotdebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/dataSnapshot/debug.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dataSnapshot/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="dataSnapshotmetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/dataSnapshot/metrics.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dataSnapshot/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfMetricLogPattern}</pattern>
+               </encoder>
+       </appender>
+       
+       <!-- DataSnapshot logs ended -->
+       
+       <!-- CreateDBSchema logs started  -->
+	<appender name="createDBSchema" class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+			<level>WARN</level>
+		</filter>
+		<File>${logDirectory}/createDBSchema/error.log</File>
+		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+			<fileNamePattern>${logDirectory}/createDBSchema/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+		</rollingPolicy>
+		<encoder class="org.onap.aai.logging.EcompEncoder">
+			<!-- Property references must not quote the name: ${"x"} is not
+			     substituted by logback and would be written out literally. -->
+			<pattern>${eelfErrorLogPattern}</pattern>
+		</encoder>
+	</appender>
+
+       <appender name="createDBSchemadebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/createDBSchema/debug.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/createDBSchema/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="createDBSchemametric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/createDBSchema/metrics.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/createDBSchema/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfMetricLogPattern}</pattern>
+               </encoder>
+       </appender>
+       <!-- CreateDBSchema logs ended  -->     
+       
+       <!-- DataCleanupTasks logs started  -->
+	<appender name="dataCleanuperror" class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+			<level>WARN</level>
+		</filter>
+		<File>${logDirectory}/misc/error.log</File>
+		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+			<fileNamePattern>${logDirectory}/misc/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+		</rollingPolicy>
+		<encoder class="org.onap.aai.logging.EcompEncoder">
+			<!-- Property references must not quote the name: ${"x"} is not
+			     substituted by logback and would be written out literally. -->
+			<pattern>${eelfErrorLogPattern}</pattern>
+		</encoder>
+	</appender>
+
+       <appender name="dataCleanupdebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/misc/debug.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/misc/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="dataCleanupmetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/misc/metrics.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/misc/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfMetricLogPattern}</pattern>
+               </encoder>
+       </appender>
+       <!-- DataCleanupTasks logs ended  -->   
+                       
+       <!-- pullInvData logs started -->
+	<appender name="pullInvData" class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+			<level>WARN</level>
+		</filter>
+		<File>${logDirectory}/pullInvData/error.log</File>
+		<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+			<fileNamePattern>${logDirectory}/pullInvData/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+		</rollingPolicy>
+		<encoder class="org.onap.aai.logging.EcompEncoder">
+			<!-- Property references must not quote the name: ${"x"} is not
+			     substituted by logback and would be written out literally. -->
+			<pattern>${eelfErrorLogPattern}</pattern>
+		</encoder>
+	</appender>
+
+       <appender name="pullInvDatadebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/pullInvData/debug.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/pullInvData/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="pullInvDatametric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/pullInvData/metrics.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/pullInvData/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfMetricLogPattern}</pattern>
+               </encoder>
+       </appender>
+       <!-- pullInvData logs ended -->
+	<!-- dataExport logs started -->
+       <appender name="dataExportError" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>WARN</level>
+               </filter>
+               <File>${logDirectory}/dataExport/error.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dataExport/error.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfErrorLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="dataExportDebug" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/dataExport/debug.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dataExport/debug.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="dataExportMetric" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/dataExport/metrics.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dataExport/metrics.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfMetricLogPattern}</pattern>
+               </encoder>
+       </appender>
+       
+       <logger name="org.onap.aai" level="DEBUG" additivity="false">
+               <appender-ref ref="asyncDEBUG" />
+               <appender-ref ref="asyncERROR" />
+               <appender-ref ref="asyncMETRIC" />
+               <appender-ref ref="asyncSANE" />
+       </logger>
+
+       <!-- Spring related loggers -->
+       <logger name="org.springframework" level="WARN" />
+       <logger name="org.springframework.beans" level="WARN" />
+       <logger name="org.springframework.web" level="WARN" />
+       <logger name="com.blog.spring.jms" level="WARN" />
+       <logger name="com.jayway.jsonpath" level="WARN" />
+
+       <!-- AJSC Services (bootstrap services) -->
+       <logger name="ajsc" level="WARN" />
+       <logger name="ajsc.RouteMgmtService" level="WARN" />
+       <logger name="ajsc.ComputeService" level="WARN" />
+       <logger name="ajsc.VandelayService" level="WARN" />
+       <logger name="ajsc.FilePersistenceService" level="WARN" />
+       <logger name="ajsc.UserDefinedJarService" level="WARN" />
+       <logger name="ajsc.UserDefinedBeansDefService" level="WARN" />
+       <logger name="ajsc.LoggingConfigurationService" level="WARN" />
+
+       <!-- AJSC related loggers (DME2 Registration, csi logging, restlet, servlet 
+               logging) -->
+       <logger name="org.codehaus.groovy" level="WARN" />
+       <logger name="com.att.scamper" level="WARN" />
+       <logger name="ajsc.utils" level="WARN" />
+       <logger name="ajsc.utils.DME2Helper" level="WARN" />
+       <logger name="ajsc.filters" level="WARN" />
+       <logger name="ajsc.beans.interceptors" level="WARN" />
+       <logger name="ajsc.restlet" level="WARN" />
+       <logger name="ajsc.servlet" level="WARN" />
+       <logger name="com.att.ajsc" level="WARN" />
+       <logger name="com.att.ajsc.csi.logging" level="WARN" />
+       <logger name="com.att.ajsc.filemonitor" level="WARN" />
+       <logger name="com.netflix.loadbalancer" level="WARN" />
+
+       <logger name="org.apache.zookeeper" level="WARN" />
+
+       <!-- Other Loggers that may help troubleshoot -->
+       <logger name="net.sf" level="WARN" />
+       <logger name="org.apache.commons.httpclient" level="WARN" />
+       <logger name="org.apache.commons" level="WARN" />
+       <logger name="org.apache.coyote" level="WARN" />
+       <logger name="org.apache.jasper" level="WARN" />
+
+       <!-- Camel Related Loggers (including restlet/servlet/jaxrs/cxf logging. 
+               May aid in troubleshooting) -->
+       <logger name="org.apache.camel" level="WARN" />
+       <logger name="org.apache.cxf" level="WARN" />
+       <logger name="org.apache.camel.processor.interceptor" level="WARN" />
+       <logger name="org.apache.cxf.jaxrs.interceptor" level="WARN" />
+       <logger name="org.apache.cxf.service" level="WARN" />
+       <logger name="org.restlet" level="WARN" />
+       <logger name="org.apache.camel.component.restlet" level="WARN" />
+
+       <logger name="org.hibernate.validator" level="WARN" />
+       <logger name="org.hibernate" level="WARN" />
+       <logger name="org.hibernate.ejb" level="OFF" />
+
+       <!-- logback internals logging -->
+       <logger name="ch.qos.logback.classic" level="WARN" />
+       <logger name="ch.qos.logback.core" level="WARN" />
+
+       <logger name="org.eclipse.jetty" level="WARN" />
+
+       <!-- logback jms appenders & loggers definition starts here -->
+	<appender name="auditLogs"
+		class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter" />
+		<file>${logDirectory}/perf-audit/Audit-${lrmRVer}-${lrmRO}-${Pid}.log
+		</file>
+		<rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+			<fileNamePattern>${logDirectory}/perf-audit/Audit-${lrmRVer}-${lrmRO}-${Pid}.%i.log.zip
+			</fileNamePattern>
+			<minIndex>1</minIndex>
+			<maxIndex>9</maxIndex>
+		</rollingPolicy>
+		<triggeringPolicy
+			class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+			<maxFileSize>5MB</maxFileSize>
+		</triggeringPolicy>
+		<encoder>
+			<!-- Was the bare literal "eelfAuditLogPattern", which logback would
+			     emit verbatim for every audit event instead of a formatted record;
+			     reference the property instead. NOTE(review): assumes an
+			     eelfAuditLogPattern property is declared earlier in this file,
+			     as in the other AAI logback configs - confirm. -->
+			<pattern>${eelfAuditLogPattern}</pattern>
+		</encoder>
+	</appender>
+	<appender name="perfLogs"
+		class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter" />
+		<file>${logDirectory}/perf-audit/Perform-${lrmRVer}-${lrmRO}-${Pid}.log
+		</file>
+		<rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+			<fileNamePattern>${logDirectory}/perf-audit/Perform-${lrmRVer}-${lrmRO}-${Pid}.%i.log.zip
+			</fileNamePattern>
+			<minIndex>1</minIndex>
+			<maxIndex>9</maxIndex>
+		</rollingPolicy>
+		<triggeringPolicy
+			class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+			<maxFileSize>5MB</maxFileSize>
+		</triggeringPolicy>
+		<encoder>
+			<!-- Dropped the surrounding double quotes that were inside the
+			     pattern text; they were not delimiters and appeared verbatim
+			     at the start and end of every log line. -->
+			<pattern>%d [%thread] %-5level %logger{1024} - %msg%n</pattern>
+		</encoder>
+	</appender>
+       <logger name="AuditRecord" level="INFO" additivity="false">
+               <appender-ref ref="auditLogs" />
+       </logger>
+       <logger name="AuditRecord_DirectCall" level="INFO" additivity="false">
+               <appender-ref ref="auditLogs" />
+       </logger>
+       <logger name="PerfTrackerRecord" level="INFO" additivity="false">
+               <appender-ref ref="perfLogs" />
+       </logger>
+       <!-- logback jms appenders & loggers definition ends here -->
+
+       <logger name="org.onap.aai.interceptors.post" level="DEBUG"
+                       additivity="false">
+               <appender-ref ref="asynctranslog" />
+       </logger>
+
+       <logger name="org.onap.aai.interceptors.pre.SetLoggingContext" level="DEBUG">
+               <appender-ref ref="asyncAUDIT"/>
+       </logger>
+
+       <logger name="org.onap.aai.interceptors.post.ResetLoggingContext" level="DEBUG">
+               <appender-ref ref="asyncAUDIT"/>
+       </logger>
+
+       <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false">
+               <appender-ref ref="dmaapAAIEventConsumer" />
+               <appender-ref ref="dmaapAAIEventConsumerDebug" />
+               <appender-ref ref="dmaapAAIEventConsumerMetric" />
+       </logger>
+
+       <logger name="org.onap.aai.datasnapshot" level="DEBUG" additivity="false">
+               <appender-ref ref="dataSnapshot"/>
+               <appender-ref ref="dataSnapshotdebug"/>
+               <appender-ref ref="dataSnapshotmetric"/>
+               <appender-ref ref="STDOUT"/>
+       </logger>
+
+       <logger name="org.onap.aai.datagrooming" level="DEBUG" additivity="false">
+               <appender-ref ref="dataGrooming"/>
+               <appender-ref ref="dataGroomingdebug"/>
+               <appender-ref ref="dataGroomingmetric"/>
+               <appender-ref ref="STDOUT"/>
+       </logger>
+
+       <logger name="org.onap.aai.schema" level="DEBUG" additivity="false">
+               <appender-ref ref="createDBSchema"/>
+               <appender-ref ref="createDBSchemadebug"/>
+               <appender-ref ref="createDBSchemametric"/>
+       </logger>
+
+       <logger name="org.onap.aai.dbgen.PullInvData" level="DEBUG" additivity="false">
+               <appender-ref ref="pullInvData"/>
+               <appender-ref ref="pullInvDatadebug"/>
+               <appender-ref ref="pullInvDatametric"/>
+       </logger>
+       
+       <logger name="org.onap.aai.datacleanup" level="INFO" additivity="false">
+       <appender-ref ref="dataCleanuperror" />
+       <appender-ref ref="dataCleanupdebug" />
+       <appender-ref ref="dataCleanupmetric" />
+       <appender-ref ref="STDOUT"/>
+       </logger>
+       <logger name="org.onap.aai.dataexport" level="DEBUG" additivity="false">
+               <appender-ref ref="dataExportError"/>
+               <appender-ref ref="dataExportDebug"/>
+               <appender-ref ref="dataExportMetric"/>
+               <appender-ref ref="STDOUT"/>
+       </logger>
+       <logger name="org.apache" level="WARN" />
+       <logger name="org.zookeeper" level="WARN" />
+       <logger name="com.netflix" level="WARN" />
+       <logger name="org.janusgraph" level="WARN" />
+       <logger name="com.att.aft.dme2" level="WARN" />
+
+       <!-- ============================================================================ -->
+       <!-- General EELF logger -->
+       <!-- ============================================================================ -->
+       <logger name="com.att.eelf" level="WARN" additivity="false">
+               <appender-ref ref="asyncDEBUG" />
+               <appender-ref ref="asyncERROR" />
+               <appender-ref ref="asyncMETRIC" />
+       </logger>
+
+       <root level="DEBUG">
+               <appender-ref ref="external" />
+       </root>
+</configuration>
diff --git a/src/main/resources/migration-logback.xml b/src/main/resources/migration-logback.xml
new file mode 100644 (file)
index 0000000..ff56f57
--- /dev/null
@@ -0,0 +1,84 @@
+<!--
+
+    ============LICENSE_START=======================================================
+    org.onap.aai
+    ================================================================================
+    Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+    ================================================================================
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+    ============LICENSE_END=========================================================
+
+    ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration>
+       <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+       <appender name="migrationlog" class="ch.qos.logback.classic.sift.SiftingAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <!-- This is MDC value -->
+               <!-- We will assign a value to 'logFilenameAppender' via Java code -->
+               <discriminator>
+                       <key>logFilenameAppender</key>
+                       <defaultValue>undefined</defaultValue>
+               </discriminator>
+               <sift>
+                       <!-- A standard RollingFileAppender, the log file is based on 'logFileName' 
+                               at runtime -->
+                       <appender name="FILE-${logFilenameAppender}"
+                               class="ch.qos.logback.core.rolling.RollingFileAppender">
+                               <file>${logDirectory}/migration/${logFilenameAppender}.log</file>
+                               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                                       <fileNamePattern>${logDirectory}/migration/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+                                       </fileNamePattern>
+                               </rollingPolicy>
+                               <encoder>
+                                       <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+                               </encoder>
+                       </appender>
+               </sift>
+       </appender>
+
+       <logger name="org.reflections" level="ERROR" additivity="false">
+               <appender-ref ref="migrationlog" />
+       </logger>
+       <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+               <appender-ref ref="migrationlog" />
+       </logger>
+       <logger name="org.apache.hadoop" level="INFO" additivity="false">
+               <appender-ref ref="migrationlog" />
+       </logger>
+       <logger name="org.janusgraph" level="ERROR" additivity="false">
+               <appender-ref ref="migrationlog" />
+       </logger>
+       <logger name="ch.qos.logback.classic" level="ERROR" additivity="false">
+               <appender-ref ref="migrationlog" />
+       </logger>
+       <logger name="ch.qos.logback.core" level="ERROR" additivity="false">
+               <appender-ref ref="migrationlog" />
+       </logger>
+       <logger name="com.att.eelf" level="ERROR" additivity="false">
+               <appender-ref ref="migrationlog" />
+       </logger>
+       <logger name="org.onap.aai" level="ERROR" additivity="false">
+               <appender-ref ref="migrationlog" />
+       </logger>
+
+       <root level="INFO">
+               <appender-ref ref="migrationlog" />
+       </root>
+</configuration>
\ No newline at end of file
diff --git a/src/main/resources/retired.properties b/src/main/resources/retired.properties
new file mode 100644 (file)
index 0000000..940a358
--- /dev/null
@@ -0,0 +1,6 @@
+# Retired patterns specifying that a version is retired
+retired.api.pattern.list=\
+  ^/aai/v[2-6]+/.*$
+
+# Retired patterns specifying that all versions of the api are retired
+retired.api.all.versions=
\ No newline at end of file
diff --git a/src/main/resources/schemaMod-logback.xml b/src/main/resources/schemaMod-logback.xml
new file mode 100644 (file)
index 0000000..d99c7dc
--- /dev/null
@@ -0,0 +1,62 @@
+<configuration>
+       <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+       <appender name="schemaModlog" class="ch.qos.logback.classic.sift.SiftingAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <!-- This is MDC value -->
+               <!-- We will assign a value to 'logFilenameAppender' via Java code -->
+               <discriminator>
+                       <key>logFilenameAppender</key>
+                       <defaultValue>undefined</defaultValue>
+               </discriminator>
+               <sift>
+                       <!-- A standard RollingFileAppender, the log file is based on 'logFileName' 
+                               at runtime -->
+                       <appender name="FILE-${logFilenameAppender}"
+                               class="ch.qos.logback.core.rolling.RollingFileAppender">
+                               <file>${logDirectory}/schemaMod/${logFilenameAppender}.log</file>
+                               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                                       <fileNamePattern>${logDirectory}/schemaMod/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+                                       </fileNamePattern>
+                               </rollingPolicy>
+                               <encoder>
+                                       <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+                               </encoder>
+                       </appender>
+               </sift>
+       </appender>
+
+       <logger name="org.reflections" level="WARN" additivity="false">
+               <appender-ref ref="schemaModlog" />
+       </logger>
+       <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+               <appender-ref ref="schemaModlog" />
+       </logger>
+       <logger name="org.apache.hadoop" level="ERROR" additivity="false">
+               <appender-ref ref="schemaModlog" />
+       </logger>
+       <logger name="com.thinkaurelius" level="WARN" additivity="false">
+               <appender-ref ref="schemaModlog" />
+       </logger>
+       <logger name="ch.qos.logback.classic" level="WARN" additivity="false">
+               <appender-ref ref="schemaModlog" />
+       </logger>
+       <logger name="ch.qos.logback.core" level="WARN" additivity="false">
+               <appender-ref ref="schemaModlog" />
+       </logger>
+       <logger name="com.att.eelf" level="WARN" additivity="false">
+               <appender-ref ref="schemaModlog" />
+       </logger>
+       <logger name="org.onap.aai" level="INFO" additivity="false">
+               <appender-ref ref="schemaModlog" />
+       </logger>
+
+
+       <root level="INFO">
+               <appender-ref ref="schemaModlog" />
+       </root>
+</configuration>
\ No newline at end of file
diff --git a/src/main/resources/uniquePropertyCheck-logback.xml b/src/main/resources/uniquePropertyCheck-logback.xml
new file mode 100644 (file)
index 0000000..ca0c2c7
--- /dev/null
@@ -0,0 +1,62 @@
+<configuration>
+       <property name="logDirectory" value="${AJSC_HOME}/logs" />
+
+       <appender name="uniquePropertyChecklog" class="ch.qos.logback.classic.sift.SiftingAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <!-- This is MDC value -->
+               <!-- We will assign a value to 'logFilenameAppender' via Java code -->
+               <discriminator>
+                       <key>logFilenameAppender</key>
+                       <defaultValue>undefined</defaultValue>
+               </discriminator>
+               <sift>
+                       <!-- A standard RollingFileAppender, the log file is based on 'logFileName' 
+                               at runtime -->
+                       <appender name="FILE-${logFilenameAppender}"
+                               class="ch.qos.logback.core.rolling.RollingFileAppender">
+                               <file>${logDirectory}/uniquePropertyCheck/${logFilenameAppender}.log</file>
+                               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                                       <fileNamePattern>${logDirectory}/uniquePropertyCheck/${logFilenameAppender}.log.%d{yyyy-MM-dd}
+                                       </fileNamePattern>
+                               </rollingPolicy>
+                               <encoder>
+                                       <pattern>%d{yyyy-MM-dd'T'HH:mm:ss.SSSXXX}|%m%n</pattern>
+                               </encoder>
+                       </appender>
+               </sift>
+       </appender>
+
+       <logger name="org.reflections" level="WARN" additivity="false">
+               <appender-ref ref="uniquePropertyChecklog" />
+       </logger>
+       <logger name="org.apache.zookeeper" level="ERROR" additivity="false">
+               <appender-ref ref="uniquePropertyChecklog" />
+       </logger>
+       <logger name="org.apache.hadoop" level="ERROR" additivity="false">
+               <appender-ref ref="uniquePropertyChecklog" />
+       </logger>
+       <logger name="com.thinkaurelius" level="WARN" additivity="false">
+               <appender-ref ref="uniquePropertyChecklog" />
+       </logger>
+       <logger name="ch.qos.logback.classic" level="WARN" additivity="false">
+               <appender-ref ref="uniquePropertyChecklog" />
+       </logger>
+       <logger name="ch.qos.logback.core" level="WARN" additivity="false">
+               <appender-ref ref="uniquePropertyChecklog" />
+       </logger>
+       <logger name="com.att.eelf" level="WARN" additivity="false">
+               <appender-ref ref="uniquePropertyChecklog" />
+       </logger>
+       <logger name="org.onap.aai" level="INFO" additivity="false">
+               <appender-ref ref="uniquePropertyChecklog" />
+       </logger>
+
+
+       <root level="INFO">
+               <appender-ref ref="uniquePropertyChecklog" />
+       </root>
+</configuration>
\ No newline at end of file
diff --git a/src/main/scripts/audit_schema.sh b/src/main/scripts/audit_schema.sh
new file mode 100644 (file)
index 0000000..686dd49
--- /dev/null
@@ -0,0 +1,32 @@
+#!/bin/sh
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+start_date;
+check_user;
+source_profile;
+execute_spring_jar org.onap.aai.db.schema.ScriptDriver "/opt/app/aai-graphadmin/resources/logback.xml" "$@"
+end_date;
+exit 0
diff --git a/src/main/scripts/common_functions.sh b/src/main/scripts/common_functions.sh
new file mode 100644 (file)
index 0000000..ed795fe
--- /dev/null
@@ -0,0 +1,65 @@
+#!/bin/ksh
+#
+# Common functions that can be used throughout multiple scripts
+# In order to call these functions, this file needs to be sourced
+
+# Checks if the user that is currently running is aaiadmin
+# Parses the effective user name out of `id` output (the token inside the
+# first pair of parentheses) and exits the calling script with status 1 if
+# it is not "aaiadmin", so admin tools are never run under the wrong account.
+# NOTE(review): `id | cut` depends on the exact `id` output format;
+# `id -un` would be more direct - confirm portability requirements.
check_user(){
+
+    userid=$( id | cut -f2 -d"(" | cut -f1 -d")" )
+
+    if [ "${userid}" != "aaiadmin" ]; then
+        echo "You must be aaiadmin to run $0. The id used $userid."
+        exit 1
+    fi
+}
+
+# Sources the profile and sets the project home
+# Loads environment settings (e.g. JAVA_HOME) from /etc/profile.d/aai.sh and
+# sets PROJECT_HOME, which execute_spring_jar and the callers rely on.
+source_profile(){
+    . /etc/profile.d/aai.sh
+    PROJECT_HOME=/opt/app/aai-graphadmin
+}
+
+# Runs the spring boot jar based on which main class
+# to execute and which logback file to use for that class
+#
+# Usage: execute_spring_jar <main-class> <logback-file> [args...]
+# All arguments after the first two are passed through to the main class.
+# Honors JAVA_PRE_OPTS / JAVA_POST_OPTS as caller-settable hook variables for
+# extra JVM flags placed before/after the standard options, and JVM_OPTS for
+# flags given directly to the java launcher.
+execute_spring_jar(){
+
+    className=$1;
+    logbackFile=$2;
+
+    shift 2;
+
+    # NOTE(review): assumes exactly one jar in ${PROJECT_HOME}/lib - with more
+    # than one, this glob produces a multi-word value that breaks the -jar
+    # invocation below. TODO confirm packaging guarantees a single jar.
+    EXECUTABLE_JAR=$(ls ${PROJECT_HOME}/lib/*.jar);
+
+    JAVA_OPTS="${JAVA_PRE_OPTS} -DAJSC_HOME=$PROJECT_HOME";
+    JAVA_OPTS="$JAVA_OPTS -DBUNDLECONFIG_DIR=resources";
+    JAVA_OPTS="$JAVA_OPTS -Daai.home=$PROJECT_HOME ";
+    JAVA_OPTS="$JAVA_OPTS -Dhttps.protocols=TLSv1.1,TLSv1.2";
+    # Spring Boot PropertiesLauncher settings: which main class to launch and
+    # where to pick up additional classpath resources.
+    JAVA_OPTS="$JAVA_OPTS -Dloader.main=${className}";
+    JAVA_OPTS="$JAVA_OPTS -Dloader.path=${PROJECT_HOME}/resources";
+    JAVA_OPTS="$JAVA_OPTS -Dlogback.configurationFile=${logbackFile}";
+
+    export SOURCE_NAME=$(grep '^schema.source.name=' ${PROJECT_HOME}/resources/application.properties | cut -d"=" -f2-);
+    # Needed for the schema ingest library beans
+    # Turns every schema.* line of application.properties into a -D system
+    # property, expanding the ${server.local.startpath} and
+    # ${schema.source.name} placeholders so the standalone jar sees the same
+    # values as the running application.
+    eval $(grep '^schema\.' ${PROJECT_HOME}/resources/application.properties | \
+     sed 's/^\(.*\)$/JAVA_OPTS="$JAVA_OPTS -D\1"/g' | \
+     sed 's/${server.local.startpath}/${PROJECT_HOME}\/resources/g'| \
+     sed 's/${schema.source.name}/'${SOURCE_NAME}'/g'\
+    )
+
+    JAVA_OPTS="${JAVA_OPTS} ${JAVA_POST_OPTS}";
+
+    ${JAVA_HOME}/bin/java ${JVM_OPTS} ${JAVA_OPTS} -jar ${EXECUTABLE_JAR} "$@"
+}
+
+# Prints the start date and the script that the user called
+# Emits a blank separator line followed by a timestamped "Starting" banner,
+# so successive runs are easy to find in captured output.
+start_date(){
+    echo
+    echo `date` "   Starting $0"
+}
+
+# Prints the end date and the script that the user called
+# Counterpart to start_date: emits a timestamped "Done" banner on completion.
+end_date(){
+    echo
+    echo `date` "   Done $0"
+}
diff --git a/src/main/scripts/createDBSchema.sh b/src/main/scripts/createDBSchema.sh
new file mode 100644 (file)
index 0000000..01fef07
--- /dev/null
@@ -0,0 +1,44 @@
+#!/bin/ksh
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+# The script invokes GenTester java class to create the DB schema
+#
+# NOTE: you can pass an option GEN_DB_WITH_NO_SCHEMA if you want it to create an instance of
+#       the graph - but with no schema (this is useful when using the Hbase copyTable to
+#       copy our database to different environments).
+#       Ie. createDbSchema.sh GEN_DB_WITH_NO_SCHEMA
+#
+#
+#
+#
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )     
+. ${COMMON_ENV_PATH}/common_functions.sh
+start_date;
+check_user;
+source_profile;
+if [ -z "$1" ]; then
+    execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml
+else
+    execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml "$1"
+fi;
+end_date;
+exit 0
\ No newline at end of file
diff --git a/src/main/scripts/dataGrooming.sh b/src/main/scripts/dataGrooming.sh
new file mode 100644 (file)
index 0000000..a6b5f4f
--- /dev/null
@@ -0,0 +1,116 @@
+#!/bin/ksh
+#
+# The script invokes the dataGrooming java class to run some tests and generate a report and
+#     potentially do some auto-deleting.
+#
+# Here are the allowed Parameters.  Note - they are all optional and can be mixed and matched.
+#
+#  -f oldFileName  (see note below)
+#  -autoFix 
+#  -sleepMinutes nn
+#  -edgesOnly
+#  -skipEdges
+#  -timeWindowMinutes nn
+#  -dontFixOrphans
+#  -maxFix
+#  -skipHostCheck
+#  -singleCommits
+#  -dupeCheckOff
+#  -dupeFixOn
+#  -ghost2CheckOff
+#  -ghost2FixOn
+#
+#
+#
+#
+# NOTES:
+# -f  The name of a previous report can optionally be passed in with the "-f" option. 
+#     Just the filename --  ie. "dataGrooming.sh -f dataGrooming.201504272106.out"   
+#     The file will be assumed to be in the directory that it was created in.
+#     If a filename is passed, then the "deleteCandidate" vertex-id's and bad edges
+#     listed inside that report file will be deleted on this run if they are encountered as
+#     bad nodes/edges again.
+#     
+# -autoFix  If you don't use the "-f" option, you could choose to use "-autofix" which will
+#           automatically run the script twice: once to look for problems, then after 
+#           sleeping for a few minutes, it will re-run with the initial run's output as
+#           an input file.  
+#
+# -maxFix   When using autoFix, you might want to limit how many 'bad' records get fixed.
+#           This is a safeguard against accidentally deleting too many records automatically.
+#           It has a default value set in AAIConstants:  AAI_GROOMING_DEFAULT_MAX_FIX = 15;
+#           If there are more than maxFix candidates found -- then none will be deleted (ie. 
+#           someone needs to look into it)
+# 
+# -sleepMinutes   When using autoFix, this defines how many minutes we sleep before the second run.
+#           It has a default value set in AAIConstants:  AAI_GROOMING_DEFAULT_SLEEP_MINUTES = 7;
+#           The reason we sleep at all between runs is that our DB is "eventually consistent", so
+#           we want to give it time to resolve itself if possible.
+#
+# -edgesOnly    Can be used any time you want to limit this tool so it only looks at edges.
+#            Note - as of 1710, we have not been seeing many purely bad edges, 
+#            (ie. not associated with a phantom node) so this option is not used often.
+#           
+# -skipEdgeChecks  Use it to bypass checks for bad Edges (which are pretty rare).
+#
+# -timeWindowMinutes   Use it to limit the nodes looked at to ones whose update-timestamp tells us that it was last updated less than this many minutes ago.  Note this is usually used along with the skipEdgeChecks option.
+#
+# -dontFixOrphans   Since there can sometimes be a lot of orphan nodes, and they don't 
+#           harm processing as much as phantom-nodes or bad-edges, it is useful to be
+#           able to ignore them when fixing things.  
+#
+# -skipHostCheck    By default, the grooming tool will check to see that it is running
+#           on the host that is the first one in the list found in:
+#               aaiconfig.properties  aai.primary.filetransfer.serverlist
+#           This is so that when run from the cron, it only runs on one machine.
+#           This option lets you turn that checking off.
+#
+# -singleCommits    By default, the grooming tool will do all of its processing and then do
+#           a commit of all the changes at once.  This option (maybe could have been named better)
+#           is letting the user override the default behavior and do a commit for each
+#           individual 'remove" one by one as they are encountered by the grooming logic. 
+#           NOTE - this only applies when using either the "-f" or "-autoFix" options since 
+#           those are the only two that make changes to the database.
+#
+# -dupeCheckOff    By default, we will check all of our nodes for duplicates.  This parameter lets
+#           us turn this check off if we don't want to do it for some reason.
+#
+# -dupeFixOn    When we're fixing data, by default we will NOT fix duplicates.  This parameter lets us turn 
+#           that fixing ON when we are comfortable that it can pick the correct duplicate to preserve. 
+#
+# -ghost2CheckOff    By default, we will check for the "new" kind of ghost that we saw on
+#           Production in early February 2016.  This parameter lets us turn this check off if we 
+#           don't want to do it for some reason.
+#
+# -ghost2FixOn    When we're fixing data, by default we will NOT try to fix the "new" ghost nodes.  
+#           This parameter lets us turn that fixing ON if we want to try to fix them. 
+#
+#
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+# TODO: There is a better way where you can pass in the function
+# and then let the common functions check if the function exist and invoke it
+# So this all can be templated out
+start_date;
+check_user;
+
+# Make sure that it's not already running. The bracketed pattern keeps this
+# grep from matching its own ps entry, and it also matches the fully-qualified
+# org.onap.aai.datagrooming.DataGrooming class of a running instance.
+# (The former second check grepped for the stale org.onap.aai.dbgen package,
+# which is not the class launched below, so it was dead code.)
+processStat=$(ps -ef | grep '[D]ataGrooming');
+if [ "$processStat" != "" ]
+	then
+	echo "Found dataGrooming is already running: " $processStat
+	exit 1
+fi
+
+source_profile;
+execute_spring_jar org.onap.aai.datagrooming.DataGrooming $PROJECT_HOME/resources/logback.xml "$@"
+end_date;
+exit 0
diff --git a/src/main/scripts/dataRestoreFromSnapshot.sh b/src/main/scripts/dataRestoreFromSnapshot.sh
new file mode 100644 (file)
index 0000000..405a667
--- /dev/null
@@ -0,0 +1,50 @@
+#!/bin/ksh
+#
+# This script uses the dataSnapshot and SchemaGenerator (via GenTester) java classes to restore 
+# data to a database by doing three things: 
+#   1) clear out whatever data and schema are currently in the db 
+#   2) rebuild the schema (using the SchemaGenerator)
+#   3) reload data from the passed-in datafile (which must be found in the dataSnapShots directory and
+#      contain an xml view of the db data).
+#
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+start_date;
+check_user;
+
+if [ "$#" -lt 1 ]; then
+    echo "Illegal number of parameters"
+    echo "usage: $0 previous_snapshot_filename"
+    exit 1
+fi
+
+source_profile;
+export PRE_JAVA_OPTS=${PRE_JAVA_OPTS:--Xms6g -Xmx8g};
+
+#### Step 1) clear out the database
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "CLEAR_ENTIRE_DATABASE" "$1" "$2"
+if [ "$?" -ne "0" ]; then
+    echo "Problem clearing out database."
+    exit 1
+fi
+#### Step 2) rebuild the db-schema
+execute_spring_jar org.onap.aai.schema.GenTester ${PROJECT_HOME}/resources/logback.xml "GEN_DB_WITH_NO_DEFAULT_CR"
+if [ "$?" -ne "0" ]; then
+    echo "Problem rebuilding the schema (SchemaGenerator)."
+    exit 1
+fi
+
+#### Step 3) reload the data from a snapshot file
+
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot ${PROJECT_HOME}/resources/logback.xml "RELOAD_DATA" "$1"
+if [ "$?" -ne "0" ]; then
+    echo "Problem reloading data into the database."
+    end_date;
+    exit 1
+fi
+end_date;
+exit 0
diff --git a/src/main/scripts/dataSnapshot.sh b/src/main/scripts/dataSnapshot.sh
new file mode 100644 (file)
index 0000000..f380e85
--- /dev/null
@@ -0,0 +1,28 @@
+#!/bin/ksh
+#
+# This script invokes the dataSnapshot java class passing an option to tell it to take
+# a snapshot of the database and store it as a single-line XML file.
+#
+#
+#
+#
+#
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+processStat=$(ps -ef | grep '[D]ataSnapshot');
+if [ "$processStat" != "" ]
+       then
+       echo "Found dataSnapshot is already running: " $processStat
+       exit 1
+fi
+
+# TODO: There is a better way where you can pass in the function
+# and then let the common functions check if the function exist and invoke it
+# So this all can be templated out
+start_date;
+check_user;
+source_profile;
+execute_spring_jar org.onap.aai.datasnapshot.DataSnapshot $PROJECT_HOME/resources/logback.xml "$@"
+end_date;
+exit 0
diff --git a/src/main/scripts/dupeTool.sh b/src/main/scripts/dupeTool.sh
new file mode 100644 (file)
index 0000000..350b0bd
--- /dev/null
@@ -0,0 +1,73 @@
+#!/bin/ksh
+
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+#
+#
+# dupeTool.sh  -- This tool is used to look at or fix duplicate nodes for one nodeType
+#       at a time and can be used to limit what it's looking at to just nodes created
+#       within a recent time window.
+#       It is made to deal with situations (like we have in 1610/1702) where one type
+#       of node keeps needing to have duplicates cleaned up (tenant nodes).
+#       It is needed because DataGrooming cannot be run often and cannot be focused just
+#       on duplicates or just on one nodeType.
+#
+# Parameters:
+#
+#  -userId (required) must be followed by a userid
+#  -nodeType (required) must be followed by a valid nodeType
+#  -timeWindowMinutes (optional) by default we would look at all nodes of the
+#        given nodeType, but if a window is given, then we will only look at
+#        nodes created that many (or fewer) minutes ago.
+#  -autoFix (optional) use this if you want duplicates fixed automatically (if we
+#           can figure out which to delete)
+#  -maxFix (optional) like with dataGrooming lets you override the default maximum 
+#           number of dupes that can be processed at one time
+#  -skipHostCheck (optional) By default, the dupe tool will check to see that it is running
+#           on the host that is the first one in the list found in:
+#               aaiconfig.properties  aai.primary.filetransfer.serverlist
+#           This is so that when run from the cron, it only runs on one machine.
+#           This option lets you turn that checking off.
+#  -sleepMinutes (optional) like with DataGrooming, you can override the 
+#           sleep time done when doing autoFix between first and second checks of the data.
+#  -params4Collect (optional) followed by a string to tell what properties/values to use 
+#              to limit the nodes being looked at.  Must be in the format
+#              of "propertyName|propValue"; use commas to separate if there
+#              is more than one name/value being passed.
+#  -specialTenantRule (optional) turns on logic which will use extra logic to figure
+#       out which tenant node can be deleted in a common scenario.
+#  
+#  
+#  For example (there are many valid ways to use it):
+#  
+#  dupeTool.sh -userId am8383 -nodeType tenant -timeWindowMinutes 60 -autoFix
+#  or
+#  dupeTool.sh -userId am8383 -nodeType tenant -specialTenantRule -autoFix -maxFix 100
+# 
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+start_date;
+check_user;
+source_profile;
+execute_spring_jar org.onap.aai.dbgen.DupeTool ${PROJECT_HOME}/resources/dupeTool-logback.xml "$@"
+end_date;
+exit 0
\ No newline at end of file
diff --git a/src/main/scripts/dynamicPayloadArchive.sh b/src/main/scripts/dynamicPayloadArchive.sh
new file mode 100644 (file)
index 0000000..87cce13
--- /dev/null
@@ -0,0 +1,75 @@
+#!/bin/ksh
+#
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+
+#
+# The script is called to tar and gzip the files under /opt/app/aai-graphadmin/data/scriptdata/addmanualdata/tenant_isolation/payload 
+# which contains the payload files created by the dynamicPayloadGenerator.sh tool.
+# /opt/app/aai-graphadmin/data/scriptdata/addmanualdata/tenant_isolation is mounted to the docker container
+#
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+. /etc/profile.d/aai.sh
+PROJECT_HOME=/opt/app/aai-graphadmin
+
+PROGNAME=$(basename $0)
+
+TS=$(date "+%Y_%m_%d_%H_%M_%S")
+
+CHECK_USER="aaiadmin"
+userid=$( id | cut -f2 -d"(" | cut -f1 -d")" )
+if [ "${userid}" != $CHECK_USER ]; then
+    echo "You must be  $CHECK_USER to run $0. The id used $userid."
+    exit 1
+fi
+PAYLOAD_DIRECTORY=${PROJECT_HOME}/resources/etc/scriptdata/addmanualdata/tenant_isolation/payload
+ARCHIVE_DIRECTORY=${PROJECT_HOME}/resources/etc/scriptdata/addmanualdata/tenant_isolation/archive
+if [ ! -d ${PAYLOAD_DIRECTORY} ]
+then
+       echo " ${PAYLOAD_DIRECTORY} doesn't exist"
+       exit 1
+fi
+if [ ! -d ${ARCHIVE_DIRECTORY} ]
+then
+       mkdir -p ${ARCHIVE_DIRECTORY}
+       chown aaiadmin:aaiadmin ${ARCHIVE_DIRECTORY}
+       chmod u+w ${ARCHIVE_DIRECTORY}
+fi
+cd ${PAYLOAD_DIRECTORY}
+tar c * -f ${ARCHIVE_DIRECTORY}/dynamicPayloadArchive_${TS}.tar --exclude=payload
+if [ $? -ne 0 ]
+then
+       echo " Unable to tar ${PAYLOAD_DIRECTORY}"
+       exit 1
+fi
+
+cd ${ARCHIVE_DIRECTORY}
+gzip ${ARCHIVE_DIRECTORY}/dynamicPayloadArchive_${TS}.tar 
+
+if [ $? -ne 0 ]
+then
+       echo " Unable to gzip ${ARCHIVE_DIRECTORY}/dynamicPayloadArchive_${TS}.tar"
+       exit 1
+fi
+echo "Completed successfully: ${ARCHIVE_DIRECTORY}/dynamicPayloadArchive_${TS}.tar"
+exit 0
diff --git a/src/main/scripts/dynamicPayloadGenerator.sh b/src/main/scripts/dynamicPayloadGenerator.sh
new file mode 100644 (file)
index 0000000..3d30790
--- /dev/null
@@ -0,0 +1,155 @@
+#!/bin/ksh
+#
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+
+#
+# dynamicPayloadGenerator.sh  -- This tool is used to dynamically load payloads from snapshots
+#       It is used to load a snapshot into memory and generate payloads for any input nodes
+#       
+#
+# Parameters:
+#
+#  -d (required) name of the fully qualified Datasnapshot file that you need to load
+#  -s (optional) true or false to enable or disable schema, By default it is true for production, 
+#           you can change to false if the snapshot has duplicates
+#  -c (optional) config file to use for loading snapshot into memory.
+#  -o (required) output file to store the data files
+#  -f (optional) PAYLOAD or DMAAP-MR
+#  -n (optional) input file for the script 
+#  
+#  
+#  For example (there are many valid ways to use it):
+#  
+#  dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -o '/opt/app/aai-graphadmin/resources/etc/scriptdata/addmanualdata/payload_dir/'
+#              
+#  or
+#  dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -s false -c '/opt/app/aai-graphadmin/resources/etc/appprops/dynamic.properties'
+#                                      -o '/opt/app/aai-graphadmin/resources/etc/scriptdata/addmanualdata/payload_dir/' -f PAYLOAD -n '/opt/app/aai-graphadmin/resources/etc/scriptdata/nodes.json'
+# 
+
+
+echo
+echo `date` "   Starting $0"
+
+display_usage() {
+        cat <<EOF
+        Usage: $0 [options]
+
+        1. Usage: dynamicPayloadGenerator -d <graphsonPath> -o  <output-path>
+        2. This script has  2 arguments that are required.
+           a.  -d (required) Name of the fully qualified Datasnapshot file that you need to load
+           b.  -o (required) output file to store the data files
+        3. Optional Parameters:
+                  a.   -s (optional) true or false to enable or disable schema, By default it is true for production, 
+                  b.   -c (optional) config file to use for loading snapshot into memory. By default it is set to /opt/app/aai-graphadmin/resources/etc/appprops/dynamic.properties
+                  c.   -f (optional) PAYLOAD or DMAAP-MR
+                  d.   -n (optional) input file specifying the nodes and relationships to export. Default: /opt/app/aai-graphadmin/scriptdata/tenant_isolation/nodes.json
+                  e.   -m (optional) true or false to read multiple snapshots or not, by default is false
+                  f.   -i (optional) the file containing the input filters based on node property and regex/value. By default, it is: /opt/app/aai-graphadmin/scriptdata/tenant_isolation/inputFilters.json
+               4. For example (there are many valid ways to use it):
+                       dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -o '/opt/app/aai-graphadmin/resources/etc/scriptdata/addmanualdata/tenant_isolation/'
+                               
+                       dynamicPayloadGenerator.sh -d '/opt/app/snapshots/snaphot.graphSON' -s false -c '/opt/app/aai-graphadmin/resources/etc/appprops/dynamic.properties'
+                                       -o '/opt/app/aai-graphadmin/resources/etc/scriptdata/addmanualdata/tenant_isolation/' -f PAYLOAD -n '/opt/app/aai-graphadmin/resources/etc/scriptdata/tenant_isolation/nodes.json'
+                                       -m false -i '/opt/app/aai-graphadmin/resources/etc/scriptdata/tenant_isolation/inputFilters.json'
+   
+EOF
+}
+if [ $# -eq 0 ]; then
+        display_usage
+        exit 1
+fi
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+start_date;
+check_user;
+source_profile;
+export JVM_OPTS="-Xmx9000m -Xms9000m"
+
+while getopts ":f:s:d:n:c:i:m:o:p:" opt; do
+      case ${opt} in
+        f )
+          PAYLOAD=$OPTARG
+          echo ${opt}
+          ;;
+        s )
+          VALIDATE_SCHEMA=$OPTARG
+          echo ${opt}
+          ;;
+        d )
+          INPUT_DATASNAPSHOT_FILE=$OPTARG
+          echo ${opt}
+          ;;
+        n )
+          NODE_CONFIG_FILE=$OPTARG
+          echo ${opt}
+          ;;
+        c )
+          DYNAMIC_CONFIG_FILE=$OPTARG
+          echo ${opt}
+          ;;
+        i )
+          INPUT_FILTER_FILE=$OPTARG
+          echo ${opt}
+          ;;
+        m )
+          MULTIPLE_SNAPSHOTS=$OPTARG
+          echo ${opt}
+          ;;
+        p )
+          PARTIAL=$OPTARG
+          echo ${opt}
+          ;;
+        o )
+          OUTPUT_DIR=$OPTARG
+          echo ${opt}
+          ;;
+        \? )
+          echo "Invalid Option: -$OPTARG" 1>&2
+          ;;
+        : )
+          echo "Invalid Option: -$OPTARG requires an argument" 1>&2
+          ;;
+      esac
+    done
+    shift $((OPTIND -1))
+
+echo 'Done'
+
+set -A nodes pserver cloud-region availability-zone tenant zone complex
+
+#Create empty partial file
+ > $INPUT_DATASNAPSHOT_FILE".partial"
+
+for nodeType in ${nodes[@]}
+       do
+         grep "aai-node-type.*\"value\":\"$nodeType\"" $INPUT_DATASNAPSHOT_FILE >>$INPUT_DATASNAPSHOT_FILE'.partial'
+    done
+
+
+execute_spring_jar org.onap.aai.dbgen.DynamicPayloadGenerator ${PROJECT_HOME}/resources/dynamicPayloadGenerator-logback.xml -s ${VALIDATE_SCHEMA} \
+               -f ${PAYLOAD} -o ${OUTPUT_DIR} -c ${DYNAMIC_CONFIG_FILE} -i ${INPUT_FILTER_FILE} -m ${MULTIPLE_SNAPSHOTS} \
+               -d ${INPUT_DATASNAPSHOT_FILE} -n ${NODE_CONFIG_FILE} ;
+               
+end_date;
+exit 0
diff --git a/src/main/scripts/dynamicPayloadPartial.sh b/src/main/scripts/dynamicPayloadPartial.sh
new file mode 100644 (file)
index 0000000..8021aa6
--- /dev/null
@@ -0,0 +1,13 @@
+#!/bin/ksh
+
+#Create empty partial snapshot file
+INPUT_DATASNAPSHOT_FILE=$1
+
+set -A nodes pserver cloud-region availability-zone tenant zone complex
+ > $INPUT_DATASNAPSHOT_FILE".partial"
+
+for nodeType in ${nodes[@]}
+       do
+         grep "aai-node-type.*\"value\":\"$nodeType\"" $INPUT_DATASNAPSHOT_FILE >>$INPUT_DATASNAPSHOT_FILE'.partial'
+    done
+exit 0
\ No newline at end of file
diff --git a/src/main/scripts/forceDeleteTool.sh b/src/main/scripts/forceDeleteTool.sh
new file mode 100644 (file)
index 0000000..2d42fda
--- /dev/null
@@ -0,0 +1,84 @@
+#!/bin/ksh
+#
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+#
+#
+# forceDeleteTool.sh  -- This tool is used to delete nodes that cannot be deleted using
+#        the normal REST API because of internal DB problems.  For example, Phantom nodes
+#        and duplicate nodes cause errors to happen in "normal" REST API codes and must
+#        be deleted using this tool.  
+#        Since it is not using the "normal" REST logic, it is also not invoking the "normal" 
+#        edge rules that we use to cascade deletes to "child" nodes.  So - this tool can be dangerous.  
+#        Ie. if you accidently delete a parent node (like a cloud-region) that has many dependent 
+#        child nodes, there will be no way to get to any of those child-nodes after the cloud-region
+#        has been deleted.  
+#        There are several environment variables defined in aaiconfig.properties to help minimize errors like that.
+#                aai.forceDel.protected.nt.list=cloud-region
+#                aai.forceDel.protected.edge.count=10
+#                aai.forceDel.protected.descendant.count=10
+#
+# Parameters:
+#
+#  -action (required) valid values: COLLECT_DATA or DELETE_NODE or DELETE_EDGE
+#  -userId (required) must be followed by a userid
+#  -params4Collect (followed by a string) to tell what properties/values to use
+#              as part of a COLLECT_DATA request.  Must be in the format
+#              of "propertyName|propValue"; use commas to separate if there
+#              is more than one name/value being passed.
+#  -vertexId - required for a DELETE_NODE request
+#  -edgeId - required for a DELETE_EDGE request
+#  -overRideProtection --- WARNING - This over-rides the protections we introduced!
+#       It will let you override a protected vertex or vertex that has more
+#       than the allowed number of edges or descendants.
+#  -DISPLAY_ALL_VIDS (optional) - in the rare case when you want to see the 
+#       vertex-ids (vids) of all the CONNECTED vertices, you can use this.  By 
+#       default, we do not show them.
+#  
+#  
+#  For example:
+#  
+#  forceDeleteTool.sh -action COLLECT_DATA -userId am8383 -params4Collect "tenant-id|junk tenant01 ID 0224"
+#  
+#  forceDeleteTool.sh -action COLLECT_DATA -userId am8383 -params4Collect "cloud-owner|junkTesterCloudOwner 0224,cloud-region-id|junkTesterCloud REgion ID 0224"
+#
+#  forceDeleteTool.sh -action DELETE_NODE -userId am8383 -vertexId 1234567
+#
+#  forceDeleteTool.sh -action DELETE_EDGE -userId am8383 -edgeId 9876543
+#
+# 
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+start_date;
+
+echo " NOTE - if you are deleting data, please run the dataSnapshot.sh script first or "
+echo "     at least make a note the details of the node that you are deleting. "
+
+check_user;
+source_profile;
+
+execute_spring_jar org.onap.aai.dbgen.ForceDeleteTool ${PROJECT_HOME}/resources/forceDelete-logback.xml "$@"
+
+end_date;
+
+exit 0
diff --git a/src/main/scripts/migration_verification.sh b/src/main/scripts/migration_verification.sh
new file mode 100644 (file)
index 0000000..1e1b228
--- /dev/null
@@ -0,0 +1,61 @@
+#!/bin/ksh
+
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+
+#
+# migration_verification.sh  -- This tool is used to provide a summary of migration logs
+# This searches for pre-defined strings "Migration Error" and "Migration Summary Count" in log files and outputs those lines.
+#
+
+display_usage() {
+  cat << EOF
+  Usage: $0 [options]
+  
+  1. Usage: migration_verification.sh <last_modified> <logs_path>
+  2. The <logs_path> should be a directory containing all of the logs. If empty, default path is /opt/app/aai-graphadmin/logs/migration.
+  3. The <last_modified> parameter should be an integer for up to how many minutes ago a log file should be parsed.
+  4. Example: migration_verification.sh 60 /opt/app/aai-graphadmin/logs/migration
+EOF
+}
+
+if [ $# -eq 0 ]; then
+  display_usage
+  exit 1
+fi
+
+LOGS_DIRECTORY=${2:-/opt/app/aai-graphadmin/logs/migration/}
+MTIME=$1
+
+echo
+echo 'Running migration summary:'
+print "Logs directory: $LOGS_DIRECTORY"
+print "Searching log files modified within last $MTIME minutes: \n"
+echo
+
+for i in $(find -L $LOGS_DIRECTORY -mtime -$MTIME -name '*.log' );
+do
+  echo "Checking Log File: $i"
+  grep "Migration Error:" $i
+  grep "Migration Summary Count:" $i
+  echo
+done
+
+echo 'Done'
diff --git a/src/main/scripts/run_Migrations.sh b/src/main/scripts/run_Migrations.sh
new file mode 100644 (file)
index 0000000..2b0f5c5
--- /dev/null
@@ -0,0 +1,49 @@
+#!/bin/sh
+
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+# TODO: There is a better way where you can pass in the function
+# and then let the common functions check if the function exist and invoke it
+# So this all can be templated out
+start_date;
+check_user;
+source_profile;
+
+ARGS="-c ${PROJECT_HOME}/resources/etc/appprops/janusgraph-realtime.properties";
+
+if [ -f "$PROJECT_HOME/resources/application.properties" ]; then
+    # Get the application properties file and look for all lines
+    # starting with either jms dmaap or niws
+    # Turn them into system properties and export JAVA_PRE_OPTS so
+    # execute spring jar will get those values
+    # This is only needed since dmaap is used by run_migrations
+    JAVA_PRE_OPTS="-Xms8g -Xmx8g";
+    JMS_PROPS=$(egrep '^jms.bind.address' $PROJECT_HOME/resources/application.properties | cut -d"=" -f2- |  sed 's/^\(.*\)$/-Dactivemq.tcp.url=\1/g' | tr '\n' ' ');
+    JAVA_PRE_OPTS="${JAVA_PRE_OPTS} ${JMS_PROPS}";
+    export JAVA_PRE_OPTS;
+fi;
+
+execute_spring_jar org.onap.aai.migration.MigrationController ${PROJECT_HOME}/resources/migration-logback.xml ${ARGS} "$@"
+end_date;
+exit 0
\ No newline at end of file
diff --git a/src/main/scripts/run_SendDeleteMigrationNotification.sh b/src/main/scripts/run_SendDeleteMigrationNotification.sh
new file mode 100644 (file)
index 0000000..ebd8677
--- /dev/null
@@ -0,0 +1,65 @@
+#!/bin/ksh
+
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+
+start_date;
+check_user;
+source_profile;
+
+INPUT_PATH=$1
+
+if [ ! -d "$INPUT_PATH" ]; then
+    echo "Input directory $INPUT_PATH does not exist!!";
+    exit
+fi
+
+if [ $(ls ${INPUT_PATH}/* 2> /dev/null | wc -l) -eq 0 ]; then
+    echo "Input directory $INPUT_PATH does not contain any migration files!!";
+    exit
+fi
+
+INPUT_DIR_FOR_JAVA=${INPUT_PATH}/deleteevents
+mkdir -p "$INPUT_DIR_FOR_JAVA"
+INPUT_FILE_FOR_JAVA=${INPUT_DIR_FOR_JAVA}/dmaap_delete_files.txt
+#sort --numeric-sort -k 1 -t '_' $(find ${INPUT_PATH}/DELETE-* -maxdepth 0 -type f) | awk -F '_' '{ print $2"_"$3; }' > $INPUT_FILE_FOR_JAVA
+find ${INPUT_PATH} -type f -name 'DELETE-*' -exec cat {} + > $INPUT_FILE_FOR_JAVA
+
+shift
+
+ARGS="-c ${PROJECT_HOME}/resources/etc/appprops/janusgraph-realtime.properties --inputFile $INPUT_FILE_FOR_JAVA"
+
+if [ -f "$PROJECT_HOME/resources/application.properties" ]; then
+    # Get the application properties file and look for all lines
+    # starting with either jms dmaap or niws
+    # Turn them into system properties and export JAVA_PRE_OPTS so
+    # execute spring jar will get those values
+    # This is only needed since dmaap is used by run_migrations
+    JAVA_PRE_OPTS=$(egrep '^jms.bind.address' $PROJECT_HOME/resources/application.properties | cut -d"=" -f2- |  sed 's/^\(.*\)$/-Dactivemq.tcp.url=\1/g' | tr '\n' ' ');
+    export JAVA_PRE_OPTS;
+fi;
+
+execute_spring_jar org.onap.aai.util.SendDeleteMigrationNotificationsMain ${PROJECT_HOME}/resources/migration-logback.xml ${ARGS} "$@"
+end_date;
+exit 0
\ No newline at end of file
diff --git a/src/main/scripts/run_SendMigrationNotification.sh b/src/main/scripts/run_SendMigrationNotification.sh
new file mode 100644 (file)
index 0000000..4bcc0d9
--- /dev/null
@@ -0,0 +1,64 @@
+#!/bin/ksh
+
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# 
+#      http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+
+start_date;
+check_user;
+source_profile;
+
+INPUT_PATH=$1
+
+if [ ! -d "$INPUT_PATH" ]; then
+    echo "Input directory $INPUT_PATH does not exist!!";
+    exit
+fi
+
+if [ $(ls ${INPUT_PATH}/* 2> /dev/null | wc -l) -eq 0 ]; then
+    echo "Input directory $INPUT_PATH does not contain any migration files!!";
+    exit
+fi
+
+INPUT_DIR_FOR_JAVA=${INPUT_PATH}/combined
+mkdir -p "$INPUT_DIR_FOR_JAVA"
+INPUT_FILE_FOR_JAVA=${INPUT_DIR_FOR_JAVA}/sorted_dmaap_files.txt
+sort --numeric-sort -k 1 -t '_' $(find ${INPUT_PATH}/* -maxdepth 0 -type f) | awk -F '_' '{ print $2"_"$3; }' > $INPUT_FILE_FOR_JAVA
+
+shift
+
+ARGS="-c ${PROJECT_HOME}/resources/etc/appprops/janusgraph-realtime.properties --inputFile $INPUT_FILE_FOR_JAVA"
+
+if [ -f "$PROJECT_HOME/resources/application.properties" ]; then
+    # Get the application properties file and look for all lines
+    # starting with either jms dmaap or niws
+    # Turn them into system properties and export JAVA_PRE_OPTS so
+    # execute spring jar will get those values
+    # This is only needed since dmaap is used by run_migrations
+    JAVA_PRE_OPTS=$(egrep '^jms.bind.address' $PROJECT_HOME/resources/application.properties | cut -d"=" -f2- |  sed 's/^\(.*\)$/-Dactivemq.tcp.url=\1/g' | tr '\n' ' ');
+    export JAVA_PRE_OPTS;
+fi;
+
+execute_spring_jar org.onap.aai.util.SendMigrationNotificationsMain ${PROJECT_HOME}/resources/migration-logback.xml ${ARGS} "$@"
+end_date;
+exit 0
\ No newline at end of file
diff --git a/src/main/scripts/schemaMod.sh b/src/main/scripts/schemaMod.sh
new file mode 100644 (file)
index 0000000..d1fb009
--- /dev/null
@@ -0,0 +1,50 @@
+#!/bin/ksh
+#
+# This script is used to correct mistakes made in the database schema.  
+# It currently just allows you to change either the dataType and/or indexType on properties used by nodes.    
+#
+# NOTE - Titan is not elegant in 0.5.3 about making changes to the schema.  Bad properties never
+#       actually leave the database, they just get renamed and stop getting used.  So it is 
+#       really worthwhile to get indexes and dataTypes correct the first time around.
+# Note also - This script just makes changes to the schema that is currently live.
+#    If you were to create a new schema in a brandy-new environment, it would look like
+#    whatever ex5.json (as of June 2015) told it to look like.   So, part of making a 
+#    change to the db schema should Always first be to make the change in ex5.json so that
+#    future environments will have the change.  This script is just to change existing
+#    instances of the schema since schemaGenerator (as of June 2015) does not update things - it 
+#    just does the initial creation.
+#
+# Boy, this is getting to be a big comment section...
+#
+# To use this script, you need to pass four parameters:
+#      propertyName    -- the name of the property that you need to change either the index or dataType on
+#      targetDataType  -- whether it's changing or not, you need to give it:  String, Integer, Boolean or Long
+#      targetIndexInfo -- whether it's changing or not, you need to give it: index, noIndex or uniqueIndex
+#      preserveDataFlag -- true or false.     The only reason I can think of why you'd ever want to
+#                   set this to false would be maybe if you were changing to an incompatible dataType so didn't 
+#                   want it to try to use the old data (and fail).  But 99% of the time this will just be 'true'.
+#
+# Ie.    schemaMod flavor-id String index true
+#
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )     
+. ${COMMON_ENV_PATH}/common_functions.sh
+start_date;
+check_user;
+
+if [ "$#" -ne 4 ]; then
+    echo "Illegal number of parameters"
+    echo "usage: $0 propertyName targetDataType targetIndexInfo preserveDataFlag"
+    exit 1
+fi
+
+source_profile;
+execute_spring_jar org.onap.aai.dbgen.schemamod.SchemaMod ${PROJECT_HOME}/resources/schemaMod-logback.xml "$1" "$2" "$3" "$4"
+if [ "$?" -ne "0" ]; then
+    echo "Problem executing schemaMod "
+    end_date;
+    exit 1
+fi
+end_date;
+exit 0
diff --git a/src/main/scripts/uniquePropertyCheck.sh b/src/main/scripts/uniquePropertyCheck.sh
new file mode 100644 (file)
index 0000000..c3c92bf
--- /dev/null
@@ -0,0 +1,24 @@
+#!/bin/ksh
+#
+# The script invokes UniqueProperty java class to see if the passed property is unique in the db and if
+#    not, to display where duplicate values are found.
+#
+# For example:    uniquePropertyCheck.sh subscriber-name
+#
+
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )     
+. ${COMMON_ENV_PATH}/common_functions.sh
+start_date;
+check_user;
+source_profile;
+
+#execute_spring_jar org.onap.aai.util.UniquePropertyCheck ${PROJECT_HOME}/resources/uniquePropertyCheck-logback.xml "$@"
+execute_spring_jar org.onap.aai.util.UniquePropertyCheck ${PROJECT_HOME}/resources/uniquePropertyCheck-logback.xml "$@"
+ret_code=$?
+if [ $ret_code != 0 ]; then
+  end_date;
+  exit $ret_code
+fi
+
+end_date;
+exit 0
\ No newline at end of file
diff --git a/src/main/scripts/updatePem.sh b/src/main/scripts/updatePem.sh
new file mode 100644 (file)
index 0000000..e43a2eb
--- /dev/null
@@ -0,0 +1,38 @@
+#!/bin/ksh
+
+###
+# ============LICENSE_START=======================================================
+# org.onap.aai
+# ================================================================================
+# Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+###
+#
+COMMON_ENV_PATH=$( cd "$(dirname "$0")" ; pwd -P )
+. ${COMMON_ENV_PATH}/common_functions.sh
+
+start_date;
+check_user;
+source_profile;
+
+CERTPATH=$PROJECT_HOME/resources/etc/auth/
+KEYNAME=aaiClientPrivateKey.pem
+CERTNAME=aaiClientPublicCert.pem
+
+pw=$(execute_spring_jar org.onap.aai.util.AAIConfigCommandLinePropGetter "" "aai.keystore.passwd" 2> /dev/null | tail -1)
+openssl pkcs12 -in ${CERTPATH}/aai-client-cert.p12 -out $CERTPATH$CERTNAME -clcerts -nokeys -passin pass:$pw
+openssl pkcs12 -in ${CERTPATH}/aai-client-cert.p12 -out $CERTPATH$KEYNAME -nocerts -nodes -passin pass:$pw
+end_date;
+exit 0
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/AAIGremlinQueryTest.java b/src/test/java/org/onap/aai/AAIGremlinQueryTest.java
new file mode 100644 (file)
index 0000000..6385fee
--- /dev/null
@@ -0,0 +1,210 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai;
+
+import com.jayway.jsonpath.JsonPath;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.hamcrest.CoreMatchers;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.*;
+import org.junit.runner.RunWith;
+import org.onap.aai.config.PropertyPasswordConfiguration;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.util.AAIConfig;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.context.embedded.LocalServerPort;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.context.annotation.Import;
+import org.springframework.http.*;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.context.TestPropertySource;
+import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.test.context.junit4.rules.SpringClassRule;
+import org.springframework.test.context.junit4.rules.SpringMethodRule;
+import org.springframework.web.client.RestTemplate;
+
+import javax.ws.rs.core.Response;
+import java.io.UnsupportedEncodingException;
+import java.util.*;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
+
+/**
+ * A sample junit test using spring boot that provides the ability to spin
+ * up the application from the junit layer and run rest requests against
+ * SpringBootTest annotation with web environment requires which spring boot
+ * class to load and the random port starts the application on a random port
+ * and injects back into the application for the field with annotation LocalServerPort
+ * <p>
+ *
+ * This can be used to potentially replace a lot of the fitnesse tests since
+ * they will be testing against the same thing except fitnesse uses hbase
+ */
+@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT, classes = GraphAdminApp.class)
+@ContextConfiguration(initializers = PropertyPasswordConfiguration.class)
+@Import(GraphAdminTestConfiguration.class)
+public class AAIGremlinQueryTest {
+
+    // SpringClassRule/SpringMethodRule give Spring TestContext support under a
+    // plain JUnit 4 runner (no @RunWith(SpringRunner.class) needed).
+    @ClassRule
+    public static final SpringClassRule springClassRule = new SpringClassRule();
+
+    @Rule
+    public final SpringMethodRule springMethodRule = new SpringMethodRule();
+
+    @Autowired
+    RestTemplate restTemplate;
+
+    // Port the embedded server was started on; injected by Spring Boot.
+    @LocalServerPort
+    int randomPort;
+
+    private HttpEntity httpEntity;
+
+    private HttpHeaders headers;
+
+    private String baseUrl;
+
+    @BeforeClass
+    public static void setupConfig() throws AAIException {
+        // Point the AAI configuration at the checked-in resources before the
+        // application context starts.
+        System.setProperty("AJSC_HOME", "./");
+        System.setProperty("BUNDLECONFIG_DIR", "src/main/resources/");
+    }
+
+    /**
+     * Seeds the graph with the single pserver vertex the query tests look up.
+     * Commits on success; rolls back and fails the test run on any exception.
+     */
+    public void createGraph(){
+
+        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+
+        boolean success = true;
+
+        try {
+
+            GraphTraversalSource g = transaction.traversal();
+
+            g.addV()
+                .property("aai-node-type", "pserver")
+                .property("hostname", "test-pserver")
+                .property("in-maint", false)
+                .property("source-of-truth", "JUNIT")
+                .property("aai-uri", "/cloud-infrastructure/pservers/pserver/test-pserver")
+                .next();
+
+        } catch(Exception ex){
+            success = false;
+        } finally {
+            if(success){
+                transaction.commit();
+            } else {
+                transaction.rollback();
+                fail("Unable to setup the graph");
+            }
+        }
+    }
+
+    /**
+     * Builds the common request headers (JSON content, AAI transaction ids,
+     * basic auth) and seeds the graph before each test.
+     */
+    @Before
+    public void setup() throws Exception {
+
+        AAIConfig.init();
+        AAIGraph.getInstance();
+
+        createGraph();
+        headers = new HttpHeaders();
+
+        headers.setAccept(Collections.singletonList(MediaType.APPLICATION_JSON));
+        headers.setContentType(MediaType.APPLICATION_JSON);
+        headers.add("Real-Time", "true");
+        headers.add("X-FromAppId", "JUNIT");
+        headers.add("X-TransactionId", "JUNIT");
+
+        String authorization = Base64.getEncoder().encodeToString("AAI:AAI".getBytes("UTF-8"));
+        headers.add("Authorization", "Basic " + authorization);
+        httpEntity = new HttpEntity(headers);
+        baseUrl = "https://localhost:" + randomPort;
+    }
+
+    // PUT a gremlin count query to the dbquery endpoint and expect exactly one
+    // matching pserver (the vertex created in createGraph).
+    @Test
+    public void testPserverCountUsingGremlin() throws Exception {
+        Map<String, String> gremlinQueryMap = new HashMap<>();
+        gremlinQueryMap.put("gremlin-query", "g.V().has('hostname', 'test-pserver').count()");
+
+        String payload = PayloadUtil.getTemplatePayload("gremlin-query.json", gremlinQueryMap);
+
+        ResponseEntity responseEntity = null;
+
+        String endpoint = "/aai/v11/dbquery?format=console";
+
+        httpEntity = new HttpEntity(payload, headers);
+        responseEntity = restTemplate.exchange(baseUrl + endpoint, HttpMethod.PUT, httpEntity, String.class);
+        assertThat(responseEntity.getStatusCode(), is(HttpStatus.OK));
+
+        String result = JsonPath.read(responseEntity.getBody().toString(), "$.results[0].result");
+        assertThat(result, is("1"));
+    }
+
+    // Same endpoint exercised with a DSL query; console format renders vertices
+    // as "v[<id>]", so only the prefix is asserted.
+    @Test
+    public void testPserverCountUsingDsl() throws Exception {
+        Map<String, String> dslQuerymap = new HashMap<>();
+        dslQuerymap.put("dsl-query", "pserver*('hostname', 'test-pserver')");
+
+        String payload = PayloadUtil.getTemplatePayload("dsl-query.json", dslQuerymap);
+
+        ResponseEntity responseEntity = null;
+
+        String endpoint = "/aai/v11/dbquery?format=console";
+
+        httpEntity = new HttpEntity(payload, headers);
+        responseEntity = restTemplate.exchange(baseUrl + endpoint, HttpMethod.PUT, httpEntity, String.class);
+        assertThat(responseEntity.getStatusCode(), is(HttpStatus.OK));
+
+        String result = JsonPath.read(responseEntity.getBody().toString(), "$.results[0].result");
+        assertThat(result, containsString("v["));
+    }
+
+    /**
+     * Removes every vertex created by this test run (tagged with
+     * source-of-truth = JUNIT) so tests stay independent.
+     */
+    @After
+    public void tearDown() {
+
+        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+        boolean success = true;
+
+        try {
+
+            GraphTraversalSource g = transaction.traversal();
+
+            g.V().has("source-of-truth", "JUNIT")
+                    .toList()
+                    .forEach(v -> v.remove());
+
+        } catch(Exception ex){
+            success = false;
+        } finally {
+            if(success){
+                transaction.commit();
+            } else {
+                transaction.rollback();
+                fail("Unable to teardown the graph");
+            }
+        }
+
+    }
+}
diff --git a/src/test/java/org/onap/aai/AAISetup.java b/src/test/java/org/onap/aai/AAISetup.java
new file mode 100644 (file)
index 0000000..59afe60
--- /dev/null
@@ -0,0 +1,137 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai;\r
+\r
+import static org.junit.Assert.assertNotNull;\r
+\r
+import java.io.IOException;\r
+import java.io.InputStream;\r
+import java.util.Map;\r
+\r
+import org.apache.commons.io.IOUtils;\r
+import org.janusgraph.core.JanusGraph;\r
+import org.janusgraph.core.JanusGraphFactory;\r
+import org.janusgraph.core.JanusGraphTransaction;\r
+import org.junit.*;\r
+import org.onap.aai.config.*;\r
+import org.onap.aai.db.schema.AuditorFactory;\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.introspection.MoxyLoader;\r
+import org.onap.aai.nodes.NodeIngestor;\r
+import org.onap.aai.rest.db.HttpEntry;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.setup.AAIConfigTranslator;\r
+import org.onap.aai.setup.SchemaLocationsBean;\r
+import org.onap.aai.setup.SchemaVersions;\r
+import org.onap.aai.setup.SchemaVersion;\r
+import org.springframework.beans.factory.annotation.Autowired;\r
+import org.springframework.beans.factory.annotation.Value;\r
+import org.springframework.test.context.ContextConfiguration;\r
+import org.springframework.test.context.TestPropertySource;\r
+import org.springframework.test.context.junit4.rules.SpringClassRule;\r
+import org.springframework.test.context.junit4.rules.SpringMethodRule;\r
+\r
+@ContextConfiguration(classes = {\r
+        SchemaLocationsBean.class,\r
+        AAIConfigTranslator.class,\r
+        SchemaVersions.class,\r
+        NodeIngestor.class,\r
+        EdgeIngestor.class,\r
+        EdgeSerializer.class,\r
+        SpringContextAware.class,\r
+        AuditorConfiguration.class,\r
+        DslConfiguration.class,\r
+        IntrospectionConfig.class,\r
+        RestBeanConfig.class\r
+})\r
+@TestPropertySource(properties = {\r
+        "schema.uri.base.path = /aai",\r
+        "schema.ingest.file = src/main/resources/application.properties"\r
+})\r
+public abstract class AAISetup {\r
+\r
+    @Autowired\r
+    protected NodeIngestor nodeIngestor;\r
+\r
+    @Autowired\r
+    protected LoaderFactory loaderFactory;\r
+\r
+    @Autowired\r
+    protected Map<SchemaVersion, MoxyLoader> moxyLoaderInstance;\r
+\r
+    @Autowired\r
+    protected HttpEntry traversalHttpEntry;\r
+\r
+    @Autowired\r
+    protected HttpEntry traversalUriHttpEntry;\r
+\r
+    @Autowired\r
+    protected EdgeSerializer edgeSerializer;\r
+\r
+    @Autowired\r
+    protected SchemaVersions schemaVersions;\r
+\r
+    @Autowired\r
+    protected EdgeIngestor edgeIngestor;\r
+\r
+    @Autowired\r
+    protected AuditorFactory auditorFactory;\r
+\r
+    @Value("${schema.uri.base.path}")\r
+    protected String basePath;\r
+\r
+    @ClassRule\r
+    public static final SpringClassRule springClassRule = new SpringClassRule();\r
+\r
+    @Rule\r
+    public final SpringMethodRule springMethodRule = new SpringMethodRule();\r
+\r
+    protected static JanusGraph graph;\r
+    protected static JanusGraphTransaction tx;\r
+\r
+    @BeforeClass\r
+    public static void setupBundleconfig() throws Exception {\r
+        System.setProperty("AJSC_HOME", "./");\r
+        System.setProperty("BUNDLECONFIG_DIR", "src/main/resources/");\r
+        System.setProperty("org.onap.aai.graphadmin.started", "true");\r
+        graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();\r
+        tx = graph.newTransaction();\r
+    }\r
+\r
+    @AfterClass\r
+    public static void cleanUpGraph() {\r
+        tx.tx().rollback();\r
+        graph.close();\r
+    }\r
+\r
+    public String getPayload(String filename) throws IOException {\r
+\r
+        InputStream inputStream = getClass()\r
+                .getClassLoader()\r
+                .getResourceAsStream(filename);\r
+\r
+        String message = String.format("Unable to find the %s in src/test/resources", filename);\r
+        assertNotNull(message, inputStream);\r
+\r
+        String resource = IOUtils.toString(inputStream);\r
+        return resource;\r
+    }\r
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/GraphAdminTestConfiguration.java b/src/test/java/org/onap/aai/GraphAdminTestConfiguration.java
new file mode 100644 (file)
index 0000000..a69e703
--- /dev/null
@@ -0,0 +1,123 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.apache.http.client.HttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.ssl.SSLContextBuilder;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.boot.web.client.RestTemplateBuilder;
+import org.springframework.context.annotation.Bean;
+import org.springframework.core.env.Environment;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.client.ClientHttpResponse;
+import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
+import org.springframework.util.ResourceUtils;
+import org.springframework.web.client.ResponseErrorHandler;
+import org.springframework.web.client.RestTemplate;
+
+import javax.net.ssl.SSLContext;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.KeyStore;
+
+@TestConfiguration
+public class GraphAdminTestConfiguration {
+
+    private static final EELFLogger logger = EELFManager.getInstance().getLogger(GraphAdminTestConfiguration.class);
+
+    @Autowired
+    private Environment env;
+
+    /**
+     * Create a RestTemplate bean, using the RestTemplateBuilder provided
+     * by the auto-configuration.
+     */
+    @Bean
+    RestTemplate restTemplate(RestTemplateBuilder builder) throws Exception {
+
+        char[] trustStorePassword = env.getProperty("server.ssl.trust-store-password").toCharArray();
+        char[] keyStorePassword   = env.getProperty("server.ssl.key-store-password").toCharArray();
+
+        String keyStore = env.getProperty("server.ssl.key-store");
+        String trustStore = env.getProperty("server.ssl.trust-store");
+
+        SSLContextBuilder sslContextBuilder = SSLContextBuilder.create();
+
+        if(env.acceptsProfiles("two-way-ssl")){
+            sslContextBuilder = sslContextBuilder.loadKeyMaterial(loadPfx(keyStore, keyStorePassword), keyStorePassword);
+        }
+
+        SSLContext sslContext = sslContextBuilder
+                .loadTrustMaterial(ResourceUtils.getFile(trustStore), trustStorePassword)
+                .build();
+
+        HttpClient client = HttpClients.custom()
+                .setSSLContext(sslContext)
+                .setSSLHostnameVerifier((s, sslSession) -> true)
+                .build();
+
+        RestTemplate restTemplate =  builder
+                .requestFactory(new HttpComponentsClientHttpRequestFactory(client))
+                .build();
+
+        restTemplate.setErrorHandler(new ResponseErrorHandler() {
+            @Override
+            public boolean hasError(ClientHttpResponse clientHttpResponse) throws IOException {
+                if (clientHttpResponse.getStatusCode() != HttpStatus.OK) {
+
+                    logger.debug("Status code: " + clientHttpResponse.getStatusCode());
+
+                    if (clientHttpResponse.getStatusCode() == HttpStatus.FORBIDDEN) {
+                        logger.debug("Call returned a error 403 forbidden resposne ");
+                        return true;
+                    }
+
+                    if(clientHttpResponse.getRawStatusCode() % 100 == 5){
+                        logger.debug("Call returned a error " + clientHttpResponse.getStatusText());
+                        return true;
+                    }
+                }
+
+                return false;
+            }
+
+            @Override
+            public void handleError(ClientHttpResponse clientHttpResponse) throws IOException {
+            }
+        });
+
+        return restTemplate;
+    }
+
+    private KeyStore loadPfx(String file, char[] password) throws Exception {
+        KeyStore keyStore = KeyStore.getInstance("PKCS12");
+        File key = ResourceUtils.getFile(file);
+        try (InputStream in = new FileInputStream(key)) {
+            keyStore.load(in, password);
+        }
+        return keyStore;
+    }
+}
diff --git a/src/test/java/org/onap/aai/PayloadUtil.java b/src/test/java/org/onap/aai/PayloadUtil.java
new file mode 100644 (file)
index 0000000..8adc9d1
--- /dev/null
@@ -0,0 +1,111 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai;
+
+import org.apache.commons.io.IOUtils;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static org.junit.Assert.assertNotNull;
+
+public class PayloadUtil {
+
+    private static final Map<String, String> cache = new HashMap<>();
+    private static final Pattern TEMPLATE_PATTERN = Pattern.compile("\\$\\{[^}]+\\}");
+
+    public static String getExpectedPayload(String fileName) throws IOException {
+
+        InputStream inputStream = PayloadUtil.class.getClassLoader().getResourceAsStream("payloads/expected/" + fileName);
+
+        String message = String.format("Unable to find the %s in src/test/resources", fileName);
+        assertNotNull(message, inputStream);
+
+        String resource = IOUtils.toString(inputStream);
+
+        inputStream.close();
+        return resource;
+    }
+
+    public static String getResourcePayload(String fileName) throws IOException {
+
+        InputStream inputStream = PayloadUtil.class.getClassLoader().getResourceAsStream("payloads/resource/" + fileName);
+
+        String message = String.format("Unable to find the %s in src/test/resources", fileName);
+        assertNotNull(message, inputStream);
+
+        String resource = IOUtils.toString(inputStream);
+
+        inputStream.close();
+        return resource;
+    }
+
+    public static String getTemplatePayload(String fileName, Map<String, String> templateValueMap) throws Exception {
+
+        InputStream inputStream = PayloadUtil.class.getClassLoader().getResourceAsStream("payloads/templates/" + fileName);
+
+        String message = String.format("Unable to find the %s in src/test/resources", fileName);
+        assertNotNull(message, inputStream);
+
+        String resource;
+
+        if(cache.containsKey(fileName)){
+            resource = cache.get(fileName);
+        } else {
+            resource = IOUtils.toString(inputStream);
+            cache.put(fileName, resource);
+        }
+
+        Matcher matcher = TEMPLATE_PATTERN.matcher(resource);
+
+        String resourceWithTemplateValues = resource;
+
+        while(matcher.find()){
+            int start = matcher.start() + 2;
+            int end = matcher.end() - 1;
+            String key = resource.substring(start, end);
+            if(templateValueMap.containsKey(key)){
+                resourceWithTemplateValues = resourceWithTemplateValues.replaceAll("\\$\\{" + key +"\\}", templateValueMap.get(key));
+            } else {
+                throw new RuntimeException("Unable to find the key value pair in map for the template processing for key " + key);
+            }
+        }
+
+        inputStream.close();
+        return resourceWithTemplateValues;
+    }
+    
+    public static String getNamedQueryPayload(String fileName) throws IOException {
+
+        InputStream inputStream = PayloadUtil.class.getClassLoader().getResourceAsStream("payloads/named-queries/" + fileName);
+
+        String message = String.format("Unable to find the %s in src/test/resources/payloads/named-queries", fileName);
+        assertNotNull(message, inputStream);
+
+        String resource = IOUtils.toString(inputStream);
+
+        inputStream.close();
+        return resource;
+    }
+}
diff --git a/src/test/java/org/onap/aai/dbgen/DupeToolTest.java b/src/test/java/org/onap/aai/dbgen/DupeToolTest.java
new file mode 100644 (file)
index 0000000..392ce97
--- /dev/null
@@ -0,0 +1,144 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.dbgen;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.AAIGraph;
+
+import static org.junit.Assert.*;
+
+public class DupeToolTest extends AAISetup {
+
+    private static final EELFLogger logger = EELFManager.getInstance().getLogger(DupeToolTest.class);
+
+    private DupeTool dupeTool;
+
+    @Before
+    public void setup(){
+        // NOTE(review): the boolean ctor flag presumably suppresses System.exit
+        // inside the tool so the JVM survives — confirm against DupeTool.
+        dupeTool = new DupeTool(loaderFactory, schemaVersions, false);
+        createGraph();
+    }
+
+    /**
+     * Seeds the graph: one cloud-region/tenant/pserver trio plus 100
+     * p-interface vertices that all share the same interface-name, giving the
+     * dupe tool candidate duplicates to examine.  Commits on success; rolls
+     * back and fails the test on any exception.
+     */
+    private void createGraph() {
+
+        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+
+        boolean success = true;
+
+        try {
+
+            GraphTraversalSource g = transaction.traversal();
+
+            Vertex cloudRegionVertex = g.addV()
+                    .property("aai-node-type", "cloud-region")
+                    .property("cloud-owner", "test-owner")
+                    .property("cloud-region-id", "test-region")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+
+            Vertex tenantVertex = g.addV()
+                    .property("aai-node-type", "tenant")
+                    .property("tenant-id", "test-tenant")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+
+            Vertex pserverVertex = g.addV()
+                    .property("aai-node-type", "pserver")
+                    .property("hostname", "test-pserver")
+                    .property("in-maint", false)
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+
+            // 100 identical p-interfaces: the duplicate candidates for the tool.
+            for(int i = 0; i < 100; ++i){
+                g.addV()
+                        .property("aai-node-type", "p-interface")
+                        .property("interface-name", "p-interface-name")
+                        .property("in-maint", false)
+                        .property("source-of-truth", "JUNIT")
+                        .next();
+            }
+
+            edgeSerializer.addTreeEdge(g, cloudRegionVertex, tenantVertex);
+            edgeSerializer.addEdge(g, cloudRegionVertex, pserverVertex);
+
+        } catch(Exception ex){
+            success = false;
+            logger.error("Unable to create the vertexes", ex);
+        } finally {
+            if(success){
+                transaction.commit();
+            } else {
+                transaction.rollback();
+                fail("Unable to setup the graph");
+            }
+        }
+    }
+
+    // Smoke test only: runs the tool end-to-end but makes no assertions about
+    // what it found or fixed.
+    @Test
+    public void testDupeTool(){
+        //TODO: test does not find duplicates
+        String[] args = {
+                "-userId", "testuser",
+                "-nodeType", "p-interface",
+                "-timeWindowMinutes", "30",
+                "-autoFix",
+                "-maxFix", "30",
+                "-sleepMinutes", "0"
+        };
+
+        dupeTool.execute(args);
+    }
+
+    /**
+     * Removes every vertex created by this test run (tagged with
+     * source-of-truth = JUNIT) so tests stay independent.
+     */
+    @After
+    public void tearDown(){
+
+        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+        boolean success = true;
+
+        try {
+
+            GraphTraversalSource g = transaction.traversal();
+
+            g.V().has("source-of-truth", "JUNIT")
+                    .toList()
+                    .forEach(v -> v.remove());
+
+        } catch(Exception ex){
+            success = false;
+            logger.error("Unable to remove the vertexes", ex);
+        } finally {
+            if(success){
+                transaction.commit();
+            } else {
+                transaction.rollback();
+                fail("Unable to teardown the graph");
+            }
+        }
+
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/dbgen/ForceDeleteToolTest.java b/src/test/java/org/onap/aai/dbgen/ForceDeleteToolTest.java
new file mode 100644 (file)
index 0000000..fb6301c
--- /dev/null
@@ -0,0 +1,206 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.dbgen;\r
+\r
+import com.att.eelf.configuration.EELFLogger;\r
+import com.att.eelf.configuration.EELFManager;\r
+import org.janusgraph.core.JanusGraphTransaction;\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;\r
+import org.apache.tinkerpop.gremlin.structure.Edge;\r
+import org.apache.tinkerpop.gremlin.structure.Vertex;\r
+import org.junit.After;\r
+import org.junit.Before;\r
+import org.junit.FixMethodOrder;\r
+import org.junit.Test;\r
+import org.junit.runners.MethodSorters;\r
+import org.onap.aai.AAISetup;\r
+import org.onap.aai.dbmap.AAIGraph;\r
+\r
+import java.io.ByteArrayInputStream;\r
+import java.io.InputStream;\r
+import java.util.List;\r
+\r
+import static org.junit.Assert.fail;\r
+\r
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)\r
+public class ForceDeleteToolTest extends AAISetup {\r
+\r
+    private static final EELFLogger logger = EELFManager.getInstance().getLogger(ForceDeleteToolTest.class);\r
+\r
+    private ForceDeleteTool deleteTool;\r
+\r
+    private Vertex cloudRegionVertex;\r
+\r
+    @Before\r
+    public void setup(){\r
+        deleteTool = new ForceDeleteTool();\r
+        deleteTool.SHOULD_EXIT_VM = false;\r
+        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();\r
+\r
+        boolean success = true;\r
+\r
+        try {\r
+\r
+            GraphTraversalSource g = transaction.traversal();\r
+\r
+            cloudRegionVertex = g.addV()\r
+                    .property("aai-node-type", "cloud-region")\r
+                    .property("cloud-owner", "test-owner")\r
+                    .property("cloud-region-id", "test-region")\r
+                    .property("source-of-truth", "JUNIT")\r
+                    .next();\r
+\r
+            Vertex tenantVertex = g.addV()\r
+                    .property("aai-node-type", "tenant")\r
+                    .property("tenant-id", "test-tenant")\r
+                    .property("source-of-truth", "JUNIT")\r
+                    .next();\r
+\r
+            Vertex pserverVertex = g.addV()\r
+                    .property("aai-node-type", "pserver")\r
+                    .property("hostname", "test-pserver")\r
+                    .property("in-maint", false)\r
+                    .property("source-of-truth", "JUNIT")\r
+                    .next();\r
+\r
+            edgeSerializer.addTreeEdge(g, cloudRegionVertex, tenantVertex);\r
+            edgeSerializer.addEdge(g, cloudRegionVertex, pserverVertex);\r
+\r
+        } catch(Exception ex){\r
+            success = false;\r
+            logger.error("Unable to create the vertexes", ex);\r
+        } finally {\r
+            if(success){\r
+                transaction.commit();\r
+            } else {\r
+                transaction.rollback();\r
+                fail("Unable to setup the graph");\r
+            }\r
+        }\r
+\r
+\r
+    }\r
+\r
+    @Test\r
+    public void testCollectDataForVertex(){\r
+\r
+        String [] args = {\r
+\r
+                "-action",\r
+                "COLLECT_DATA",\r
+                "-userId",\r
+                "someuser",\r
+                "-params4Collect",\r
+                "cloud-owner|test-owner"\r
+        };\r
+\r
+        deleteTool.main(args);\r
+    }\r
+\r
+    @Test\r
+    public void testDeleteNode(){\r
+\r
+        String id = cloudRegionVertex.id().toString();\r
+\r
+        String [] args = {\r
+\r
+                "-action",\r
+                "DELETE_NODE",\r
+                "-userId",\r
+                "someuser",\r
+                "-vertexId",\r
+                id\r
+        };\r
+\r
+        deleteTool.main(args);\r
+    }\r
+\r
+    @Test\r
+    public void testCollectDataForEdge(){\r
+\r
+        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();\r
+        GraphTraversalSource g = transaction.traversal();\r
+        List<Edge> edges = g.E().toList();\r
+        String cloudRegionToPserverId = edges.get(0).id().toString();\r
+\r
+        String [] args = {\r
+\r
+                "-action",\r
+                "COLLECT_DATA",\r
+                "-userId",\r
+                "someuser",\r
+                "-edgeId",\r
+                cloudRegionToPserverId\r
+        };\r
+\r
+        deleteTool.main(args);\r
+    }\r
+\r
+    @Test\r
+    public void testDeleteForEdge(){\r
+\r
+        InputStream systemInputStream = System.in;\r
+        ByteArrayInputStream in = new ByteArrayInputStream("y".getBytes());\r
+        System.setIn(in);\r
+        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();\r
+        GraphTraversalSource g = transaction.traversal();\r
+        List<Edge> edges = g.E().toList();\r
+        String cloudRegionToPserverId = edges.get(0).id().toString();\r
+\r
+        String [] args = {\r
+\r
+                "-action",\r
+                "DELETE_EDGE",\r
+                "-userId",\r
+                "someuser",\r
+                "-edgeId",\r
+                cloudRegionToPserverId\r
+        };\r
+\r
+        deleteTool.main(args);\r
+        System.setIn(systemInputStream);\r
+    }\r
+    @After\r
+    public void tearDown(){\r
+\r
+        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();\r
+        boolean success = true;\r
+\r
+        try {\r
+\r
+            GraphTraversalSource g = transaction.traversal();\r
+\r
+            g.V().has("source-of-truth", "JUNIT")\r
+                 .toList()\r
+                 .forEach(v -> v.remove());\r
+\r
+        } catch(Exception ex){\r
+            success = false;\r
+            logger.error("Unable to remove the vertexes", ex);\r
+        } finally {\r
+            if(success){\r
+                transaction.commit();\r
+            } else {\r
+                transaction.rollback();\r
+                fail("Unable to teardown the graph");\r
+            }\r
+        }\r
+    }\r
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/EdgeSwingMigratorTest.java b/src/test/java/org/onap/aai/migration/EdgeSwingMigratorTest.java
new file mode 100644 (file)
index 0000000..d472b4d
--- /dev/null
@@ -0,0 +1,240 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration;\r
+\r
+import static org.junit.Assert.*;\r
+import static org.mockito.Mockito.spy;\r
+import static org.mockito.Mockito.when;\r
+\r
+import java.util.ArrayList;\r
+import java.util.HashMap;\r
+import java.util.Iterator;\r
+import java.util.List;\r
+import java.util.Optional;\r
+\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;\r
+import org.apache.tinkerpop.gremlin.structure.Direction;\r
+import org.apache.tinkerpop.gremlin.structure.Vertex;\r
+import org.apache.tinkerpop.gremlin.structure.Edge;\r
+import org.apache.tinkerpop.gremlin.structure.Property;\r
+import org.javatuples.Pair;\r
+import org.junit.After;\r
+import org.junit.Before;\r
+import org.junit.Test;\r
+import org.mockito.Mockito;\r
+import org.onap.aai.AAISetup;\r
+import org.onap.aai.dbmap.DBConnectionType;\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.exceptions.AAIException;\r
+import org.onap.aai.introspection.Loader;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.introspection.ModelType;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.setup.SchemaVersions;\r
+import org.onap.aai.setup.SchemaVersion;\r
+import org.onap.aai.serialization.engines.QueryStyle;\r
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+\r
+import org.janusgraph.core.schema.JanusGraphManagement;\r
+\r
/**
 * Tests {@link EdgeSwingMigrator} by swinging an incoming "IsA" edge from one
 * model-ver vertex (modelVer1) onto another (modelVer3), then verifying that
 * tree (BelongsTo) edges are untouched, the new edge exists and carries the
 * expected properties, and the old edge is gone.
 */
public class EdgeSwingMigratorTest extends AAISetup {
	
	private final static ModelType introspectorFactoryType = ModelType.MOXY;
	private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
	private final static DBConnectionType type = DBConnectionType.REALTIME;
	private Loader loader;
	private TransactionalGraphEngine dbEngine;
	private GraphTraversalSource g;
	private MockEdgeSwingMigrator migration;
	// Edge-swing source: edges pointing at this vertex are moved off of it.
	private Vertex modelVer1 = null;
	// Edge-swing target: edges are moved onto this vertex.
	private Vertex modelVer3 = null;
	
	
	/**
	 * Builds the test graph, wires a spied TransactionalGraphEngine so the
	 * migrator traverses this in-memory graph, then runs the migration once.
	 * All assertions in the test methods inspect the post-migration state.
	 */
	@Before
	public void setUp() throws Exception {
		JanusGraphManagement janusgraphManagement = graph.openManagement();
		g = graph.traversal();
		loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
		dbEngine = new JanusGraphDBEngine(
				queryStyle,
				type,
				loader);
		createFirstVertexAndRelatedVertexes();
		// Spy the engine so getTraversalSource()/getManagementSystem() return
		// this test's graph instead of touching a real backend.
		TransactionalGraphEngine spy = spy(dbEngine);
		TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
		GraphTraversalSource traversal = g;
		when(spy.asAdmin()).thenReturn(adminSpy);
		when(adminSpy.getTraversalSource()).thenReturn(traversal);
		Mockito.doReturn(janusgraphManagement).when(adminSpy).getManagementSystem();
		
		
		migration = new MockEdgeSwingMigrator(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
		migration.run();
	}
	
	/**
	 * Creates three model/model-ver pairs plus a model-element under
	 * modelVer2 that initially has a cousin (IsA) edge to modelVer1 — the
	 * edge the migration is expected to swing over to modelVer3.
	 */
	private void createFirstVertexAndRelatedVertexes() throws AAIException {
		Vertex model1 = g.addV().property("aai-node-type", "model")
				.property("model-invariant-id", "model-invariant-id-1")
				.property("model-type", "widget")
				.next();
		modelVer1 = g.addV().property("aai-node-type", "model-ver")
				.property("model-version-id", "model-version-id-1")
				.property("model-name", "connector")
				.property("model-version", "v1.0")
				.next();
		edgeSerializer.addTreeEdge(g, model1, modelVer1);
		
		//Create the cousin vertex - modelElement2 which will point to modelVer1
		Vertex model2 = g.addV().property("aai-node-type", "model")
				.property("model-invariant-id", "model-invariant-id-2")
				.property("model-type", "resource")
				.next();
		Vertex modelVer2 = g.addV().property("aai-node-type", "model-ver")
				.property("model-version-id", "model-version-id-2")
				.property("model-name", "resourceModTestVer")
				.property("model-version", "v1.0")
				.next();
		edgeSerializer.addTreeEdge(g, model2, modelVer2);
		Vertex modelElement2 = g.addV().property("aai-node-type", "model-element")
				.property("model-element-uuid", "model-element-uuid-2")
				.property("new-data-del-flag", "T")
				.property("cardinality", "unbounded")
				.next();
		edgeSerializer.addTreeEdge(g, modelVer2, modelElement2);
		edgeSerializer.addEdge(g, modelVer1, modelElement2);
		
		// Third model/model-ver pair: the destination the edge is swung to.
		Vertex model3 = g.addV().property("aai-node-type", "model")
				.property("model-invariant-id", "model-invariant-id-3")
				.property("model-type", "widget")
				.next();
		modelVer3 = g.addV().property("aai-node-type", "model-ver")
				.property("model-version-id", "model-version-id-3")
				.property("model-name", "connector")
				.property("model-version", "v1.0")
				.next();
		edgeSerializer.addTreeEdge(g, model3, modelVer3);
	}
	
	/**
	 * Concrete migrator for the test: swings modelVer1's incoming "IsA"
	 * edges from model-element nodes over to modelVer3, with no cleanup.
	 */
	class MockEdgeSwingMigrator extends EdgeSwingMigrator {
		
		public MockEdgeSwingMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions) {
			super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
		}

		/** Single from→to pair: swing edges off modelVer1 onto modelVer3. */
		@Override
		public List<Pair<Vertex, Vertex>> getAffectedNodePairs() {
			List<Pair<Vertex, Vertex>> fromToVertPairList = new ArrayList<Pair<Vertex, Vertex>>();
			Vertex fromVert = modelVer1;
			Vertex toVert = modelVer3;
			fromToVertPairList.add(new Pair<>(fromVert, toVert));
			return fromToVertPairList;
		}
		
		/** Only edges whose far-side node is a model-element are swung. */
		public String getNodeTypeRestriction(){
			return "model-element";
		}

		/** Only edges with this label are swung. */
		public String getEdgeLabelRestriction(){
			return "org.onap.relationships.inventory.IsA";
		}
				
		/** Only edges incoming to the from-vertex are swung. */
		public String getEdgeDirRestriction(){
			return "IN";
		}

		@Override
		public void cleanupAsAppropriate(List<Pair<Vertex, Vertex>> nodePairL) {
			// For the scenario we're testing, we would define this to remove the model-ver that
			// we moved off of, and also remove its parent model since it was a widget model and 
			// these are currently one-to-one (model-ver to model).
			//
			// But what gets cleaned up (if anything) after a node's edges are migrated will vary depending 
			// on what the edgeSwingMigration is being used for.
			

		}

		@Override
		public Optional<String[]> getAffectedNodeTypes() {
			return Optional.of(new String[]{"model", "model-element", "model-ver"});
		}

		@Override
		public String getMigrationName() {
			return "MockEdgeSwingMigrator";
		}
	}

	/** Tree (BelongsTo) edges must survive the migration untouched. */
	@Test
	public void testBelongsToEdgesStillThere() {
		assertEquals(true, g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-1")
				.out("org.onap.relationships.inventory.BelongsTo").has("model-invariant-id", "model-invariant-id-1").hasNext());
		assertEquals(true, g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-3")
				.out("org.onap.relationships.inventory.BelongsTo").has("model-invariant-id", "model-invariant-id-3").hasNext());
		assertEquals(true, g.V().has("aai-node-type", "model-element").has("model-element-uuid", "model-element-uuid-2")
				.out("org.onap.relationships.inventory.BelongsTo").has("model-version-id", "model-version-id-2").hasNext());
	}
	
	/** After migration, modelElement2's IsA edge must now point at modelVer3. */
	@Test
	public void testThatNewEdgeAdded() {
		assertEquals(true, g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-3")
				.in("org.onap.relationships.inventory.IsA").has("model-element-uuid", "model-element-uuid-2").hasNext());
	}
	
	/** The swung edge must carry the standard aai-uuid and delete-other-v edge properties. */
	@Test
	public void testThatNewEdgeHasAaiUuidAndDelProperties() {
		boolean haveUuidProp = false;
		boolean haveDelOtherVProp = false;
		GraphTraversal<Vertex, Vertex> modVerTrav = g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-3");
		while (modVerTrav.hasNext()) {
		Vertex modVerVtx = modVerTrav.next();
		Iterator <Edge> edgeIter = modVerVtx.edges(Direction.IN, "org.onap.relationships.inventory.IsA");
		while( edgeIter.hasNext() ){
			Edge oldOutE = edgeIter.next();
			
			Iterator <Property<Object>> propsIter2 = oldOutE.properties();
				HashMap<String, String> propMap2 = new HashMap<String,String>();
				while( propsIter2.hasNext() ){
					Property <Object> ep2 = propsIter2.next();
					if( ep2.key().equals("aai-uuid") ){
						haveUuidProp = true;
					}
					else if( ep2.key().equals("delete-other-v") ){
						haveDelOtherVProp = true;
					}
				}
		}
		}
			
		assertTrue("New IsA edge has aai-uuid property ", haveUuidProp );
		assertTrue("New IsA edge has delete-other-v property ", haveDelOtherVProp );
	}
		
		
	/** The original IsA edge into modelVer1 must no longer exist. */
	@Test
	public void testThatOldEdgeGone() {
		assertEquals(false, g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-1")
				.in("org.onap.relationships.inventory.IsA").has("model-element-uuid", "model-element-uuid-2").hasNext());
	}
	
	
}
diff --git a/src/test/java/org/onap/aai/migration/MigrationControllerInternalTest.java b/src/test/java/org/onap/aai/migration/MigrationControllerInternalTest.java
new file mode 100644 (file)
index 0000000..6cc108b
--- /dev/null
@@ -0,0 +1,280 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.StringContains.containsString;
+
+public class MigrationControllerInternalTest extends AAISetup {
+
+    private static final EELFLogger logger = EELFManager.getInstance().getLogger(MigrationControllerInternalTest.class);
+
+    private MigrationControllerInternal migrationControllerInternal;
+
+    @Before
+    public void setup() throws AAIException {
+        migrationControllerInternal = new MigrationControllerInternal(loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        clearGraph();
+        createGraph();
+    }
+
+    private void createGraph(){
+
+        JanusGraphTransaction transaction = AAIGraph.getInstance().getGraph().newTransaction();
+        boolean success = true;
+
+        try {
+            GraphTraversalSource g = transaction.traversal();
+
+            Vertex servSub1 = g.addV().property("aai-node-type", "service-subscription")
+                    .property("service-type", "DHV")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex servinst1 =  g.addV().property( "aai-node-type", "service-instance")
+                    .property("service-type", "DHV")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex allotedRsrc1 =  g.addV().property( "aai-node-type", "allotted-resource")
+                    .property("id","rsrc1")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex servinst2 =  g.addV().property( "aai-node-type", "service-instance")
+                    .property("service-type", "VVIG")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex servSub2 = g.addV().property("aai-node-type", "service-subscription")
+                    .property("service-type", "VVIG")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+
+            Vertex genericvnf1 = g.addV().property("aai-node-type", "generic-vnf")
+                    .property("vnf-id", "vnfId1")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex vServer1 = g.addV().property("aai-node-type", "vserver")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex pServer1 = g.addV().property("aai-node-type", "pserver")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex pInterfaceWan1 = g.addV().property("aai-node-type", "p-interface")
+                    .property("interface-name","ge-0/0/10")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex tunnelXConnectAll_Wan1 =  g.addV().property( "aai-node-type", "tunnel-xconnect")
+                    .property("id", "tunnelXConnectWan1")
+                    .property("bandwidth-up-wan1", "300")
+                    .property("bandwidth-down-wan1", "400")
+                    .property("bandwidth-up-wan2", "500")
+                    .property("bandwidth-down-wan2", "600")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+
+            Vertex pLinkWan1 = g.addV().property("aai-node-type", "physical-link")
+                    .property("link-name", "pLinkWan1")
+                    .property("service-provider-bandwidth-up-units", "empty")
+                    .property("service-provider-bandwidth-down-units", "empty")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex servSub3 = g.addV().property("aai-node-type", "service-subscription")
+                    .property("service-type", "DHV")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex servinst3 =  g.addV().property( "aai-node-type", "service-instance")
+                    .property("service-type", "DHV")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex allotedRsrc3 =  g.addV().property( "aai-node-type", "allotted-resource")
+                    .property("id","rsrc3")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex servinst4 =  g.addV().property( "aai-node-type", "service-instance")
+                    .property("service-type", "VVIG")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex servSub4 = g.addV().property("aai-node-type", "service-subscription")
+                    .property("service-type", "VVIG")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+
+            Vertex genericvnf3 = g.addV().property("aai-node-type", "generic-vnf")
+                    .property("vnf-id", "vnfId3")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex vServer3 = g.addV().property("aai-node-type", "vserver")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex pServer3 = g.addV().property("aai-node-type", "pserver")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex pInterfaceWan3 = g.addV().property("aai-node-type", "p-interface")
+                    .property("interface-name","ge-0/0/11")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+            Vertex tunnelXConnectAll_Wan3 =  g.addV().property( "aai-node-type", "tunnel-xconnect")
+                    .property("id", "tunnelXConnectWan3")
+                    .property("bandwidth-up-wan1", "300")
+                    .property("bandwidth-down-wan1", "400")
+                    .property("bandwidth-up-wan2", "500")
+                    .property("bandwidth-down-wan2", "600")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+
+            Vertex pLinkWan3 = g.addV().property("aai-node-type", "physical-link")
+                    .property("link-name", "pLinkWan3")
+                    .property("service-provider-bandwidth-up-units", "empty")
+                    .property("service-provider-bandwidth-down-units", "empty")
+                    .property("source-of-truth", "JUNIT")
+                    .next();
+
+            edgeSerializer.addTreeEdge(g,servSub1,servinst1);
+            edgeSerializer.addEdge(g,servinst1,allotedRsrc1);
+            edgeSerializer.addTreeEdge(g,servinst2,servSub2);
+            edgeSerializer.addTreeEdge(g,allotedRsrc1,servinst2);
+
+            edgeSerializer.addTreeEdge(g,allotedRsrc1,tunnelXConnectAll_Wan1);
+
+
+            edgeSerializer.addEdge(g,servinst1,genericvnf1);
+            edgeSerializer.addEdge(g,genericvnf1,vServer1);
+            edgeSerializer.addEdge(g,vServer1,pServer1);
+            edgeSerializer.addTreeEdge(g,pServer1,pInterfaceWan1);
+            edgeSerializer.addEdge(g,pInterfaceWan1,pLinkWan1);
+
+            edgeSerializer.addTreeEdge(g,servSub3,servinst3);
+            edgeSerializer.addEdge(g,servinst3,allotedRsrc3);
+            edgeSerializer.addTreeEdge(g,servinst4,servSub4);
+            edgeSerializer.addTreeEdge(g,allotedRsrc3,servinst4);
+
+            edgeSerializer.addTreeEdge(g,allotedRsrc3,tunnelXConnectAll_Wan3);
+
+
+            edgeSerializer.addEdge(g,servinst3,genericvnf3);
+            edgeSerializer.addEdge(g,genericvnf3,vServer3);
+            edgeSerializer.addEdge(g,vServer3,pServer3);
+            edgeSerializer.addTreeEdge(g,pServer3,pInterfaceWan3);
+            edgeSerializer.addEdge(g,pInterfaceWan3,pLinkWan3);
+
+        } catch(Exception ex){
+            success = false;
+            logger.error("Unable to create the graph {}", ex);
+        } finally {
+            if(success){
+                transaction.commit();
+            } else {
+                transaction.rollback();
+            }
+
+        }
+    }
+
+    @Ignore
+    @Test
+    public void testListAllOfMigrations() throws Exception {
+        PrintStream oldOutputStream = System.out;
+        final ByteArrayOutputStream myOut = new ByteArrayOutputStream();
+        System.setOut(new PrintStream(myOut));
+
+        String [] args = {
+            "-c", "./bundleconfig-local/etc/appprops/janusgraph-realtime.properties",
+            "-l"
+        };
+
+        migrationControllerInternal.run(args);
+
+        String content = myOut.toString();
+        assertThat(content, containsString("List of all migrations"));
+        System.setOut(oldOutputStream);
+    }
+
+    @Test
+    public void testRunSpecificMigration() throws Exception {
+        String [] args = "-c ./bundleconfig-local/etc/appprops/janusgraph-realtime.properties -m SDWANSpeedChangeMigration".split(" ");
+        migrationControllerInternal.run(args);
+    }
+
+    @Test
+    public void testRunSpecificMigrationAndCommit() throws Exception {
+        String [] args = {
+                "-c", "./bundleconfig-local/etc/appprops/janusgraph-realtime.properties",
+                "-m", "SDWANSpeedChangeMigration",
+                "--commit"
+        };
+        migrationControllerInternal.run(args);
+    }
+
+    @Test
+    public void testRunSpecificMigrationFromLoadingSnapshotAndCommit() throws Exception{
+        clearGraph();
+        String [] args = {
+                "-d", "./snapshots/sdwan_test_migration.graphson",
+                "-c", "./bundleconfig-local/etc/appprops/janusgraph-realtime.properties",
+                "-m", "SDWANSpeedChangeMigration"
+        };
+        migrationControllerInternal.run(args);
+    }
+
+    @After
+    public void tearDown(){
+        clearGraph();
+    }
+
+    public void clearGraph(){
+
+        JanusGraphTransaction janusgraphTransaction = AAIGraph.getInstance().getGraph().newTransaction();
+
+        boolean success = true;
+
+        try {
+            GraphTraversalSource g = janusgraphTransaction.traversal();
+
+            g.V().has("source-of-truth", "JUNIT")
+                 .toList()
+                 .forEach((v) -> v.remove());
+
+        } catch(Exception ex) {
+            success = false;
+            logger.error("Unable to remove all of the vertexes", ex);
+        } finally {
+            if(success){
+                janusgraphTransaction.commit();
+            } else {
+                janusgraphTransaction.rollback();
+            }
+        }
+
+    }
+}
diff --git a/src/test/java/org/onap/aai/migration/PropertyMigratorTest.java b/src/test/java/org/onap/aai/migration/PropertyMigratorTest.java
new file mode 100644 (file)
index 0000000..654782c
--- /dev/null
@@ -0,0 +1,130 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import org.janusgraph.core.Cardinality;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import java.util.List;
+import java.util.Optional;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class PropertyMigratorTest extends AAISetup {
+
+    private static final EELFLogger logger = EELFManager.getInstance().getLogger(PropertyMigratorTest.class);
+
+    public static class PserverPropMigrator extends PropertyMigrator {
+
+        public PserverPropMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions, String oldName, String newName, Class<?> type, Cardinality cardinality) {
+            super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+            this.initialize(oldName, newName, type, cardinality);
+        }
+
+        @Override
+        public boolean isIndexed() {
+            return true;
+        }
+
+        @Override
+        public Optional<String[]> getAffectedNodeTypes() {
+            return Optional.of(new String[]{ "pserver" });
+        }
+
+        @Override
+        public String getMigrationName() {
+            return "PserverPropMigrator";
+        }
+    }
+
+    @Before
+    public void setup(){
+        AAIGraph.getInstance();
+        JanusGraphTransaction janusgraphTransaction = AAIGraph.getInstance().getGraph().newTransaction();
+        boolean success = true;
+
+        try {
+            GraphTraversalSource g = janusgraphTransaction.traversal();
+            g.addV()
+                 .property("aai-node-type", "pserver")
+                 .property("hostname", "fake-hostname")
+                 .property("inv-status", "some status")
+                 .property("source-of-truth", "JUNIT")
+                 .next();
+        } catch(Exception ex){
+           success = false;
+           logger.error("Unable to commit the transaction {}", ex);
+
+        } finally {
+            if(success){
+                janusgraphTransaction.commit();
+            } else {
+                janusgraphTransaction.rollback();
+            }
+
+        }
+    }
+
+    @Test
+    public void testAfterPropertyMigration(){
+
+        String oldPropName = "inv-status";
+        String newPropName = "inventory-status";
+
+        Loader loader = loaderFactory.createLoaderForVersion(ModelType.MOXY, schemaVersions.getDefaultVersion());
+        JanusGraphDBEngine dbEngine = new JanusGraphDBEngine(QueryStyle.TRAVERSAL, DBConnectionType.REALTIME, loader);
+        dbEngine.startTransaction();
+
+        PropertyMigrator propertyMigrator = new PserverPropMigrator(dbEngine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, oldPropName, newPropName, String.class, Cardinality.SINGLE);
+        propertyMigrator.run();
+        assertEquals("Expecting the property to be success", Status.SUCCESS, propertyMigrator.getStatus());
+        dbEngine.commit();
+
+        JanusGraphTransaction janusgraphTransaction = AAIGraph.getInstance().getGraph().newTransaction();
+        GraphTraversalSource g = janusgraphTransaction.traversal();
+
+        List<Vertex> oldVList = g.V().has("aai-node-type", "pserver").has(oldPropName).toList();
+        List<Vertex> newVList = g.V().has("aai-node-type", "pserver").has(newPropName).toList();
+
+        assertEquals("Expecting the vertex list with old property to be zero", 0, oldVList.size());
+        assertEquals("Expecting the vertex list with new property to be 1", 1, newVList.size());
+        assertEquals("Expecting the equipment type to be some equipment", "some status", newVList.get(0).property(newPropName).value());
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/ValueMigratorTest.java b/src/test/java/org/onap/aai/migration/ValueMigratorTest.java
new file mode 100644 (file)
index 0000000..0b64ccd
--- /dev/null
@@ -0,0 +1,150 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+
+import static org.junit.Assert.assertTrue;
+
+public class ValueMigratorTest extends AAISetup{
+
+    public static class SampleValueMigrator extends ValueMigrator {
+        public SampleValueMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions, Map map, Boolean updateExistingValues){
+            super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, map, updateExistingValues);
+        }
+        @Override
+        public Status getStatus() {
+            return Status.SUCCESS;
+        }
+        @Override
+        public Optional<String[]> getAffectedNodeTypes() {
+            return null;
+        }
+        @Override
+        public String getMigrationName() {
+            return "SampleValueMigrator";
+        }
+    }
+
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+    private final static DBConnectionType type = DBConnectionType.REALTIME;
+    private Loader loader;
+    private TransactionalGraphEngine dbEngine;
+    private JanusGraph graph;
+    private SampleValueMigrator migration;
+    private GraphTraversalSource g;
+    private JanusGraphTransaction tx;
+    private SampleValueMigrator existingValuesMigration;
+
+    @Before
+    public void setup() throws Exception{
+        graph = JanusGraphFactory.build().set("storage.backend", "inmemory").open();
+        tx = graph.newTransaction();
+        g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                type,
+                loader);
+        Map<String, Map> map = new HashMap<>();
+        Map<String, Boolean> pair = new HashMap<>();
+        pair.put("in-maint", true);
+        map.put("pserver", pair);
+        map.put("pnf", pair);
+        g.addV().property("aai-node-type", "pserver")
+                .property("pserver-id", "pserver0")
+                .next();
+        g.addV().property("aai-node-type", "pserver")
+                .property("pserver-id", "pserver1")
+                .property("in-maint", "")
+                .next();
+        g.addV().property("aai-node-type", "pserver")
+                .property("pserver-id", "pserver2")
+                .property("in-maint", false)
+                .next();
+        g.addV().property("aai-node-type", "pnf")
+                .property("pnf-name","pnf1" )
+                .property("in-maint", false)
+                .next();
+        TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+        GraphTraversalSource traversal = g;
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
+        migration = new SampleValueMigrator(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, map, false);
+        migration.run();
+
+        map = new HashMap<>();
+        pair = new HashMap<>();
+        pair.put("in-maint", true);
+        map.put("pnf", pair);
+        existingValuesMigration = new SampleValueMigrator(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, map, true);
+        existingValuesMigration.run();
+    }
+
+    @Test
+    public void testMissingProperty(){
+        assertTrue("Value of pnf should be updated since the property doesn't exist",
+                g.V().has("aai-node-type", "pserver").has("pserver-id", "pserver0").has("in-maint", true).hasNext());
+    }
+
+    @Test
+    public void testExistingValue() {
+        assertTrue("Value of pserver shouldn't be updated since it already exists",
+                g.V().has("aai-node-type", "pserver").has("pserver-id", "pserver2").has("in-maint", false).hasNext());
+    }
+
+    @Test
+    public void testEmptyValue() {
+        assertTrue("Value of pserver should be updated since the value is an empty string",
+                g.V().has("aai-node-type", "pserver").has("pserver-id", "pserver1").has("in-maint", true).hasNext());
+    }
+
+    @Test
+    public void testUpdateExistingValues() {
+        assertTrue("Value of pnf should be updated even though it already exists",
+                g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf1").has("in-maint", true).hasNext());
+    }
+}
diff --git a/src/test/java/org/onap/aai/migration/VertexMergeTest.java b/src/test/java/org/onap/aai/migration/VertexMergeTest.java
new file mode 100644 (file)
index 0000000..501072b
--- /dev/null
@@ -0,0 +1,177 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration;
+
+import org.janusgraph.core.Cardinality;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.schema.JanusGraphManagement;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.db.DBSerializer;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import java.io.UnsupportedEncodingException;
+import java.util.*;
+
+import static org.hamcrest.collection.IsIterableContainingInAnyOrder.containsInAnyOrder;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+@Ignore
+public class VertexMergeTest extends AAISetup {
+       
+       
+       private final static SchemaVersion version = new SchemaVersion("v10");
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private GraphTraversalSource g;
+       private Graph tx;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, version);
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               JanusGraphManagement mgmt = graph.openManagement();
+               mgmt.makePropertyKey("test-list").dataType(String.class).cardinality(Cardinality.SET).make();
+               mgmt.commit();
+               Vertex pserverSkeleton = g.addV().property("aai-node-type", "pserver").property("hostname", "TEST1")
+                               .property("source-of-truth", "AAI-EXTENSIONS").property("fqdn", "test1.com").property("test-list", "value1").next();
+
+               Vertex pInterface1 = g.addV().property("aai-node-type", "p-interface").property("interface-name", "p-interface1")
+                               .property(AAIProperties.AAI_URI, "/cloud-infrastructure/pservers/pserver/TEST1/p-interfaces/p-interface/p-interface1").next();
+               
+               Vertex pInterface2 = g.addV().property("aai-node-type", "p-interface").property("interface-name", "p-interface2")
+                               .property(AAIProperties.AAI_URI, "/cloud-infrastructure/pservers/pserver/TEST1/p-interfaces/p-interface/p-interface2").next();
+               
+               Vertex pInterface2Secondary = g.addV().property("aai-node-type", "p-interface").property("interface-name", "p-interface2").property("special-prop", "value")
+                               .property(AAIProperties.AAI_URI, "/cloud-infrastructure/pservers/pserver/TEST1/p-interfaces/p-interface/p-interface2").next();
+               
+               Vertex lInterface1 = g.addV().property("aai-node-type", "l-interface").property("interface-name", "l-interface1").property("special-prop", "value")
+                               .property(AAIProperties.AAI_URI, "/cloud-infrastructure/pservers/pserver/TEST1/p-interfaces/p-interface/p-interface2/l-interfaces/l-interface/l-interface1").next();
+               
+               Vertex lInterface1Canopi = g.addV().property("aai-node-type", "l-interface").property("interface-name", "l-interface1")
+                               .property(AAIProperties.AAI_URI, "/cloud-infrastructure/pservers/pserver/TEST1/p-interfaces/p-interface/p-interface2/l-interfaces/l-interface/l-interface1").next();
+               
+               Vertex logicalLink = g.addV().property("aai-node-type", "logical-link").property("link-name", "logical-link1")
+                               .property(AAIProperties.AAI_URI, "/network/logical-links/logical-link/logical-link1").next();
+               Vertex pserverCanopi = g.addV().property("aai-node-type", "pserver").property("hostname",  "TEST1")
+                               .property("source-of-truth", "CANOPI-WS").property("fqdn", "test2.com").property("test-list", "value2").next();
+               
+               Vertex complex1 = g.addV().property("aai-node-type", "complex").property("physical-location-id", "complex1")
+                               .property("source-of-truth", "RO").next();
+               
+               Vertex complex2 = g.addV().property("aai-node-type", "complex").property("physical-location-id", "complex2")
+                               .property("source-of-truth", "RCT").next();
+               
+               Vertex vserver1 = g.addV().property("aai-node-type", "vserver").property("vserver-id", "vserver1")
+                               .property("source-of-truth", "RO").next();
+               
+               Vertex vserver2 = g.addV().property("aai-node-type", "vserver").property("vserver-id", "vserver2")
+                               .property("source-of-truth", "RCT").next();
+               Vertex vserver3 = g.addV().property("aai-node-type", "vserver").property("vserver-id", "vserver3")
+                               .property("source-of-truth", "RCT").next();
+               Vertex vserver4 = g.addV().property("aai-node-type", "vserver").property("vserver-id", "vserver4")
+                               .property("source-of-truth", "RCT").next();
+               Vertex vserver5 = g.addV().property("aai-node-type", "vserver").property("vserver-id", "vserver5")
+                               .property("source-of-truth", "RCT").next();
+               
+               
+               edgeSerializer.addEdge(g, pserverSkeleton, complex1);
+               edgeSerializer.addEdge(g, pserverSkeleton, vserver1);
+               edgeSerializer.addEdge(g, pserverSkeleton, vserver2);
+               edgeSerializer.addTreeEdge(g, pserverSkeleton, pInterface1);
+               edgeSerializer.addTreeEdge(g, pserverSkeleton, pInterface2Secondary);
+               edgeSerializer.addTreeEdge(g, pInterface2Secondary, lInterface1);
+               edgeSerializer.addEdge(g, lInterface1, logicalLink);
+               edgeSerializer.addEdge(g, pserverCanopi, complex2);
+               edgeSerializer.addEdge(g, pserverCanopi, vserver3);
+               edgeSerializer.addEdge(g, pserverCanopi, vserver4);
+               edgeSerializer.addEdge(g, pserverCanopi, vserver5);
+               edgeSerializer.addTreeEdge(g, pserverCanopi, pInterface2);
+               edgeSerializer.addTreeEdge(g, pInterface2, lInterface1Canopi);
+
+               Map<String, Set<String>> forceCopy = new HashMap<>();
+               Set<String> forceSet = new HashSet<>();
+               forceSet.add("fqdn");
+               forceCopy.put("pserver", forceSet);
+               
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = g;
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+               DBSerializer serializer = new DBSerializer(version, spy, introspectorFactoryType, "Merge test");
+
+               VertexMerge merge = new VertexMerge.Builder(loader, spy, serializer).edgeSerializer(edgeSerializer).build();
+               merge.performMerge(pserverCanopi, pserverSkeleton, forceCopy, basePath);
+       }
+
+       @After
+       public void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+
+       @Test
+       public void run() throws UnsupportedEncodingException {
+
+               assertEquals("pserver merged", false, g.V().has("hostname", "TEST1").has("source-of-truth", "AAI-EXTENSIONS").hasNext());
+               assertThat("pserver list merge", Arrays.asList("value1", "value2"), containsInAnyOrder(g.V().has("hostname", "TEST1").values("test-list").toList().toArray()));
+               assertEquals("canopi pserver has one edge to vserver2", 1, g.V().has("hostname", "TEST1").both().has("vserver-id", "vserver2").toList().size());
+               assertEquals("canopi pserver has one edge to vserver1", 1, g.V().has("hostname", "TEST1").both().has("vserver-id", "vserver1").toList().size());
+               assertEquals("canopi pserver retained edge to complex2", true, g.V().has("hostname", "TEST1").both().has("physical-location-id", "complex2").hasNext());
+               assertEquals("canopi pserver received forced prop", "test1.com", g.V().has("hostname", "TEST1").values("fqdn").next());
+               assertEquals("pserver skeleton child copied", true, g.V().has("hostname", "TEST1").both().has("interface-name", "p-interface1").hasNext());
+               assertEquals("pserver skeleton child merged", true, g.V().has("hostname", "TEST1").both().has("interface-name", "p-interface2").has("special-prop", "value").hasNext());
+               assertEquals("l-interface child merged", true, g.V().has("hostname", "TEST1").both().has("interface-name", "p-interface2").both().has("interface-name", "l-interface1").has("special-prop", "value").hasNext());
+               assertEquals("l-interface child cousin edge merged", true, g.V().has("hostname", "TEST1").both().has("interface-name", "p-interface2").both().has("interface-name", "l-interface1").both().has("link-name", "logical-link1").hasNext());
+               assertEquals("one l-interface1 found", new Long(1), g.V().has("interface-name", "l-interface1").count().next());
+               assertEquals("one p-interface2 found", new Long(1), g.V().has("interface-name", "p-interface2").count().next());
+
+       }
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/ContainmentDeleteOtherVPropertyMigrationTest.java b/src/test/java/org/onap/aai/migration/v12/ContainmentDeleteOtherVPropertyMigrationTest.java
new file mode 100644 (file)
index 0000000..d159ef8
--- /dev/null
@@ -0,0 +1,112 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.schema.JanusGraphManagement;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Graph;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.edges.enums.EdgeProperty;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.edges.enums.AAIDirection;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class ContainmentDeleteOtherVPropertyMigrationTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private ContainmentDeleteOtherVPropertyMigration migration;
+       private GraphTraversalSource g;
+       private Graph tx;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               JanusGraphManagement janusgraphManagement = graph.openManagement();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+               Vertex v = g.addV().property("aai-node-type", "generic-vnf")
+                                                       .property("vnf-id", "delcontains-test-vnf")
+                                                       .next();
+               Vertex v2 = g.addV().property("aai-node-type", "l-interface")
+                                                       .property("interface-name", "delcontains-test-lint")
+                                                       .next();
+               Edge e = v.addEdge("hasLInterface", v2, EdgeProperty.CONTAINS.toString(), AAIDirection.OUT.toString(), 
+                                                                                                       EdgeProperty.DELETE_OTHER_V.toString(), AAIDirection.NONE.toString());
+               
+               Vertex v3 = g.addV().property("aai-node-type", "allotted-resource").next();
+               
+               Edge e2 = v2.addEdge("uses", v3, EdgeProperty.CONTAINS.toString(), AAIDirection.NONE.toString(),
+                                                                                       EdgeProperty.DELETE_OTHER_V.toString(), AAIDirection.NONE.toString());
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+               GraphTraversalSource traversal = g;
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               Mockito.doReturn(janusgraphManagement).when(adminSpy).getManagementSystem();
+               migration = new ContainmentDeleteOtherVPropertyMigration(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions, "/edgeMigrationTestRules.json");
+               migration.run();
+       }
+       
+       @After
+       public void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+       
+       @Test
+       public void run() {
+               assertEquals("del other now OUT", true, 
+                               g.E().hasLabel("hasLInterface").has(EdgeProperty.DELETE_OTHER_V.toString(), AAIDirection.OUT.toString()).hasNext());
+               assertEquals("contains val still same", true, 
+                               g.E().hasLabel("hasLInterface").has(EdgeProperty.CONTAINS.toString(), AAIDirection.OUT.toString()).hasNext());
+               assertEquals("non-containment unchanged", true,
+                               g.E().hasLabel("uses").has(EdgeProperty.DELETE_OTHER_V.toString(), AAIDirection.NONE.toString()).hasNext());
+       }
+
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/DeletePInterfaceTest.java b/src/test/java/org/onap/aai/migration/v12/DeletePInterfaceTest.java
new file mode 100644 (file)
index 0000000..665000f
--- /dev/null
@@ -0,0 +1,145 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.migration.Status;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+/**
+ * Unit test for the {@code DeletePInterface} migration: p-interfaces whose
+ * source-of-truth is "AAI-CSVP-INSTARAMS" should be removed, but only when
+ * they have no dependent children (physical-link, l-interface) — see the
+ * per-scenario expectations built in {@link #setUp()}.
+ */
+public class DeletePInterfaceTest extends AAISetup {
+
+	private final static ModelType introspectorFactoryType = ModelType.MOXY;
+	private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+	private final static DBConnectionType type = DBConnectionType.REALTIME;
+	private Loader loader;
+	private TransactionalGraphEngine dbEngine;
+	private JanusGraph graph;
+	private DeletePInterface migration;
+	private GraphTraversalSource g;
+	private JanusGraphTransaction tx;
+
+	/**
+	 * Builds four pnf/p-interface scenarios in an in-memory JanusGraph,
+	 * wires the migration to the transaction through Mockito spies, and
+	 * runs it once; the test method then checks each scenario's outcome.
+	 */
+	@Before
+	public void setUp() throws Exception {
+		graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+		tx = graph.newTransaction();
+		g = tx.traversal();
+		loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+		System.setProperty("AJSC_HOME", ".");
+		System.setProperty("BUNDLECONFIG_DIR", "src/test/resources");
+		dbEngine = new JanusGraphDBEngine(
+				queryStyle,
+				type,
+				loader);
+
+		// Scenario 1: INSTARAMS-sourced p-interface with no children
+		// -> expected to be deleted by the migration.
+		Vertex pnf1 = g.addV().property("aai-node-type", "pnf")
+			.property("pnf-name", "pnf-name1")
+			.next();
+		Vertex pInterface1 = g.addV().property("aai-node-type", "p-interface")
+				.property("interface-name", "interface-name1")
+				.property("source-of-truth", "AAI-CSVP-INSTARAMS")
+				.next();
+		edgeSerializer.addTreeEdge(g, pnf1, pInterface1);
+
+		// Scenario 2: INSTARAMS-sourced p-interface attached to a
+		// physical-link -> expected to be skipped (kept).
+		Vertex pnf2 = g.addV().property("aai-node-type", "pnf")
+				.property("pnf-name", "pnf-name2")
+				.next();
+		Vertex pInterface2 = g.addV().property("aai-node-type", "p-interface")
+				.property("interface-name", "interface-name2")
+				.property("source-of-truth", "AAI-CSVP-INSTARAMS")
+				.next();
+		Vertex pLink = g.addV().property("aai-node-type", "physical-link")
+				.property("interface-name", "interface-name1")
+				.next();
+		edgeSerializer.addTreeEdge(g, pnf2, pInterface2);
+		edgeSerializer.addEdge(g, pInterface2, pLink);
+
+		// Scenario 3: INSTARAMS-sourced p-interface with a child
+		// l-interface -> expected to be skipped (kept).
+		Vertex pnf3 = g.addV().property("aai-node-type", "pnf")
+				.property("pnf-name", "pnf-name3")
+				.next();
+		Vertex pInterface3 = g.addV().property("aai-node-type", "p-interface")
+				.property("interface-name", "interface-name3")
+				.property("source-of-truth", "AAI-CSVP-INSTARAMS")
+				.next();
+		Vertex lInterface = g.addV().property("aai-node-type", "l-interface")
+				.property("interface-name", "interface-name3")
+				.next();
+		edgeSerializer.addTreeEdge(g, pnf3, pInterface3);
+		edgeSerializer.addTreeEdge(g, pInterface3, lInterface);
+
+		// Scenario 4: p-interface without the INSTARAMS source-of-truth
+		// -> out of scope for the migration, must not be deleted.
+		Vertex pnf4 = g.addV().property("aai-node-type", "pnf")
+				.property("pnf-name", "pnf-name4")
+				.next();
+		Vertex pInterface4 = g.addV().property("aai-node-type", "p-interface")
+				.property("interface-name", "interface-name4")
+				.next();
+		edgeSerializer.addTreeEdge(g, pnf4, pInterface4);
+
+		// Route the migration's traversal access through spies backed by
+		// the in-memory transaction, then run it once.
+		TransactionalGraphEngine spy = spy(dbEngine);
+		TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+		GraphTraversalSource traversal = g;
+		when(spy.asAdmin()).thenReturn(adminSpy);
+		when(adminSpy.getTraversalSource()).thenReturn(traversal);
+		migration = new DeletePInterface(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+		migration.run();
+	}
+
+	/** Discards all graph changes and closes the in-memory graph. */
+	@After
+	public void tearDown() throws Exception {
+		tx.rollback();
+		graph.close();
+	}
+
+	/**
+	 * Asserts the per-scenario outcomes described in setUp, and that the
+	 * migration itself reports SUCCESS.
+	 */
+	@Test
+	public void test() {
+		// Scenario 1: the childless INSTARAMS p-interface is gone.
+		assertEquals("pInterface1 deleted", false, g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf-name1")
+				.in("tosca.relationships.network.BindsTo").has("aai-node-type", "p-interface").has("interface-name", "interface-name1").hasNext());
+
+		// Scenario 2: kept because of the attached physical-link.
+		assertEquals("pInterface2 skipped", true, g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf-name2")
+				.in("tosca.relationships.network.BindsTo").has("aai-node-type", "p-interface").hasNext());
+
+		// Scenario 3: kept because of the child l-interface.
+		assertEquals("pInterface3 skipped", true, g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf-name3")
+				.in("tosca.relationships.network.BindsTo").has("aai-node-type", "p-interface").hasNext());
+
+		// Scenario 4: kept because it lacks the INSTARAMS source-of-truth.
+		assertEquals("pInterface4 should not be deleted", true, g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf-name4")
+				.in("tosca.relationships.network.BindsTo").has("aai-node-type", "p-interface").has("interface-name", "interface-name4").hasNext());
+
+		assertEquals("Status should be success", Status.SUCCESS, migration.getStatus());
+	}
+
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/MigrateModelVerDistributionStatusPropertyTest.java b/src/test/java/org/onap/aai/migration/v12/MigrateModelVerDistributionStatusPropertyTest.java
new file mode 100644 (file)
index 0000000..3571f5f
--- /dev/null
@@ -0,0 +1,106 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+
+public class MigrateModelVerDistributionStatusPropertyTest extends AAISetup{
+
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+    private final static DBConnectionType type = DBConnectionType.REALTIME;
+    private Loader loader;
+    private TransactionalGraphEngine dbEngine;
+    private JanusGraph graph;
+    private MigrateModelVerDistriubutionStatusProperty migration;
+    private GraphTraversalSource g;
+    private JanusGraphTransaction tx;
+    Vertex modelVer1;
+    Vertex modelVer2;
+
+    @Before
+    public void setUp() throws Exception {
+        graph = JanusGraphFactory.build().set("storage.backend", "inmemory").open();
+        tx = graph.newTransaction();
+        g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                type,
+                loader);
+         modelVer1 = g.addV().property("aai-node-type", "model-ver")
+                .property("model-version-id", "modelVer1")
+                .property("distribution-status", "test1")
+                .next();
+
+        modelVer2 = g.addV().property("aai-node-type", "model-ver")
+                .property("model-version-id", "modelVer1")
+                .next();
+
+        TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+        GraphTraversalSource traversal = g;
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
+        migration = new MigrateModelVerDistriubutionStatusProperty(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        migration.run();
+    }
+
+    @After
+    public void cleanUp() {
+        tx.rollback();
+        graph.close();
+    }
+
+
+    /***
+     * checks if the Distribution Status value was changed
+     */
+
+    @Test
+    public void confirmDistributionStatusChanged() {
+
+        assertEquals("DISTRIBUTION_COMPLETE_OK",modelVer1.property("distribution-status").value());
+        assertEquals("DISTRIBUTION_COMPLETE_OK",modelVer2.property("distribution-status").value());
+
+    }
+
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v12/MigrateServiceInstanceToConfigurationTest.java b/src/test/java/org/onap/aai/migration/v12/MigrateServiceInstanceToConfigurationTest.java
new file mode 100644 (file)
index 0000000..0bfdb41
--- /dev/null
@@ -0,0 +1,404 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+public class MigrateServiceInstanceToConfigurationTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private MigrateServiceInstanceToConfiguration migration;
+       private JanusGraphTransaction tx;
+       private GraphTraversalSource g;
+
+	/**
+	 * Builds four customer graphs in an in-memory JanusGraph and runs the
+	 * MigrateServiceInstanceToConfiguration migration once against them:
+	 *   graph 1 - DHV and OTHER subscriptions, each with a service instance;
+	 *   graph 2 - DHV subscription with no service instance;
+	 *   graph 3 - DHV service instance already linked to DHV + OTHER configs;
+	 *   graph 4 - DHV service instance linked only to OTHER configs.
+	 * The individual test methods assert the expected outcome per graph.
+	 */
+	@Before
+	public void setUp() throws Exception {
+		graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+		tx = graph.newTransaction();
+		g = tx.traversal();
+		loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+		dbEngine = new JanusGraphDBEngine(
+				queryStyle,
+				type,
+				loader);
+
+		Vertex customer1 = g.addV()
+				.property("aai-node-type", "customer")
+				.property("global-customer-id", "customer-id-1")
+				.property("subscriber-type", "CUST")
+				.next();
+
+		Vertex customer2 = g.addV()
+				.property("aai-node-type", "customer")
+				.property("global-customer-id", "customer-id-2")
+				.property("subscriber-type", "CUST")
+				.next();
+
+		Vertex customer3 = g.addV()
+				.property("aai-node-type", "customer")
+				.property("global-customer-id", "customer-id-3")
+				.property("subscriber-type", "CUST")
+				.next();
+
+		Vertex customer4 = g.addV()
+				.property("aai-node-type", "customer")
+				.property("global-customer-id", "customer-id-4")
+				.property("subscriber-type", "CUST")
+				.next();
+
+		Vertex servSub1 = g.addV()
+				.property("aai-node-type", "service-subscription")
+				.property("service-type", "DHV")
+				.next();
+
+		Vertex servSub2 = g.addV()
+				.property("aai-node-type", "service-subscription")
+				.property("service-type", "OTHER")
+				.next();
+
+		Vertex servSub3 = g.addV()
+				.property("aai-node-type", "service-subscription")
+				.property("service-type", "DHV")
+				.next();
+
+		Vertex servSub4 = g.addV()
+				.property("aai-node-type", "service-subscription")
+				.property("service-type", "DHV")
+				.next();
+
+		Vertex servSub5 = g.addV()
+				.property("aai-node-type", "service-subscription")
+				.property("service-type", "DHV")
+				.next();
+
+		Vertex servInstance1 = g.addV()
+				.property("aai-node-type", "service-instance")
+				.property("service-instance-id", "service-inst-1")
+				.property("operational-status", "activated")
+				.property("bandwidth-total", "5")
+				.next();
+
+		Vertex servInstance2 = g.addV()
+				.property("aai-node-type", "service-instance")
+				.property("service-instance-id", "service-inst-2")
+				.property("operational-status", "activated")
+				.property("bandwidth-total", "8")
+				.next();
+
+		Vertex servInstance3 = g.addV()
+				.property("aai-node-type", "service-instance")
+				.property("service-instance-id", "service-inst-3")
+				.property("operational-status", "activated")
+				.property("bandwidth-total", "10")
+				.next();
+
+		Vertex servInstance4 = g.addV()
+				.property("aai-node-type", "service-instance")
+				.property("service-instance-id", "service-inst-4")
+				.property("operational-status", "activated")
+				.property("bandwidth-total", "15")
+				.next();
+
+		Vertex config1 = g.addV()
+				.property("aai-node-type", "configuration")
+				.property("configuration-id", "configuration-1")
+				.property("configuration-type", "DHV")
+				.property("tunnel-bandwidth", "7")
+				.next();
+
+		Vertex config2 = g.addV()
+				.property("aai-node-type", "configuration")
+				.property("configuration-id", "configuration-2")
+				.property("configuration-type", "OTHER")
+				.property("tunnel-bandwidth", "3")
+				.next();
+
+		Vertex config3 = g.addV()
+				.property("aai-node-type", "configuration")
+				.property("configuration-id", "configuration-3")
+				.property("configuration-type", "OTHER")
+				.property("tunnel-bandwidth", "2")
+				.next();
+
+		Vertex config4 = g.addV()
+				.property("aai-node-type", "configuration")
+				.property("configuration-id", "configuration-4")
+				.property("configuration-type", "OTHER")
+				.property("tunnel-bandwidth", "4")
+				.next();
+
+		// graph 1: DHV and OTHER subscriptions, one service instance each
+		edgeSerializer.addTreeEdge(g, customer1, servSub1);
+		edgeSerializer.addTreeEdge(g, customer1, servSub2);
+		edgeSerializer.addTreeEdge(g, servSub1, servInstance1);
+		edgeSerializer.addTreeEdge(g, servSub2, servInstance2);
+
+		// graph 2: DHV subscription with no service instance
+		edgeSerializer.addTreeEdge(g, customer2, servSub3);
+
+		// graph 3: DHV service instance already linked to DHV and OTHER configs
+		edgeSerializer.addTreeEdge(g, customer3, servSub4);
+		edgeSerializer.addTreeEdge(g, servSub4, servInstance3);
+		edgeSerializer.addEdge(g, servInstance3, config1);
+		edgeSerializer.addEdge(g, servInstance3, config2);
+
+		// graph 4: DHV service instance linked only to OTHER configs
+		edgeSerializer.addTreeEdge(g, customer4, servSub5);
+		edgeSerializer.addTreeEdge(g, servSub5, servInstance4);
+		edgeSerializer.addEdge(g, servInstance4, config3);
+		edgeSerializer.addEdge(g, servInstance4, config4);
+
+		// Route the migration's graph access (tx, read-write and read-only
+		// traversals) through spies backed by the in-memory transaction.
+		TransactionalGraphEngine spy = spy(dbEngine);
+		TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+		GraphTraversalSource traversal = g;
+		GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+		when (spy.tx()).thenReturn(tx);
+		when(spy.asAdmin()).thenReturn(adminSpy);
+		when(adminSpy.getTraversalSource()).thenReturn(traversal);
+		when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+
+		migration = new MigrateServiceInstanceToConfiguration(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+		migration.run();
+	}
+       
+	/**
+	 * Discards all graph changes made by setUp and the migration, then
+	 * closes the in-memory JanusGraph.
+	 * NOTE(review): rollback is invoked via tx.tx() here while sibling test
+	 * classes call tx.rollback() directly — presumably equivalent since the
+	 * transaction also implements the Graph interface; confirm.
+	 */
+	@After
+	public void cleanUp() {
+		tx.tx().rollback();
+		graph.close();
+	}
+       
+	/**
+	 * Graph 1, DHV branch: the migration must create a configuration node
+	 * of configuration-type DHV, linked to the service instance by a Uses
+	 * edge, with tunnel-bandwidth copied from the instance's
+	 * bandwidth-total ("5").
+	 */
+	@Test
+	public void testRun_createConfigNode() throws Exception {
+		// check if graph nodes exist
+		assertEquals("customer node exists", true, 
+				g.V().has("global-customer-id", "customer-id-1")
+				.hasNext());
+
+		assertEquals("service subscription node, service-type=DHV", true, 
+				g.V().has("global-customer-id", "customer-id-1")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+				.hasNext());
+
+		assertEquals("service instance node, bandwidth-total=5", true, 
+				g.V().has("global-customer-id", "customer-id-1")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-1").has("bandwidth-total", "5")
+				.hasNext());
+
+		// check if configuration node gets created
+		assertEquals("configuration node exists", true, 
+				g.V().has("global-customer-id", "customer-id-1")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-1")
+				.out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+				.hasNext());
+
+		// check configuration type
+		assertEquals("configuration node, configuration-type=DHV", true, 
+				g.V().has("global-customer-id", "customer-id-1")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-1")
+				.out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration").has("configuration-type", "DHV")
+				.hasNext());
+
+		// check configuration tunnel-bandwidth (copied from bandwidth-total)
+		assertEquals("configuration node, tunnel-bandwidth=5", true, 
+				g.V().has("global-customer-id", "customer-id-1")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-1")
+				.out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration").has("tunnel-bandwidth", "5")
+				.hasNext());
+	}
+
+       @Test
+       public void testRun_configNodeNotCreated() throws Exception {
+               // check if graph nodes exist
+               assertEquals("customer node exists", true, 
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .hasNext());
+               
+               assertEquals("service subscription node, service-type=OTHER", true, 
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "OTHER")
+                               .hasNext());
+               
+               assertEquals("service instance node, bandwidth-total=8", true, 
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "OTHER")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-2").has("bandwidth-total", "8")
+                               .hasNext());
+               
+               // configuration node should not be created
+               assertEquals("configuration node does not exist", false, 
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "OTHER")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-2")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .hasNext());
+               
+               // edge between service instance and configuration should not be created
+               assertEquals("configuration node does not exist", false, 
+                               g.V().has("global-customer-id", "customer-id-1")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "OTHER")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-2")
+                               .out("org.onap.relationships.inventory.Uses").hasNext());
+       }
+       
+       @Test
+       public void testRun_noServiceInstance() throws Exception {
+               // check if graph nodes exist
+               assertEquals("customer node exists", true, 
+                               g.V().has("global-customer-id", "customer-id-2")
+                               .hasNext());
+               
+               assertEquals("service subscription node, service-type=DHV", true, 
+                               g.V().has("global-customer-id", "customer-id-2")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .hasNext());
+               
+               // no service instance nodes
+               assertEquals("no service instance nodes", false, 
+                               g.V().has("global-customer-id", "customer-id-2")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("aai-node-type", "service-instance")
+                               .hasNext());
+       }
+       
+	/**
+	 * Graph 3: a service instance already linked to a DHV configuration
+	 * and an OTHER configuration. The migration must update only the DHV
+	 * configuration's tunnel-bandwidth (to the instance's bandwidth-total,
+	 * "10") and leave the OTHER configuration untouched ("3").
+	 */
+	@Test
+	public void testRun_existingConfig() throws Exception {
+		// check if graph nodes exist
+		assertEquals("customer node exists", true, 
+				g.V().has("global-customer-id", "customer-id-3")
+				.hasNext());
+
+		assertEquals("service subscription node, service-type=DHV", true, 
+				g.V().has("global-customer-id", "customer-id-3")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+				.hasNext());
+
+		assertEquals("service instance node, bandwidth-total=10", true, 
+				g.V().has("global-customer-id", "customer-id-3")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-3").has("bandwidth-total", "10")
+				.hasNext());
+
+		// DHV configuration updated in place
+		assertEquals("configuration node with type DHV, tunnel-bandwidth changed to 10", true, 
+				g.V().has("global-customer-id", "customer-id-3")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-3")
+				.out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration").has("tunnel-bandwidth", "10")
+				.hasNext());
+
+		// OTHER configuration left untouched
+		assertEquals("configuration node with type OTHER, tunnel-bandwidth remains same", true, 
+				g.V().has("global-customer-id", "customer-id-3")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+				.in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-3")
+				.out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration").has("tunnel-bandwidth", "3")
+				.hasNext());
+	}
+       
+       @Test
+       public void testRun_existingConfigNotDHV() throws Exception {
+               // Per the assertion messages: service-inst-4 only had non-DHV
+               // configurations, so the migration creates a new DHV configuration
+               // carrying the instance's bandwidth-total (15) and leaves the two
+               // existing configurations unchanged.
+               // check if graph nodes exist
+               assertEquals("customer node exists", true, 
+                               g.V().has("global-customer-id", "customer-id-4")
+                               .hasNext());
+               
+               assertEquals("service subscription node, service-type=DHV", true, 
+                               g.V().has("global-customer-id", "customer-id-4")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .hasNext());
+               
+               assertEquals("service instance node, bandwidth-total=15", true, 
+                               g.V().has("global-customer-id", "customer-id-4")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-4").has("bandwidth-total", "15")
+                               .hasNext());
+               
+               assertEquals("first configuration node with type OTHER, tunnel-bandwidth remains same", true, 
+                               g.V().has("global-customer-id", "customer-id-4")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-4")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration").has("tunnel-bandwidth", "2")
+                               .hasNext());
+               
+               assertEquals("second configuration node with type OTHER, tunnel-bandwidth remains same", true, 
+                               g.V().has("global-customer-id", "customer-id-4")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-4")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration").has("tunnel-bandwidth", "4")
+                               .hasNext());
+               
+               // newly created DHV configuration inherits bandwidth-total as tunnel-bandwidth
+               assertEquals("new configuration node created with type DHV, tunnel-bandwidth=15", true, 
+                               g.V().has("global-customer-id", "customer-id-4")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "service-inst-4")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration").has("tunnel-bandwidth", "15")
+                               .hasNext());
+       }
+       
+       @Test
+       public void testGetAffectedNodeTypes() {
+               // The migration must declare exactly one affected node type:
+               // "service-instance".
+               Optional<String[]> types = migration.getAffectedNodeTypes();
+               Optional<String[]> expected = Optional.of(new String[]{"service-instance"});
+               
+               assertNotNull(types);
+               assertArrayEquals(expected.get(), types.get());
+       }
+
+       @Test
+       public void testGetMigrationName() {
+               // Pins the migration identifier so an accidental rename is caught.
+               String migrationName = migration.getMigrationName();
+
+               assertNotNull(migrationName);
+               assertEquals("service-instance-to-configuration", migrationName);
+       }
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/MigrateServiceInstanceToConfigurationTestPreMigrationMock.java b/src/test/java/org/onap/aai/migration/v12/MigrateServiceInstanceToConfigurationTestPreMigrationMock.java
new file mode 100644 (file)
index 0000000..ccec10b
--- /dev/null
@@ -0,0 +1,299 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+/**
+ * Exercises {@code MigrateServiceInstanceToConfiguration} against a
+ * pre-migration mock graph built in an in-memory JanusGraph instance:
+ * the fixture (see setUp) creates a customer with SDN-ETHERNET-INTERNET and
+ * DHV service subscriptions plus existing configuration nodes, runs the
+ * migration once, and the tests assert on the resulting graph state.
+ */
+public class MigrateServiceInstanceToConfigurationTestPreMigrationMock extends AAISetup {
+
+       // Fixed engine configuration used to construct the JanusGraphDBEngine under test.
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;        // spied engine handed to the migration
+       private JanusGraph graph;                         // in-memory JanusGraph backing the test
+       private MigrateServiceInstanceToConfiguration migration;  // object under test
+       private JanusGraphTransaction tx;                 // transaction all fixture writes go through
+       private GraphTraversalSource g;                   // traversal source bound to tx
+
+       @Before
+       public void setUp() throws Exception {
+               // Open a throwaway in-memory JanusGraph; all fixture writes happen on tx
+               // and are rolled back in cleanUp().
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               // Fixture graph: one customer owning an SDN-ETHERNET-INTERNET and a DHV
+               // service subscription; the DHV subscription holds four service
+               // instances, one of which (servInstance1) already has a DHV-type
+               // configuration (config1) and another (servInstance2) a non-DHV
+               // configuration (config2).
+               Vertex customer = g.addV()
+                               .property("aai-node-type", "customer")
+                               .property("global-customer-id", "customer-9972-BandwidthMigration")
+                               .property("subscriber-type", "CUST")
+                               .next();
+               
+               Vertex servSubSDNEI = g.addV()
+                               .property("aai-node-type", "service-subscription")
+                               .property("service-type", "SDN-ETHERNET-INTERNET")
+                               .next();
+               
+               Vertex servInstance22 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "servInstance-9972-22-BandwidthMigration")
+                               .property("operational-status", "activated")
+                               .property("bandwidth-total", "bandwidth-total-22-BandwidthMigration")
+                               .next();
+               
+               Vertex servInstance11 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "servInstance-9972-11-BandwidthMigration")
+                               .property("operational-status", "activated")
+                               .property("bandwidth-total", "bandwidth-total-11-BandwidthMigration")
+                               .next();
+               
+               Vertex servSubDHV = g.addV()
+                               .property("aai-node-type", "service-subscription")
+                               .property("service-type", "DHV")
+                               .next();
+               
+               Vertex servInstance4 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "servInstance-9972-4-BandwidthMigration")
+                               .property("operational-status", "activated")
+                               .property("bandwidth-total", "bandwidth-total-4-BandwidthMigration")
+                               .next();
+               
+               // note: id deliberately capitalized ("ServInstance-...") and numeric
+               // bandwidth-total — this is the instance whose existing DHV config
+               // gets its tunnel-bandwidth updated to 2380 (see testRun).
+               Vertex servInstance1 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "ServInstance-9972-1-BandwidthMigration")
+                               .property("operational-status", "activated")
+                               .property("bandwidth-total", "2380")
+                               .next();
+               
+               Vertex servInstance3 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "servInstance-9972-3-BandwidthMigration")
+                               .property("operational-status", "activated")
+                               .property("bandwidth-total", "bandwidth-total-3-BandwidthMigration")
+                               .next();
+
+               Vertex servInstance2 = g.addV()
+                               .property("aai-node-type", "service-instance")
+                               .property("service-instance-id", "servInstance-9972-2-BandwidthMigration")
+                               .property("operational-status", "activated")
+                               .property("bandwidth-total", "bandwidth-total-2-BandwidthMigration")
+                               .next();
+               
+               Vertex config1 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "9972-config-LB1113")
+                               .property("configuration-type", "DHV")
+                               .property("tunnel-bandwidth", "12")
+                               .next();
+               
+               Vertex config2 = g.addV()
+                               .property("aai-node-type", "configuration")
+                               .property("configuration-id", "9972-1config-LB1113")
+                               .property("configuration-type", "configuration-type1-9972")
+                               .next();
+               
+               Vertex allottedResource = g.addV()
+                               .property("aai-node-type", "allotted-resource")
+                               .property("id", "allResource-9972-BandwidthMigration")
+                               .next();
+
+               // Wire up containment (tree) and relationship (cousin) edges.
+               edgeSerializer.addTreeEdge(g, customer, servSubSDNEI);
+               edgeSerializer.addTreeEdge(g, customer, servSubDHV);
+               edgeSerializer.addTreeEdge(g, servSubSDNEI, servInstance22);
+               edgeSerializer.addTreeEdge(g, servSubSDNEI, servInstance11);
+               edgeSerializer.addTreeEdge(g, servSubDHV, servInstance4);
+               edgeSerializer.addTreeEdge(g, servSubDHV, servInstance1);
+               edgeSerializer.addTreeEdge(g, servSubDHV, servInstance3);
+               edgeSerializer.addTreeEdge(g, servSubDHV, servInstance2);
+               edgeSerializer.addEdge(g, servInstance1, allottedResource);
+               edgeSerializer.addEdge(g, servInstance1, config1);
+               edgeSerializer.addEdge(g, servInstance2, config2);
+
+               // Spy on the engine so the migration's tx()/traversal accesses are
+               // redirected to this test's in-memory transaction.
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when (spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+               
+               // Run the migration once here; every @Test below asserts on the result.
+               migration = new MigrateServiceInstanceToConfiguration(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+       
+       @After
+       public void cleanUp() {
+               // Discard all fixture mutations and release the in-memory graph.
+               tx.tx().rollback();
+               graph.close();
+       }
+       
+       @Test
+       public void testRun() throws Exception {
+               // setUp() already ran the migration. Per the assertion messages below:
+               // the original customer/subscription/instance nodes must survive intact,
+               // DHV instances 2, 3 and 4 each gain a new DHV configuration carrying
+               // their bandwidth-total, and instance 1's pre-existing DHV configuration
+               // is updated to tunnel-bandwidth=2380. The non-DHV configuration and
+               // all SDN-ETHERNET-INTERNET instances are left untouched.
+               // check if graph nodes exist
+               assertEquals("customer node exists", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .hasNext());
+
+               assertEquals("service subscription node, service-type=SDN-ETHERNET-INTERNET", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SDN-ETHERNET-INTERNET")
+                               .hasNext());
+
+               assertEquals("service instance node, bandwidth-total=bandwidth-total-22-BandwidthMigration", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SDN-ETHERNET-INTERNET")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "servInstance-9972-22-BandwidthMigration")
+                               .has("bandwidth-total", "bandwidth-total-22-BandwidthMigration")
+                               .hasNext());
+               
+               assertEquals("service instance node, bandwidth-total=bandwidth-total-11-BandwidthMigration", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "SDN-ETHERNET-INTERNET")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "servInstance-9972-11-BandwidthMigration")
+                               .has("bandwidth-total", "bandwidth-total-11-BandwidthMigration")
+                               .hasNext());
+               
+               assertEquals("service subscription node, service-type=DHV", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .hasNext());
+
+               assertEquals("service instance node, bandwidth-total=servInstance-9972-4-BandwidthMigration", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "servInstance-9972-4-BandwidthMigration")
+                               .has("bandwidth-total", "bandwidth-total-4-BandwidthMigration")
+                               .hasNext());
+               
+               assertEquals("service instance node, bandwidth-total=ServInstance-9972-1-BandwidthMigration", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "ServInstance-9972-1-BandwidthMigration")
+                               .has("bandwidth-total", "2380")
+                               .hasNext());
+               
+               assertEquals("service instance node, bandwidth-total=servInstance-9972-3-BandwidthMigration", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "servInstance-9972-3-BandwidthMigration")
+                               .has("bandwidth-total", "bandwidth-total-3-BandwidthMigration")
+                               .hasNext());
+               
+               assertEquals("service instance node, bandwidth-total=servInstance-9972-2-BandwidthMigration", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "servInstance-9972-2-BandwidthMigration")
+                               .has("bandwidth-total", "bandwidth-total-2-BandwidthMigration")
+                               .hasNext());
+               
+               assertEquals("configuration node with type=configuration-type1-9972, tunnel-bandwidth does not exist", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "servInstance-9972-2-BandwidthMigration")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-type", "configuration-type1-9972")
+                               .hasNext());
+               
+               // check if configuration node gets created for 2, 3, 4
+               assertEquals("configuration node created with type=DHV, tunnel-bandwidth=servInstance-9972-4-BandwidthMigration", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "servInstance-9972-4-BandwidthMigration")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-type", "DHV").has("tunnel-bandwidth", "bandwidth-total-4-BandwidthMigration")
+                               .hasNext());
+               
+               assertEquals("configuration node created with type=DHV, tunnel-bandwidth=servInstance-9972-3-BandwidthMigration", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "servInstance-9972-3-BandwidthMigration")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-type", "DHV").has("tunnel-bandwidth", "bandwidth-total-3-BandwidthMigration")
+                               .hasNext());
+               
+               assertEquals("configuration node created with type=DHV, tunnel-bandwidth=servInstance-9972-2-BandwidthMigration", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "servInstance-9972-2-BandwidthMigration")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-type", "DHV").has("tunnel-bandwidth", "bandwidth-total-2-BandwidthMigration")
+                               .hasNext());
+               
+               // configuration modified for ServInstance-9972-1-BandwidthMigration
+               assertEquals("configuration node modified for ServInstance-9972-1-BandwidthMigration, tunnel-bandwidth=2380", true, 
+                               g.V().has("global-customer-id", "customer-9972-BandwidthMigration")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-type", "DHV")
+                               .in("org.onap.relationships.inventory.BelongsTo").has("service-instance-id", "ServInstance-9972-1-BandwidthMigration")
+                               .out("org.onap.relationships.inventory.Uses").has("aai-node-type", "configuration")
+                               .has("configuration-type", "DHV").has("tunnel-bandwidth", "2380")
+                               .hasNext());
+       }
+       
+       @Test
+       public void testGetAffectedNodeTypes() {
+               // The migration must declare exactly one affected node type:
+               // "service-instance".
+               Optional<String[]> types = migration.getAffectedNodeTypes();
+               Optional<String[]> expected = Optional.of(new String[]{"service-instance"});
+               
+               assertNotNull(types);
+               assertArrayEquals(expected.get(), types.get());
+       }
+
+       @Test
+       public void testGetMigrationName() {
+               // Pins the migration identifier so an accidental rename is caught.
+               String migrationName = migration.getMigrationName();
+
+               assertNotNull(migrationName);
+               assertEquals("service-instance-to-configuration", migrationName);
+       }
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/SDWANSpeedChangeMigrationTest.java b/src/test/java/org/onap/aai/migration/v12/SDWANSpeedChangeMigrationTest.java
new file mode 100644 (file)
index 0000000..8041880
--- /dev/null
@@ -0,0 +1,375 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class SDWANSpeedChangeMigrationTest extends AAISetup {
+
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+    private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+    private Loader loader;
+    private TransactionalGraphEngine dbEngine;
+    private JanusGraph graph;
+    private SDWANSpeedChangeMigration migration;
+    private GraphTraversalSource g;
+    private JanusGraphTransaction tx;
+
+    Vertex pLinkWan1;
+    Vertex pLinkWan3;
+    Vertex pLinkWan5;
+    Vertex pLinkWan7;
+
+    @Before
+    public void setUp() throws Exception {
+        graph = JanusGraphFactory.build().set("storage.backend", "inmemory").open();
+        tx = graph.newTransaction();
+        g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                type,
+                loader);
+
+        Vertex servSub1 = g.addV().property("aai-node-type", "service-subscription")
+                .property("service-type", "DHV")
+                .next();
+        Vertex servinst1 = g.addV().property("aai-node-type", "service-instance")
+                .property("service-type", "DHV")
+                .next();
+        Vertex allotedRsrc1 = g.addV().property("aai-node-type", "allotted-resource")
+                .property("id", "rsrc1")
+                .next();
+        Vertex servinst2 = g.addV().property("aai-node-type", "service-instance")
+                .property("service-type", "VVIG")
+                .next();
+        Vertex servSub2 = g.addV().property("aai-node-type", "service-subscription")
+                .property("service-type", "VVIG")
+                .next();
+
+        Vertex genericvnf1 = g.addV().property("aai-node-type", "generic-vnf")
+                .property("vnf-id", "vnfId1")
+                .next();
+        Vertex vServer1 = g.addV().property("aai-node-type", "vserver")
+                .next();
+        Vertex pServer1 = g.addV().property("aai-node-type", "pserver")
+                .next();
+        Vertex pInterfaceWan1 = g.addV().property("aai-node-type", "p-interface")
+                .property("interface-name", "ge-0/0/10")
+                .next();
+        Vertex tunnelXConnectAll_Wan1 = g.addV().property("aai-node-type", "tunnel-xconnect")
+                .property("id", "txc1")
+                .property("bandwidth-up-wan1", "300 Mbps")
+                .property("bandwidth-down-wan1", "400 Mbps")
+                .property("bandwidth-up-wan2", "500 Mbps")
+                .property("bandwidth-down-wan2", "600 Mbps")
+                .next();
+
+        pLinkWan1 = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLinkWan1")
+                .property("service-provider-bandwidth-up-value", "empty")
+                .property("service-provider-bandwidth-up-units", "empty")
+                .property("service-provider-bandwidth-down-value", "empty")
+                .property("service-provider-bandwidth-down-units", "empty")
+                .next();
+        Vertex servSub3 = g.addV().property("aai-node-type", "service-subscription")
+                .property("service-type", "DHV")
+                .next();
+        Vertex servinst3 = g.addV().property("aai-node-type", "service-instance")
+                .property("service-type", "DHV")
+                .next();
+        Vertex allotedRsrc3 = g.addV().property("aai-node-type", "allotted-resource")
+                .property("id", "rsrc1")
+                .next();
+        Vertex servinst4 = g.addV().property("aai-node-type", "service-instance")
+                .property("service-type", "VVIG")
+                .next();
+        Vertex servSub4 = g.addV().property("aai-node-type", "service-subscription")
+                .property("service-type", "VVIG")
+                .next();
+
+        Vertex genericvnf3 = g.addV().property("aai-node-type", "generic-vnf")
+                .property("vnf-id", "vnfId1")
+                .next();
+        Vertex vServer3 = g.addV().property("aai-node-type", "vserver")
+                .next();
+        Vertex pServer3 = g.addV().property("aai-node-type", "pserver")
+                .next();
+        Vertex pInterfaceWan3 = g.addV().property("aai-node-type", "p-interface")
+                .property("interface-name", "ge-0/0/11")
+                .next();
+        Vertex tunnelXConnectAll_Wan3 = g.addV().property("aai-node-type", "tunnel-xconnect")
+                .property("id", "txc3")
+                .property("bandwidth-up-wan1", "300 Mbps")
+                .property("bandwidth-down-wan1", "400 Mbps")
+                .property("bandwidth-up-wan2", "500 Mbps")
+                .property("bandwidth-down-wan2", "600 Mbps")
+                .next();
+
+        pLinkWan3 = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLinkWan3")
+                .property("service-provider-bandwidth-up-value", "empty")
+                .property("service-provider-bandwidth-up-units", "empty")
+                .property("service-provider-bandwidth-down-value", "empty")
+                .property("service-provider-bandwidth-down-units", "empty")
+                .next();
+
+
+        Vertex servSub5 = g.addV().property("aai-node-type", "service-subscription")
+                .property("service-type", "DHV")
+                .next();
+        Vertex servinst5 = g.addV().property("aai-node-type", "service-instance")
+                .property("service-type", "DHV")
+                .next();
+        Vertex allotedRsrc5 = g.addV().property("aai-node-type", "allotted-resource")
+                .property("id", "rsrc1")
+                .next();
+        Vertex servinst6 = g.addV().property("aai-node-type", "service-instance")
+                .property("service-type", "VVIG")
+                .next();
+        Vertex servSub6 = g.addV().property("aai-node-type", "service-subscription")
+                .property("service-type", "VVIG")
+                .next();
+
+        Vertex genericvnf5 = g.addV().property("aai-node-type", "generic-vnf")
+                .property("vnf-id", "vnfId1")
+                .next();
+        Vertex vServer5 = g.addV().property("aai-node-type", "vserver")
+                .next();
+        Vertex pServer5 = g.addV().property("aai-node-type", "pserver")
+                .next();
+        Vertex pInterfaceWan5 = g.addV().property("aai-node-type", "p-interface")
+                .property("interface-name", "ge-0/0/10")
+                .next();
+        Vertex tunnelXConnectAll_Wan5 = g.addV().property("aai-node-type", "tunnel-xconnect")
+                .property("id", "txc5")
+                .property("bandwidth-up-wan1", "")
+                .property("bandwidth-down-wan1", "")
+                .property("bandwidth-up-wan2", "500 Mbps")
+                .property("bandwidth-down-wan2", "600 Mbps")
+                .next();
+
+        pLinkWan5 = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLinkWan5")
+                .property("service-provider-bandwidth-up-value", "")
+                .property("service-provider-bandwidth-up-units", "")
+                .property("service-provider-bandwidth-down-value", "")
+                .property("service-provider-bandwidth-down-units", "")
+                .next();
+
+
+        Vertex servSub7 = g.addV().property("aai-node-type", "service-subscription")
+                .property("service-type", "DHV")
+                .next();
+        Vertex servinst7 = g.addV().property("aai-node-type", "service-instance")
+                .property("service-type", "DHV")
+                .next();
+        Vertex allotedRsrc7 = g.addV().property("aai-node-type", "allotted-resource")
+                .property("id", "rsrc1")
+                .next();
+        Vertex servinst9 = g.addV().property("aai-node-type", "service-instance")
+                .property("service-type", "VVIG")
+                .next();
+        Vertex servSub9 = g.addV().property("aai-node-type", "service-subscription")
+                .property("service-type", "VVIG")
+                .next();
+
+        Vertex genericvnf7 = g.addV().property("aai-node-type", "generic-vnf")
+                .property("vnf-id", "vnfId1")
+                .next();
+        Vertex vServer7 = g.addV().property("aai-node-type", "vserver")
+                .next();
+        Vertex pServer7 = g.addV().property("aai-node-type", "pserver")
+                .next();
+        Vertex pInterfaceWan7 = g.addV().property("aai-node-type", "p-interface")
+                .property("interface-name", "ge-0/0/11")
+                .next();
+        Vertex tunnelXConnectAll_Wan7 = g.addV().property("aai-node-type", "tunnel-xconnect")
+                .property("id", "txc7")
+                .property("bandwidth-up-wan1", "300 Mbps")
+                .property("bandwidth-down-wan1", "400 Mbps")
+                .property("bandwidth-up-wan2", "")
+                .property("bandwidth-down-wan2", "")
+                .next();
+
+        pLinkWan7 = g.addV().property("aai-node-type", "physical-link")
+                .property("link-name", "pLinkWan5")
+                .property("service-provider-bandwidth-up-value", "")
+                .property("service-provider-bandwidth-up-units", "")
+                .property("service-provider-bandwidth-down-value", "")
+                .property("service-provider-bandwidth-down-units", "")
+                .next();
+
+
+
+        edgeSerializer.addTreeEdge(g, servSub1, servinst1);
+        edgeSerializer.addEdge(g, servinst1, allotedRsrc1);
+        edgeSerializer.addTreeEdge(g, servinst2, servSub2);
+        edgeSerializer.addTreeEdge(g, allotedRsrc1, servinst2);
+
+        edgeSerializer.addTreeEdge(g, allotedRsrc1, tunnelXConnectAll_Wan1);
+
+
+        edgeSerializer.addEdge(g, servinst1, genericvnf1);
+        edgeSerializer.addEdge(g, genericvnf1, vServer1);
+        edgeSerializer.addEdge(g, vServer1, pServer1);
+        edgeSerializer.addTreeEdge(g, pServer1, pInterfaceWan1);
+        edgeSerializer.addEdge(g, pInterfaceWan1, pLinkWan1);
+
+        edgeSerializer.addTreeEdge(g, servSub3, servinst3);
+        edgeSerializer.addEdge(g, servinst3, allotedRsrc3);
+        edgeSerializer.addTreeEdge(g, servinst4, servSub4);
+        edgeSerializer.addTreeEdge(g, allotedRsrc3, servinst4);
+
+        edgeSerializer.addTreeEdge(g, allotedRsrc3, tunnelXConnectAll_Wan3);
+
+
+        edgeSerializer.addEdge(g, servinst3, genericvnf3);
+        edgeSerializer.addEdge(g, genericvnf3, vServer3);
+        edgeSerializer.addEdge(g, vServer3, pServer3);
+        edgeSerializer.addTreeEdge(g, pServer3, pInterfaceWan3);
+        edgeSerializer.addEdge(g, pInterfaceWan3, pLinkWan3);
+
+
+        edgeSerializer.addTreeEdge(g, servSub5, servinst5);
+        edgeSerializer.addEdge(g, servinst5, allotedRsrc5);
+        edgeSerializer.addTreeEdge(g, servinst6, servSub6);
+        edgeSerializer.addTreeEdge(g, allotedRsrc5, servinst6);
+
+        edgeSerializer.addTreeEdge(g, allotedRsrc5, tunnelXConnectAll_Wan5);
+
+
+        edgeSerializer.addEdge(g, servinst5, genericvnf5);
+        edgeSerializer.addEdge(g, genericvnf5, vServer5);
+        edgeSerializer.addEdge(g, vServer5, pServer5);
+        edgeSerializer.addTreeEdge(g, pServer5, pInterfaceWan5);
+        edgeSerializer.addEdge(g, pInterfaceWan5, pLinkWan5);
+
+        edgeSerializer.addTreeEdge(g, servSub7, servinst7);
+        edgeSerializer.addEdge(g, servinst7, allotedRsrc7);
+        edgeSerializer.addTreeEdge(g, servinst9, servSub9);
+        edgeSerializer.addTreeEdge(g, allotedRsrc7, servinst9);
+
+        edgeSerializer.addTreeEdge(g, allotedRsrc7, tunnelXConnectAll_Wan7);
+
+
+        edgeSerializer.addEdge(g, servinst7, genericvnf7);
+        edgeSerializer.addEdge(g, genericvnf7, vServer7);
+        edgeSerializer.addEdge(g, vServer7, pServer7);
+        edgeSerializer.addTreeEdge(g, pServer7, pInterfaceWan7);
+        edgeSerializer.addEdge(g, pInterfaceWan7, pLinkWan7);
+
+
+        TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+        GraphTraversalSource traversal = g;
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
+        migration = new SDWANSpeedChangeMigration(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        migration.run();
+    }
+
+
+    @After
+    public void cleanUp() {
+        // Discard everything setUp() created: roll back the open transaction
+        // first so no fixture data is committed, then release the in-memory graph.
+        tx.rollback();
+        graph.close();
+    }
+
+
+    /***
+     * Checks to see if the Wan1 properties were updated in the physical link
+     */
+
+    @Test
+    public void ConfirmWan1Changes() {
+
+        assertEquals("300", pLinkWan1.property("service-provider-bandwidth-up-value").value().toString());
+        assertEquals("Mbps", pLinkWan1.property("service-provider-bandwidth-up-units").value().toString());
+        assertEquals("400", pLinkWan1.property("service-provider-bandwidth-down-value").value().toString());
+        assertEquals("Mbps", pLinkWan1.property("service-provider-bandwidth-down-units").value().toString());
+
+    }
+
+    /***
+     * Checks to see if the Wan2 properties were updated in the physical link
+     */
+    @Test
+    public void ConfirmWan2Changes() {
+
+        assertEquals("500", pLinkWan3.property("service-provider-bandwidth-up-value").value().toString());
+        assertEquals("Mbps", pLinkWan3.property("service-provider-bandwidth-up-units").value().toString());
+        assertEquals("600", pLinkWan3.property("service-provider-bandwidth-down-value").value().toString());
+        assertEquals("Mbps", pLinkWan3.property("service-provider-bandwidth-down-units").value().toString());
+
+    }
+
+    /***
+     * if tunnel xconncets missing bandwidth up 1 value the plink should not be updated
+     */
+
+    @Test
+    public void Wan1EmptyNoChanges() {
+
+        assertEquals("", pLinkWan5.property("service-provider-bandwidth-up-value").value().toString());
+        assertEquals("", pLinkWan5.property("service-provider-bandwidth-up-units").value().toString());
+        assertEquals("", pLinkWan5.property("service-provider-bandwidth-down-value").value().toString());
+        assertEquals("", pLinkWan5.property("service-provider-bandwidth-down-units").value().toString());
+
+    }
+
+    /***
+     * if tunnel xconncets missing bandwidth up 2 value the plink should not be updated
+     */
+
+    @Test
+    public void Wan2EmptyNoChanges() {
+
+        assertEquals("", pLinkWan7.property("service-provider-bandwidth-up-value").value().toString());
+        assertEquals("", pLinkWan7.property("service-provider-bandwidth-up-units").value().toString());
+        assertEquals("", pLinkWan7.property("service-provider-bandwidth-down-value").value().toString());
+        assertEquals("", pLinkWan7.property("service-provider-bandwidth-down-units").value().toString());
+
+    }
+
+
+}
diff --git a/src/test/java/org/onap/aai/migration/v12/UriMigrationTest.java b/src/test/java/org/onap/aai/migration/v12/UriMigrationTest.java
new file mode 100644 (file)
index 0000000..70ea20d
--- /dev/null
@@ -0,0 +1,169 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v12;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class UriMigrationTest extends AAISetup {
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private UriMigration migration;
+       private GraphTraversalSource g;
+       private JanusGraphTransaction tx;
+
+       private Vertex pnf3;
+       private Vertex pInterface3;
+       private Vertex pInterface4;
+       private Vertex lInterface3;
+       private Vertex plink3;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               pnf3 = g.addV().property("aai-node-type", "pnf")
+                               .property("pnf-name", "pnf-name3")
+                               .next();
+               pInterface3 = g.addV().property("aai-node-type", "p-interface")
+                               .property("interface-name", "p-interface-name3")
+                               .next();
+               pInterface4 = g.addV().property("aai-node-type", "p-interface")
+                               .property("interface-name", "p-interface-name/4")
+                               .next();
+               lInterface3 = g.addV().property("aai-node-type", "l-interface")
+                               .property("interface-name", "l-interface-name3")
+                               .next();
+               plink3 = g.addV().property("aai-node-type", "physical-link")
+                                               .property("link-name", "link-name3")
+                                       .next();
+               edgeSerializer.addTreeEdge(g, pnf3, pInterface3);
+               edgeSerializer.addTreeEdge(g, pnf3, pInterface4);
+               edgeSerializer.addTreeEdge(g, pInterface3, lInterface3);
+               edgeSerializer.addEdge(g, pInterface3, plink3);
+
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+               GraphTraversalSource traversal = g;
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               migration = new UriMigration(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+
+               migration.run();
+
+       }
+
+       @After
+       public void tearDown() throws Exception {
+               graph.close();
+       }
+
+       @Test
+       public void allVertexHasUri() throws InterruptedException {
+               assertEquals(Long.valueOf(0L), g.V().hasNot(AAIProperties.AAI_URI).count().next());
+       }
+
+       @Test
+       public void pnf() {
+               printVertex(pnf3);
+               assertEquals("/network/pnfs/pnf/pnf-name3", g.V().has("pnf-name", "pnf-name3").next().value("aai-uri"));
+       }
+
+       protected void printVertex(Vertex v) {
+               final StringBuilder sb = new StringBuilder();
+               v.properties().forEachRemaining(p -> sb.append("\t").append(p.key()).append(" : ").append(p.value()).append("\n"));
+               sb.append("\n");
+               System.out.println(sb.toString());
+       }
+
+       @Test
+       public void plink3() {
+               printVertex(plink3);
+               assertEquals("/network/physical-links/physical-link/link-name3", g.V().has("link-name", "link-name3").next().value("aai-uri"));
+       }
+
+       @Test
+       public void pinterface3() {
+               printVertex(pInterface3);
+               assertEquals("/network/pnfs/pnf/pnf-name3/p-interfaces/p-interface/p-interface-name3", g.V().has("interface-name", "p-interface-name3").next().value("aai-uri"));
+       }
+
+       @Test
+       public void pInterface4() {
+               printVertex(pInterface4);
+               assertEquals("/network/pnfs/pnf/pnf-name3/p-interfaces/p-interface/p-interface-name%2F4", g.V().has("interface-name", "p-interface-name/4").next().value("aai-uri"));
+       }
+
+       @Test
+       public void getChildrenTopTest() {
+               migration.seen = new HashSet<>();
+               migration.seen.add(pnf3.id());
+               assertEquals(new HashSet<>(Arrays.asList(pInterface3, pInterface4)), migration.getChildren(pnf3));
+       }
+
+       @Test
+       public void getChildrenOneDownTest() {
+               migration.seen = new HashSet<>();
+               migration.seen.add(pnf3.id());
+               assertEquals(new HashSet<>(Arrays.asList(lInterface3)), migration.getChildren(pInterface3));
+       }
+
+       @Test
+       public void getChildrenTwoDownTest() {
+               migration.seen = new HashSet<>();
+               migration.seen.add(pInterface3.id());
+               assertEquals(Collections.EMPTY_SET, migration.getChildren(lInterface3));
+       }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateBooleanDefaultsToFalseTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateBooleanDefaultsToFalseTest.java
new file mode 100644 (file)
index 0000000..f8434fc
--- /dev/null
@@ -0,0 +1,384 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v13;\r
+\r
+import org.onap.aai.AAISetup;\r
+import org.onap.aai.edges.EdgeIngestor;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.serialization.db.EdgeSerializer;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+\r
+\r
+import org.janusgraph.core.JanusGraph;\r
+import org.janusgraph.core.JanusGraphFactory;\r
+import org.janusgraph.core.JanusGraphTransaction;\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;\r
+import org.junit.Before;\r
+import org.junit.Test;\r
+import org.onap.aai.dbmap.DBConnectionType;\r
+import org.onap.aai.introspection.Loader;\r
+import org.onap.aai.introspection.ModelType;\r
+import org.onap.aai.setup.SchemaVersions;\r
+import org.onap.aai.setup.SchemaVersion;\r
+import org.onap.aai.migration.Status;\r
+import org.onap.aai.serialization.engines.QueryStyle;\r
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;\r
+\r
+import static org.mockito.Mockito.spy;\r
+import static org.mockito.Mockito.when;\r
+\r
+import java.util.Optional;\r
+\r
+import static org.junit.Assert.assertTrue;\r
+\r
+public class MigrateBooleanDefaultsToFalseTest extends AAISetup {\r
+\r
+       /**\r
+        * Test double for MigrateBooleanDefaultsToFalse: pins the status to\r
+        * SUCCESS and supplies a fixed migration name so the parent class's\r
+        * boolean-defaulting logic can be exercised in isolation.\r
+        */\r
+       public static class BooleanDefaultMigrator extends MigrateBooleanDefaultsToFalse {\r
+        public BooleanDefaultMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){\r
+            super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+        }\r
+        @Override\r
+        public Status getStatus() {\r
+            return Status.SUCCESS;\r
+        }\r
+        @Override\r
+        public Optional<String[]> getAffectedNodeTypes() {\r
+               return Optional.of(new String[]{VNF_NODE_TYPE,VSERVER_NODE_TYPE,VNFC_NODE_TYPE,L3NETWORK_NODE_TYPE,SUBNET_NODE_TYPE,LINTERFACE_NODE_TYPE,VFMODULE_NODE_TYPE});\r
+        }\r
+        @Override\r
+        public String getMigrationName() {\r
+            return "MockBooleanDefaultMigrator";\r
+        }\r
+    }\r
+\r
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;\r
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;\r
+    private final static DBConnectionType type = DBConnectionType.REALTIME;\r
+    private Loader loader;\r
+    private TransactionalGraphEngine dbEngine;\r
+    private BooleanDefaultMigrator migration;\r
+    private GraphTraversalSource g;\r
+\r
+    /**\r
+     * Seeds vertices for every affected node type in four flavors per type\r
+     * (boolean property absent, empty string, true, false), then runs the\r
+     * migrator so the tests can check which values were defaulted to false.\r
+     */\r
+    @Before\r
+    public void setup() throws Exception{\r
+        g = tx.traversal();\r
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());\r
+        dbEngine = new JanusGraphDBEngine(\r
+                queryStyle,\r
+                type,\r
+                loader);\r
+\r
+        //generic-vnf\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf1")\r
+                .property("is-closed-loop-disabled", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf2")\r
+                .property("is-closed-loop-disabled", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                       .property("vnf-id", "generic-vnf3")\r
+                       .property("is-closed-loop-disabled", false)\r
+                       .next();\r
+        //vnfc\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc1")\r
+                .property("is-closed-loop-disabled", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc2")\r
+                .property("is-closed-loop-disabled", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                       .property("vnfc-name", "vnfc3")\r
+                       .property("is-closed-loop-disabled", false)\r
+                       .next();\r
+        //vserver\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver1")\r
+                .property("is-closed-loop-disabled", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver2")\r
+                .property("is-closed-loop-disabled", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                       .property("vserver-id", "vserver3")\r
+                       .property("is-closed-loop-disabled", false)\r
+                       .next();        \r
+      //l3-network\r
+        g.addV().property("aai-node-type", "l3-network")\r
+                .property("network-id", "l3-network0")\r
+                               .property("network-name", "l3-network-name0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "l3-network")\r
+                .property("network-id", "l3-network1")\r
+                               .property("network-name", "l3-network-name1")\r
+                .property("is-bound-to-vpn", "")\r
+                .property("is-provider-network", "")\r
+                               .property("is-shared-network", "")\r
+                               .property("is-external-network", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "l3-network")\r
+                .property("network-id", "l3-network2")\r
+                               .property("network-name", "l3-network-name2")\r
+                .property("is-bound-to-vpn", true)\r
+                .property("is-provider-network", true)\r
+                               .property("is-shared-network", true)\r
+                               .property("is-external-network", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "l3-network")\r
+                       .property("network-id", "l3-network3")\r
+                               .property("network-name", "l3-network-name3")\r
+                       .property("is-bound-to-vpn", false)\r
+                       .property("is-provider-network", false)\r
+                               .property("is-shared-network", false)\r
+                               .property("is-external-network", false)\r
+                       .next();       \r
+        //subnet\r
+        g.addV().property("aai-node-type", "subnet")\r
+                .property("subnet-id", "subnet0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "subnet")\r
+                .property("subnet-id", "subnet1")\r
+                .property("dhcp-enabled", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "subnet")\r
+                .property("subnet-id", "subnet2")\r
+                .property("dhcp-enabled", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "subnet")\r
+                       .property("subnet-id", "subnet3")\r
+                       .property("dhcp-enabled", false)\r
+                       .next();\r
+      //l-interface\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface0")\r
+                               .property("in-maint", false)\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface1")\r
+                .property("in-maint", false)\r
+                               .property("is-port-mirrored", "")\r
+                               .property("is-ip-unnumbered", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface2")\r
+                .property("in-maint", false)\r
+                               .property("is-port-mirrored", true)\r
+                               .property("is-ip-unnumbered", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                       .property("interface-name", "l-interface3")\r
+                       .property("in-maint", false)\r
+                               .property("is-port-mirrored", false)\r
+                               .property("is-ip-unnumbered", false)\r
+                       .next(); \r
+      //vf-module\r
+        g.addV().property("aai-node-type", "vf-module")\r
+                .property("vf-module-id", "vf-module0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vf-module")\r
+                .property("vf-module-id", "vf-module1")\r
+                               .property("is-base-vf-module", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vf-module")\r
+                .property("vf-module-id", "vf-module2")\r
+                               .property("is-base-vf-module", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vf-module")\r
+                       .property("vf-module-id", "vf-module3")\r
+                               .property("is-base-vf-module", false)                           \r
+                       .next(); \r
+                     \r
+      //vlan\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan1")\r
+                               .property("is-ip-unnumbered", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan2")\r
+                               .property("is-ip-unnumbered", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                       .property("vlan-interface", "vlan3")\r
+                               .property("is-ip-unnumbered", false)                            \r
+                       .next();\r
+        \r
+        // Hand the migrator this test's traversal source via stubbed engine/admin.\r
+        TransactionalGraphEngine spy = spy(dbEngine);\r
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());\r
+        GraphTraversalSource traversal = g;\r
+        when(spy.asAdmin()).thenReturn(adminSpy);\r
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);\r
+        migration = new BooleanDefaultMigrator(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+        migration.run();\r
+        \r
+    }\r
+\r
+    /**\r
+     * Vertices created without the boolean property at all should have it\r
+     * added with the value false by the migration.\r
+     */\r
+    @Test\r
+    public void testMissingProperty(){\r
+       //is-closed-loop-disabled\r
+        assertTrue("Value of generic-vnf should be updated since the property is-closed-loop-disabled doesn't exist",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf0").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vnfc should be updated since the property is-closed-loop-disabled doesn't exist",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc0").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vserver should be updated since the property is-closed-loop-disabled doesn't exist",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver0").has("is-closed-loop-disabled", false).hasNext());\r
+        //dhcp-enabled\r
+        assertTrue("Value of subnet should be updated since the property dhcp-enabled doesn't exist",\r
+                g.V().has("aai-node-type", "subnet").has("subnet-id", "subnet0").has("dhcp-enabled", false).hasNext());\r
+        //l3-network: is-bound-to-vpn, is-shared-network, is-external-network\r
+        assertTrue("Value of l3-network should be updated since the property is-bound-to-vpn doesn't exist",\r
+                g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network0").has("network-name", "l3-network-name0").has("is-bound-to-vpn", false).hasNext());  \r
+        assertTrue("Value of l3-network should be updated since the property is-provider-network doesn't exist",\r
+                g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network0").has("network-name", "l3-network-name0").has("is-provider-network", false).hasNext());  \r
+        assertTrue("Value of l3-network should be updated since the property is-shared-network doesn't exist",\r
+                g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network0").has("network-name", "l3-network-name0").has("is-shared-network", false).hasNext());  \r
+               assertTrue("Value of l3-network should be updated since the property is-external-network doesn't exist",\r
+                g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network0").has("network-name", "l3-network-name0").has("is-external-network", false).hasNext()); \r
+               //l-interface: is-port-mirrored, is-ip-unnumbered\r
+               assertTrue("Value of l-interface should be updated since the property is-port-mirrored doesn't exist",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface0").has("is-port-mirrored", false).hasNext());  \r
+               assertTrue("Value of l-interface should be updated since the property is-ip-unnumbered doesn't exist",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface0").has("is-ip-unnumbered", false).hasNext());\r
+               //vf-module: is-base-vf-module\r
+               assertTrue("Value of vf-module should be updated since the property is-base-vf-module doesn't exist",\r
+                g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module0").has("is-base-vf-module", false).hasNext());  \r
+               //vlan: is-ip-unnumbered\r
+               assertTrue("Value of vlan should be updated since the property is-ip-unnumbered doesn't exist",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan0").has("is-ip-unnumbered", false).hasNext());\r
+    }\r
+\r
+    @Test\r
+    public void testEmptyValue() {                         \r
+      //is-closed-loop-disabled\r
+        assertTrue("Value of generic-vnf should be updated since the value for is-closed-loop-disabled is an empty string",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf1").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vnfc should be updated since the value for is-closed-loop-disabled is an empty string",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc1").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vserver should be updated since the value for is-closed-loop-disabled is an empty string",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver1").has("is-closed-loop-disabled", false).hasNext());\r
+        //dhcp-enabled\r
+        assertTrue("Value of subnet should be updated since the value for dhcp-enabled is an empty string",\r
+                g.V().has("aai-node-type", "subnet").has("subnet-id", "subnet1").has("dhcp-enabled", false).hasNext());\r
+        //l3-network: is-bound-to-vpn, is-shared-network, is-external-network\r
+        assertTrue("Value of l3-network should be updated since the value for is-bound-to-vpn is an empty string",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network1").has("network-name", "l3-network-name1").has("is-bound-to-vpn", false).hasNext());         \r
+        assertTrue("Value of l3-network should be updated since the value for is-provider-network is an empty string",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network1").has("network-name", "l3-network-name1").has("is-provider-network", false).hasNext());        \r
+               assertTrue("Value of l3-network should be updated since the value for is-shared-network is an empty string",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network1").has("network-name", "l3-network-name1").has("is-shared-network", false).hasNext());\r
+               assertTrue("Value of l3-network should be updated since the value for is-external-network is an empty string",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network1").has("network-name", "l3-network-name1").has("is-external-network", false).hasNext());\r
+               //l-interface: is-port-mirrored, is-ip-unnumbered\r
+               assertTrue("Value of l-interface should be updated since the property is-port-mirrored  is an empty string",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface1").has("is-port-mirrored", false).hasNext());  \r
+               assertTrue("Value of l-interface should be updated since the property is-ip-unnumbered  is an empty string",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface1").has("is-ip-unnumbered", false).hasNext());\r
+               //vf-module: is-base-vf-module, is-ip-unnumbered\r
+               assertTrue("Value of vf-module should be updated since the property is-base-vf-module  is an empty string",\r
+                g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module1").has("is-base-vf-module", false).hasNext());  \r
+               //vlan: is-ip-unnumbered\r
+               assertTrue("Value of vlan should be updated since the property is-ip-unnumbered is an empty string",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan1").has("is-ip-unnumbered", false).hasNext());\r
+    }\r
+    \r
+    @Test\r
+    public void testExistingTrueValues() {\r
+      //is-closed-loop-disabled\r
+        assertTrue("Value of generic-vnf shouldn't be update since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf2").has("is-closed-loop-disabled", true).hasNext());\r
+        assertTrue("Value of vnfc shouldn't be update since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc2").has("is-closed-loop-disabled", true).hasNext());\r
+        assertTrue("Value of vserver shouldn't be update since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver2").has("is-closed-loop-disabled", true).hasNext());\r
+       //dhcp-enabled\r
+        assertTrue("Value of subnet shouldn't be update since dhcp-enabled already exists",\r
+                g.V().has("aai-node-type", "subnet").has("subnet-id", "subnet2").has("dhcp-enabled", true).hasNext()); \r
+      //l3-network: is-bound-to-vpn, is-shared-network, is-external-network\r
+        assertTrue("Value of l3-network shouldn't be updated since is-bound-to-vpn already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network2").has("network-name", "l3-network-name2").has("is-bound-to-vpn", true).hasNext());\r
+        assertTrue("Value of l3-network shouldn't be updated since is-provider-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network2").has("network-name", "l3-network-name2").has("is-provider-network", true).hasNext());\r
+               assertTrue("Value of l3-network shouldn't be updated since is-shared-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network2").has("network-name", "l3-network-name2").has("is-shared-network", true).hasNext());\r
+               assertTrue("Value of l3-network shouldn't be updated since is-external-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network2").has("network-name", "l3-network-name2").has("is-external-network", true).hasNext());                          \r
+               //l-interface: is-port-mirrored, is-ip-unnumbered\r
+               assertTrue("Value of l-interface shouldn't be updated since is-port-mirrored already exists",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface2").has("is-port-mirrored", true).hasNext());  \r
+               assertTrue("Value of ll-interface shouldn't be updated since is-ip-unnumbered already exists",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface2").has("is-ip-unnumbered", true).hasNext());              \r
+               //vf-module: is-base-vf-module\r
+               assertTrue("Value of vf-module shouldn't be updated since is-base-vf-module already exists",\r
+                g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module2").has("is-base-vf-module", true).hasNext());  \r
+               //vlan: is-ip-unnumbered\r
+               assertTrue("Value of vlan shouldn't be updated since is-ip-unnumbered already exists",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan2").has("is-ip-unnumbered", true).hasNext());\r
+        \r
+    }\r
+    \r
+    @Test\r
+    public void testExistingFalseValues() {\r
+       //is-closed-loop-disabled\r
+        assertTrue("Value of generic-vnf shouldn't be update since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf3").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vnfc shouldn't be update since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc3").has("is-closed-loop-disabled", false).hasNext());\r
+        assertTrue("Value of vserver shouldn't be update since is-closed-loop-disabled already exists",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver3").has("is-closed-loop-disabled", false).hasNext());\r
+        //dhcp-enabled\r
+        assertTrue("Value of subnet shouldn't be update since dhcp-enabled already exists",\r
+                g.V().has("aai-node-type", "subnet").has("subnet-id", "subnet3").has("dhcp-enabled", false).hasNext());\r
+        //l3-network: is-bound-to-vpn, is-shared-network, is-external-network\r
+        assertTrue("Value of l3-network shouldn't be updated since is-bound-to-vpn already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network3").has("network-name", "l3-network-name3").has("is-bound-to-vpn", false).hasNext());  \r
+        assertTrue("Value of l3-network shouldn't be updated since is-provider-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network3").has("network-name", "l3-network-name3").has("is-provider-network", false).hasNext());  \r
+        assertTrue("Value of l3-network shouldn't be updated since is-shared-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network3").has("network-name", "l3-network-name3").has("is-shared-network", false).hasNext());\r
+               assertTrue("Value of l3-network shouldn't be updated since is-external-network already exists",\r
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3-network3").has("network-name", "l3-network-name3").has("is-external-network", false).hasNext());                 \r
+               //l-interface: is-port-mirrored, is-ip-unnumbered\r
+               assertTrue("Value of l-interface shouldn't be updated since is-port-mirrored already exists",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface3").has("is-port-mirrored", false).hasNext());  \r
+               assertTrue("Value of ll-interface shouldn't be updated since is-ip-unnumbered already exists",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface3").has("is-ip-unnumbered", false).hasNext());                             \r
+               //vf-module: is-base-vf-module\r
+               assertTrue("Value of vf-module shouldn't be updated since is-base-vf-module already exists",\r
+                g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vf-module3").has("is-base-vf-module", false).hasNext());  \r
+               //vlan: is-ip-unnumbered\r
+               assertTrue("Value of vlan shouldn't be updated since is-ip-unnumbered already exists",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan3").has("is-ip-unnumbered", false).hasNext());\r
+    } \r
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateInMaintDefaultToFalseTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateInMaintDefaultToFalseTest.java
new file mode 100644 (file)
index 0000000..9e7845f
--- /dev/null
@@ -0,0 +1,411 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v13;\r
+\r
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.janusgraph.core.schema.JanusGraphManagement;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.edges.EdgeIngestor;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.migration.Status;
+import org.onap.aai.migration.v13.MigrateInMaintDefaultToFalse;
+import org.onap.aai.migration.v13.MigrateInMaintDefaultToFalseTest.InMaintDefaultMigrator;
+import org.onap.aai.serialization.db.EdgeSerializer;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+\r
+public class MigrateInMaintDefaultToFalseTest extends\r
+               AAISetup {\r
+       // Node-type names used both when seeding test vertices and in the migrator's
+       // getAffectedNodeTypes() override below.
+       protected static final String VNF_NODE_TYPE = "generic-vnf";
+       protected static final String LINTERFACE_NODE_TYPE = "l-interface";
+       protected static final String LAG_INTERFACE_NODE_TYPE = "lag-interface";
+       protected static final String LOGICAL_LINK_NODE_TYPE = "logical-link";
+       protected static final String PINTERFACE_NODE_TYPE = "p-interface";
+       protected static final String VLAN_NODE_TYPE = "vlan";
+       protected static final String VNFC_NODE_TYPE = "vnfc";
+       protected static final String VSERVER_NODE_TYPE = "vserver";
+       protected static final String PSERVER_NODE_TYPE = "pserver";
+       protected static final String PNF_NODE_TYPE = "pnf";
+       protected static final String NOS_SERVER_NODE_TYPE = "nos-server";
+\r
+       /**
+        * Test double for {@link MigrateInMaintDefaultToFalse} that pins the reported
+        * status, the affected node types and the migration name so the inherited
+        * migration logic can be exercised deterministically from this test.
+        */
+       public static class InMaintDefaultMigrator extends MigrateInMaintDefaultToFalse {
+        public InMaintDefaultMigrator(TransactionalGraphEngine engine, LoaderFactory loaderFactory, EdgeIngestor edgeIngestor, EdgeSerializer edgeSerializer, SchemaVersions schemaVersions){
+            super(engine, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+        }
+        // Hard-coded to SUCCESS for this test double.
+        @Override
+        public Status getStatus() {
+            return Status.SUCCESS;
+        }
+        // All node types this test seeds; mirrors the class-level *_NODE_TYPE constants.
+        @Override
+        public Optional<String[]> getAffectedNodeTypes() {
+               return Optional.of(new String[]{VNF_NODE_TYPE,LINTERFACE_NODE_TYPE,LAG_INTERFACE_NODE_TYPE,LOGICAL_LINK_NODE_TYPE,PINTERFACE_NODE_TYPE,VLAN_NODE_TYPE,VNFC_NODE_TYPE,VSERVER_NODE_TYPE,PSERVER_NODE_TYPE,PNF_NODE_TYPE,NOS_SERVER_NODE_TYPE});
+        }
+        // Distinct mock name so this run is distinguishable from the real migration.
+        @Override
+        public String getMigrationName() {
+            return "MockInMaintDefaultMigrator";
+        }
+    }
+\r
+    private final static ModelType introspectorFactoryType = ModelType.MOXY; // MOXY-based introspection
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;       // traversal-style queries
+    private final static DBConnectionType type = DBConnectionType.REALTIME;  // realtime DB connection
+    private Loader loader;                     // schema loader for the default schema version (built in setup())
+    private TransactionalGraphEngine dbEngine; // engine wrapped in a Mockito spy by setup()
+    private InMaintDefaultMigrator migration;  // migrator under test; run once during setup()
+    private GraphTraversalSource g;            // traversal source over the shared test transaction (tx — presumably provided by AAISetup; confirm)
+\r
+    @Before\r
+    public void setup() throws Exception{\r
+        g = tx.traversal();\r
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());\r
+        dbEngine = new JanusGraphDBEngine(\r
+                queryStyle,\r
+                type,\r
+                loader);\r
+\r
+        //generic-vnf\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                .property("vnf-id", "generic-vnf2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "generic-vnf")\r
+                       .property("vnf-id", "generic-vnf3")\r
+                       .property("in-maint", false)\r
+                       .next();        \r
+      //l-interface\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                .property("interface-name", "l-interface2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "l-interface")\r
+                       .property("interface-name", "l-interface3")\r
+                       .property("in-maint", false)\r
+                       .next();         \r
+      //lag-interface\r
+        g.addV().property("aai-node-type", "lag-interface")\r
+                .property("interface-name", "lag-interface0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "lag-interface")\r
+                .property("interface-name", "lag-interface1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "lag-interface")\r
+                .property("interface-name", "lag-interface2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "lag-interface")\r
+                       .property("interface-name", "lag-interface3")\r
+                       .property("in-maint", false)\r
+                       .next();        \r
+      //logical-link\r
+        g.addV().property("aai-node-type", "logical-link")\r
+                .property("link-name", "logical-link0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "logical-link")\r
+                .property("link-name", "logical-link1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "logical-link")\r
+                .property("link-name", "logical-link2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "logical-link")\r
+                       .property("link-name", "logical-link3")\r
+                       .property("in-maint", false)\r
+                       .next();      \r
+      //p-interface\r
+        g.addV().property("aai-node-type", "p-interface")\r
+                .property("interface-name", "p-interface0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "p-interface")\r
+                .property("interface-name", "p-interface1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "p-interface")\r
+                .property("interface-name", "p-interface2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "p-interface")\r
+                       .property("interface-name", "p-interface3")\r
+                       .property("in-maint", false)\r
+                       .next();        \r
+      //pnf\r
+        g.addV().property("aai-node-type", "pnf")\r
+                .property("pnf-name", "pnf0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "pnf")\r
+                .property("pnf-name", "pnf1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "pnf")\r
+                .property("pnf-name", "pnf2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "pnf")\r
+                       .property("pnf-name", "pnf3")\r
+                       .property("in-maint", false)\r
+                       .next();        \r
+      //pserver\r
+        g.addV().property("aai-node-type", "pserver")\r
+                .property("pserver-id", "pserver0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "pserver")\r
+                .property("pserver-id", "pserver1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "pserver")\r
+                .property("pserver-id", "pserver2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "pserver")\r
+                       .property("pserver-id", "pserver3")\r
+                       .property("in-maint", false)\r
+                       .next();       \r
+      //vlan\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                .property("vlan-interface", "vlan2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vlan")\r
+                       .property("vlan-interface", "vlan3")\r
+                       .property("in-maint", false)\r
+                       .next();\r
+      //vnfc\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                .property("vnfc-name", "vnfc2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vnfc")\r
+                       .property("vnfc-name", "vnfc3")\r
+                       .property("in-maint", false)\r
+                       .next();\r
+      //vserver\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver1")\r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                .property("vserver-id", "vserver2")\r
+                .property("in-maint", true)\r
+                .property("is-closed-loop-disabled", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "vserver")\r
+                       .property("vserver-id", "vserver3")\r
+                       .property("in-maint", false)\r
+                       .next();\r
+      //nos-server\r
+        g.addV().property("aai-node-type", "nos-server")\r
+                .property("nos-server-id", "nos-server0")\r
+                               .property("nos-server-name", "nos-server-name0")\r
+                               .property("vendor", "vendor0")\r
+                               .property("nos-server-selflink", "nos-server-selflink0")\r
+                .next();\r
+        g.addV().property("aai-node-type", "nos-server")\r
+                .property("nos-server-id", "nos-server1")\r
+                               .property("nos-server-name", "nos-server-name1")\r
+                               .property("vendor", "vendor1")\r
+                               .property("nos-server-selflink", "nos-server-selflink1")                                \r
+                .property("in-maint", "")\r
+                .next();\r
+        g.addV().property("aai-node-type", "nos-server")\r
+                .property("nos-server-id", "nos-server2")\r
+                               .property("nos-server-name", "nos-server-name2")\r
+                               .property("vendor", "vendor2")\r
+                               .property("nos-server-selflink", "nos-server-selflink2")\r
+                .property("in-maint", true)\r
+                .next();\r
+        g.addV().property("aai-node-type", "nos-server")\r
+                       .property("nos-server-id", "nos-server3")\r
+                               .property("nos-server-name", "nos-server-name3")\r
+                               .property("vendor", "vendor3")\r
+                               .property("nos-server-selflink", "nos-server-selflink3")\r
+                       .property("in-maint", false)\r
+                       .next();\r
+        \r
+        TransactionalGraphEngine spy = spy(dbEngine);\r
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());\r
+        GraphTraversalSource traversal = g;\r
+        when(spy.asAdmin()).thenReturn(adminSpy);\r
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);\r
+        migration = new InMaintDefaultMigrator(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+        migration.run();\r
+    }\r
+\r
+    @Test\r
+    public void testMissingProperty(){\r
+        assertTrue("Value of generic-vnf should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of l-interface should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of lag-interface should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "lag-interface").has("interface-name", "lag-interface0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of logical-link should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "logical-link").has("link-name", "logical-link0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of p-interface should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "p-interface").has("interface-name", "p-interface0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of pnf should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of pserver should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "pserver").has("pserver-id", "pserver0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vlan should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vnfc should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc0").has("in-maint", false).hasNext());\r
+        assertTrue("Value of vserver should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver0").has("in-maint", false).hasNext());    \r
+        assertTrue("Value of nos-server should be updated since the property in-maint doesn't exist",\r
+                g.V().has("aai-node-type", "nos-server").has("nos-server-id", "nos-server0").has("in-maint", false).hasNext()); \r
+    }\r
+\r
+    @Test
+    public void testEmptyValue() {
+        // Each "*1" vertex was seeded with in-maint set to an empty string ("").
+        // The migration is expected to treat an empty value the same as a missing
+        // one and overwrite it with the boolean default false for every node type.
+        assertTrue("Value of generic-vnf should be updated since the value for in-maint is an empty string",
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf1").has("in-maint", false).hasNext());
+        assertTrue("Value of l-interface should be updated since the value for in-maint is an empty string",
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface1").has("in-maint", false).hasNext());
+        assertTrue("Value of lag-interface should be updated since the value for in-maint is an empty string",
+                g.V().has("aai-node-type", "lag-interface").has("interface-name", "lag-interface1").has("in-maint", false).hasNext());
+        assertTrue("Value of logical-link should be updated since the value for in-maint is an empty string",
+                g.V().has("aai-node-type", "logical-link").has("link-name", "logical-link1").has("in-maint", false).hasNext());
+        assertTrue("Value of p-interface should be updated since the value for in-maint is an empty string",
+                g.V().has("aai-node-type", "p-interface").has("interface-name", "p-interface1").has("in-maint", false).hasNext());
+        assertTrue("Value of pnf should be updated since the value for in-maint is an empty string",
+                g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf1").has("in-maint", false).hasNext());
+        assertTrue("Value of pserver should be updated since the value for in-maint is an empty string",
+                g.V().has("aai-node-type", "pserver").has("pserver-id", "pserver1").has("in-maint", false).hasNext());
+        assertTrue("Value of vlan should be updated since the value for in-maint is an empty string",
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan1").has("in-maint", false).hasNext());
+        assertTrue("Value of vnfc should be updated since the value for in-maint is an empty string",
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc1").has("in-maint", false).hasNext());
+        assertTrue("Value of vserver should be updated since the value for in-maint is an empty string",
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver1").has("in-maint", false).hasNext());
+        assertTrue("Value of nos-server should be updated since the value for in-maint is an empty string",
+                g.V().has("aai-node-type", "nos-server").has("nos-server-id", "nos-server1").has("in-maint", false).hasNext());
+    }
+    \r
+    @Test
+    public void testExistingTrueValues() {
+        // Each "*2" vertex was seeded with in-maint already set to true.
+        // The migration must not touch an existing boolean value, so every
+        // vertex is expected to still read true after the run.
+        assertTrue("Value of generic-vnf shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf2").has("in-maint", true).hasNext());
+        assertTrue("Value of l-interface shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface2").has("in-maint", true).hasNext());
+        assertTrue("Value of lag-interface shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "lag-interface").has("interface-name", "lag-interface2").has("in-maint", true).hasNext());
+        assertTrue("Value of logical-link shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "logical-link").has("link-name", "logical-link2").has("in-maint", true).hasNext());
+        assertTrue("Value of p-interface shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "p-interface").has("interface-name", "p-interface2").has("in-maint", true).hasNext());
+        assertTrue("Value of pnf shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf2").has("in-maint", true).hasNext());
+        assertTrue("Value of pserver shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "pserver").has("pserver-id", "pserver2").has("in-maint", true).hasNext());
+        assertTrue("Value of vlan shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan2").has("in-maint", true).hasNext());
+        assertTrue("Value of vnfc shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc2").has("in-maint", true).hasNext());
+        assertTrue("Value of vserver shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver2").has("in-maint", true).hasNext());
+        assertTrue("Value of nos-server shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "nos-server").has("nos-server-id", "nos-server2").has("in-maint", true).hasNext());
+    }
+    \r
+    @Test
+    public void testExistingFalseValues() {
+        // Each "*3" vertex was seeded with in-maint already set to false.
+        // An explicit false must be distinguished from "missing"/"empty" and
+        // left untouched, so every vertex still reads false after the run.
+        assertTrue("Value of generic-vnf shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "generic-vnf3").has("in-maint", false).hasNext());
+        assertTrue("Value of l-interface shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "l-interface").has("interface-name", "l-interface3").has("in-maint", false).hasNext());
+        assertTrue("Value of lag-interface shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "lag-interface").has("interface-name", "lag-interface3").has("in-maint", false).hasNext());
+        assertTrue("Value of logical-link shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "logical-link").has("link-name", "logical-link3").has("in-maint", false).hasNext());
+        assertTrue("Value of p-interface shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "p-interface").has("interface-name", "p-interface3").has("in-maint", false).hasNext());
+        assertTrue("Value of pnf shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "pnf").has("pnf-name", "pnf3").has("in-maint", false).hasNext());
+        assertTrue("Value of pserver shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "pserver").has("pserver-id", "pserver3").has("in-maint", false).hasNext());
+        assertTrue("Value of vlan shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "vlan").has("vlan-interface", "vlan3").has("in-maint", false).hasNext());
+        assertTrue("Value of vnfc shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc3").has("in-maint", false).hasNext());
+        assertTrue("Value of vserver shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "vserver").has("vserver-id", "vserver3").has("in-maint", false).hasNext());
+        assertTrue("Value of nos-server shouldn't be updated since in-maint already exists",
+                g.V().has("aai-node-type", "nos-server").has("nos-server-id", "nos-server3").has("in-maint", false).hasNext());
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelInvariantIdTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelInvariantIdTest.java
new file mode 100644 (file)
index 0000000..3610fac
--- /dev/null
@@ -0,0 +1,108 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class MigrateInstanceGroupModelInvariantIdTest extends AAISetup{
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private MigrateInstanceGroupModelInvariantId migration;
+       private JanusGraphTransaction tx;
+       private GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               Vertex instancegroup1 = g.addV().property("aai-node-type", "instance-group").property("id", "instance-id-1")
+                .property("description","instance-description-1").property("instanceGroupType","instance-type-1")
+                .property("model-invariant-id", "instance-invariant-id-1").next();
+
+        Vertex instancegroup2 = g.addV().property("aai-node-type", "instance-group").property("id", "instance-id-2")
+                .property("description","instance-description-2").property("instanceGroupType","instance-type-1")
+                .property("model-invariant-id-local", "instance-invariant-id-2").next();
+
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when (spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+
+               migration = new MigrateInstanceGroupModelInvariantId(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+
+       @After
+       public void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+
+       @Test
+    public void testIdsUpdated() throws Exception {
+        assertEquals(true,
+                g.V().has("aai-node-type", "instance-group").has("id", "instance-id-1").has("model-invariant-id-local").next().property("model-invariant-id-local").isPresent());
+           assertEquals("model-invariant-id renamed to model-invariant-id-local for instance-group", "instance-invariant-id-1",
+                g.V().has("aai-node-type", "instance-group").has("id", "instance-id-1").next().value("model-invariant-id-local").toString());
+    }
+
+    @Test
+    public void testIdsNotUpdated() throws Exception {
+        assertEquals("model-invariant-id-local remains the same for instance-group", "instance-invariant-id-2",
+                g.V().has("aai-node-type", "instance-group").has("id", "instance-id-2").next().value("model-invariant-id-local").toString());
+    }
+}
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelVersionIdTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupModelVersionIdTest.java
new file mode 100644 (file)
index 0000000..346d76a
--- /dev/null
@@ -0,0 +1,111 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.setup.SchemaVersions;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class MigrateInstanceGroupModelVersionIdTest extends AAISetup {
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private static Loader loader;
+       private static TransactionalGraphEngine dbEngine;
+       private static JanusGraph graph;
+       private static MigrateInstanceGroupModelVersionId migration;
+       private static JanusGraphTransaction tx;
+       private static GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               Vertex instancegroup1 = g.addV().property("aai-node-type", "instance-group").property("id", "instance-id-1")
+                .property("description","instance-description-1").property("instanceGroupType","instance-type-1")
+                .property("model-version-id", "instance-version-id-1").next();
+
+        Vertex instancegroup2 = g.addV().property("aai-node-type", "instance-group").property("id", "instance-id-2")
+                .property("description","instance-description-2").property("instanceGroupType","instance-type-1")
+                .property("model-version-id-local", "instance-version-id-2").next();
+
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when (spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+
+               migration = new MigrateInstanceGroupModelVersionId(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+
+       @AfterClass
+       public static void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+
+       @Test
+    public void testIdsUpdated() throws Exception {
+        assertEquals(true,
+                g.V().has("aai-node-type", "instance-group").has("id", "instance-id-1").has("model-version-id-local").next().property("model-version-id-local").isPresent());
+        assertEquals("model-version-id renamed to model-version-id-local for instance-group", "instance-version-id-1",
+                g.V().has("aai-node-type", "instance-group").has("id", "instance-id-1").next().value("model-version-id-local").toString());
+    }
+
+    @Test
+    public void testIdsNotUpdated() throws Exception {
+        assertEquals("model-version-id-local remains the same for instance-group", "instance-version-id-2",
+                g.V().has("aai-node-type", "instance-group").has("id", "instance-id-2").next().value("model-version-id-local").toString());
+    }
+}
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupSubTypeTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupSubTypeTest.java
new file mode 100644 (file)
index 0000000..45a6cb8
--- /dev/null
@@ -0,0 +1,112 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+
+public class MigrateInstanceGroupSubTypeTest extends AAISetup{
+
+       private static final String SUB_TYPE_VALUE = "SubTypeValue";
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private MigrateInstanceGroupSubType migration;
+       private GraphTraversalSource g;
+       private JanusGraphTransaction tx;
+       Vertex instanceGroup;
+       Vertex instanceGroupWithoutTSubType;
+
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend", "inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+               instanceGroup = g.addV().property("aai-node-type", MigrateInstanceGroupSubType.INSTANCE_GROUP_NODE_TYPE)
+                               .property( MigrateInstanceGroupSubType.SUB_TYPE_PROPERTY, SUB_TYPE_VALUE)
+                               .next();
+
+               instanceGroupWithoutTSubType = g.addV().property("aai-node-type", MigrateInstanceGroupSubType.INSTANCE_GROUP_NODE_TYPE)
+                               .next();
+
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+               GraphTraversalSource traversal = g;
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               migration = new MigrateInstanceGroupSubType(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+       }
+
+       @After
+       public void cleanUp() {
+               tx.rollback();
+               graph.close();
+       }
+
+
+       /***
+        * checks if the type/subtype property were renamed
+        */
+
+       @Test
+       public void confirmTypeAndSubTypeWereRenamed() {
+               migration.run();
+
+               //instance group with sub-type
+               assertEquals(SUB_TYPE_VALUE, instanceGroup.property(MigrateInstanceGroupSubType.INSTANCE_GROUP_ROLE_PROPERTY).value());
+               assertFalse(instanceGroup.property(MigrateInstanceGroupSubType.SUB_TYPE_PROPERTY).isPresent());
+
+               //instance group without subtype
+               assertFalse(instanceGroupWithoutTSubType.property(MigrateInstanceGroupSubType.INSTANCE_GROUP_ROLE_PROPERTY).isPresent());
+               assertFalse(instanceGroupWithoutTSubType.property(MigrateInstanceGroupSubType.SUB_TYPE_PROPERTY).isPresent());
+       }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupTypeTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateInstanceGroupTypeTest.java
new file mode 100644 (file)
index 0000000..210b905
--- /dev/null
@@ -0,0 +1,112 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+
+public class MigrateInstanceGroupTypeTest extends AAISetup{
+
+    private static final String TYPE_VALUE = "TypeValue";
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+    private final static DBConnectionType type = DBConnectionType.REALTIME;
+    private Loader loader;
+    private TransactionalGraphEngine dbEngine;
+    private JanusGraph graph;
+    private MigrateInstanceGroupType migration;
+    private GraphTraversalSource g;
+    private JanusGraphTransaction tx;
+    Vertex instanceGroup;
+    Vertex instanceGroupWithoutType;
+
+
+    @Before
+    public void setUp() throws Exception {
+        graph = JanusGraphFactory.build().set("storage.backend", "inmemory").open();
+        tx = graph.newTransaction();
+        g = tx.traversal();
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+        dbEngine = new JanusGraphDBEngine(
+                queryStyle,
+                type,
+                loader);
+         instanceGroup = g.addV().property("aai-node-type", MigrateInstanceGroupType.INSTANCE_GROUP_NODE_TYPE)
+                .property( MigrateInstanceGroupType.TYPE_PROPERTY, TYPE_VALUE)                
+                .next();
+         
+         instanceGroupWithoutType = g.addV().property("aai-node-type", MigrateInstanceGroupType.INSTANCE_GROUP_NODE_TYPE)
+                 .next();
+         
+        TransactionalGraphEngine spy = spy(dbEngine);
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+        GraphTraversalSource traversal = g;
+        when(spy.asAdmin()).thenReturn(adminSpy);
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);
+        migration = new MigrateInstanceGroupType(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+    }
+
+    @After
+    public void cleanUp() {
+        tx.rollback();
+        graph.close();
+    }
+
+
+    /***
+     * checks if the type/subtype property were renamed
+     */
+
+    @Test
+    public void confirmTypeAndSubTypeWereRenamed() {
+        migration.run();
+
+        //instance group with type
+        assertEquals(TYPE_VALUE, instanceGroup.property(MigrateInstanceGroupType.INSTANCE_GROUP_TYPE_PROPERTY).value());
+        assertFalse(instanceGroup.property(MigrateInstanceGroupType.TYPE_PROPERTY).isPresent());
+
+        //instance group without type
+        assertFalse(instanceGroupWithoutType.property(MigrateInstanceGroupType.INSTANCE_GROUP_TYPE_PROPERTY).isPresent());
+        assertFalse(instanceGroupWithoutType.property(MigrateInstanceGroupType.TYPE_PROPERTY).isPresent());
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateModelVerTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateModelVerTest.java
new file mode 100644 (file)
index 0000000..00db1fa
--- /dev/null
@@ -0,0 +1,487 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import static org.junit.Assert.*;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.janusgraph.core.schema.JanusGraphManagement;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+public class MigrateModelVerTest extends AAISetup{
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private GraphTraversalSource g;
+       private JanusGraphTransaction tx;
+       private MigrateModelVer migration;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend", "inmemory").open();
+               JanusGraphManagement janusgraphManagement = graph.openManagement();
+               tx = graph.newTransaction();
+               g = graph.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(queryStyle, type, loader);
+
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+               GraphTraversalSource traversal = g;
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               Mockito.doReturn(janusgraphManagement).when(adminSpy).getManagementSystem();
+               
+               
+               // Add model1/model-ver1 -- invalid model/model-ver
+               Vertex model1 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-1").property("model-type", "widget").next();
+               Vertex modelVer1 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-1")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-1/model-vers/model-ver/model-version-id-1")
+                               .property("model-name", "connector").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model1, modelVer1);
+               //connector
+               Vertex connector1= g.addV().property("aai-node-type", "connector").property("resource-instance-id", "connector1")
+                               .property("model-invariant-id-local", "model-invariant-id-1").property("model-version-id-local", "model-version-id-1").next();
+               Vertex connector2= g.addV().property("aai-node-type", "connector").property("resource-instance-id", "connector2")
+                               .property("model-invariant-id-local", "model-invariant-id-x").property("model-version-id-local", "model-version-id-x").next();
+               Vertex connector3= g.addV().property("aai-node-type", "connector").property("resource-instance-id", "connector3")
+                               .property("model-invariant-id-local", "model-invariant-id-1").property("model-version-id-local", "model-version-id-1").next();
+               edgeSerializer.addPrivateEdge(traversal, connector3, modelVer1, null);
+
+
+               // Add model1/model-ver1 -- invalid model/model-ver
+               Vertex model2 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-2").property("model-type", "widget").next();
+               Vertex modelVer2 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-2")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-2/model-vers/model-ver/model-version-id-2")
+                               .property("model-name", "service-instance").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model2, modelVer2);
+               //serivce-instance
+               Vertex serviceInstance1= g.addV().property("aai-node-type", "service-instance").property("service-instance-id", "serviceinstance1")
+                               .property("model-invariant-id-local", "model-invariant-id-2").property("model-version-id-local", "model-version-id-2").next();
+               Vertex serviceInstance2= g.addV().property("aai-node-type", "service-instance").property("service-instance-id", "serviceinstance2")
+                               .property("model-invariant-id-local", "model-invariant-id-y").property("model-version-id-local", "model-version-id-y").next();
+               Vertex serviceInstance3= g.addV().property("aai-node-type", "service-instance").property("service-instance-id", "serviceinstance3")
+                               .property("model-invariant-id-local", "model-invariant-id-2").property("model-version-id-local", "model-version-id-2").next();
+               edgeSerializer.addPrivateEdge(traversal, serviceInstance3, modelVer2, null);
+
+               // Add model3/model-ver3
+               Vertex model3 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-3").property("model-type", "widget").next();
+               Vertex modelVer3 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-3")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-3/model-vers/model-ver/model-version-id-3")
+                               .property("model-name", "pnf").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model3, modelVer3);
+               //pnf
+               Vertex pnfName1= g.addV().property("aai-node-type", "pnf").property("pnf-name", "pnfName1")
+                               .property("model-invariant-id-local", "model-invariant-id-3").property("model-version-id-local", "model-version-id-3").next();
+               Vertex pnfName2= g.addV().property("aai-node-type", "pnf").property("pnf-name", "pnfName2")
+                               .property("model-invariant-id-local", "model-invariant-id-y").property("model-version-id-local", "model-version-id-y").next();
+               Vertex pnfName3= g.addV().property("aai-node-type", "pnf").property("pnf-name", "pnfName3")
+                               .property("model-invariant-id-local", "model-invariant-id-3").property("model-version-id-local", "model-version-id-3").next();
+               edgeSerializer.addPrivateEdge(traversal, pnfName3, modelVer3, null);
+               
+               // Add model4/model-ver4
+               Vertex model4 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-4").property("model-type", "widget").next();
+               Vertex modelVer4 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-4")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-4/model-vers/model-ver/model-version-id-4")
+                               .property("model-name", "logical-link").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model4, modelVer4);
+               //logical-link
+               Vertex linkName1= g.addV().property("aai-node-type", "logical-link").property("link-name", "linkName1")
+                               .property("model-invariant-id-local", "model-invariant-id-4").property("model-version-id-local", "model-version-id-4").next();
+               Vertex linkName2= g.addV().property("aai-node-type", "logical-link").property("link-name", "linkName2")
+                               .property("model-invariant-id-local", "model-invariant-id-y").property("model-version-id-local", "model-version-id-y").next();
+               Vertex linkName3= g.addV().property("aai-node-type", "logical-link").property("link-name", "linkName3")
+                               .property("model-invariant-id-local", "model-invariant-id-4").property("model-version-id-local", "model-version-id-4").next();
+               edgeSerializer.addPrivateEdge(traversal, linkName3, modelVer4, null);
+               
+               
+               // Add model5/model-ver5
+               Vertex model5 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-5").property("model-type", "widget").next();
+               Vertex modelVer5 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-5")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-5/model-vers/model-ver/model-version-id-5")
+                               .property("model-name", "vnfc").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model5, modelVer5);
+               //vnfc
+               Vertex vnfc1= g.addV().property("aai-node-type", "vnfc").property("vnfc-name", "vnfc1")
+                               .property("model-invariant-id-local", "model-invariant-id-5").property("model-version-id-local", "model-version-id-5").next();
+               Vertex vnfc2= g.addV().property("aai-node-type", "vnfc").property("vnfc-name", "vnfc2")
+                               .property("model-invariant-id-local", "model-invariant-id-y").property("model-version-id-local", "model-version-id-y").next();
+               Vertex vnfc3= g.addV().property("aai-node-type", "vnfc").property("vnfc-name", "vnfc3")
+                               .property("model-invariant-id-local", "model-invariant-id-5").property("model-version-id-local", "model-version-id-5").next();
+               edgeSerializer.addPrivateEdge(traversal, vnfc3, modelVer5, null);
+               
+               // Add model6/model-ver6
+               Vertex model6 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-6").property("model-type", "widget").next();
+               Vertex modelVer6 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-6")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-6/model-vers/model-ver/model-version-id-6")
+                               .property("model-name", "vnf").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model6, modelVer6);
+               //generic-vnf
+               Vertex vnf1= g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "vnf1")
+                               .property("model-invariant-id-local", "model-invariant-id-6").property("model-version-id-local", "model-version-id-6").next();
+               Vertex vnf2= g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "vnf2")
+                               .property("model-invariant-id-local", "model-invariant-id-y").property("model-version-id-local", "model-version-id-y").next();
+               Vertex vnf3= g.addV().property("aai-node-type", "generic-vnf").property("vnf-id", "vnf3")
+                               .property("model-invariant-id-local", "model-invariant-id-6").property("model-version-id-local", "model-version-id-6").next();
+               edgeSerializer.addPrivateEdge(traversal, vnf3, modelVer6, null);
+               
+               // Add model7/model-ver7
+               Vertex model7 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-7").property("model-type", "widget").next();
+               Vertex modelVer7 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-7")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-7/model-vers/model-ver/model-version-id-7")
+                               .property("model-name", "configuration").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model7, modelVer7);
+               //configuration
+               Vertex configuration1= g.addV().property("aai-node-type", "configuration").property("configuration-id", "configuration1")
+                               .property("model-invariant-id-local", "model-invariant-id-7").property("model-version-id-local", "model-version-id-7").next();
+               Vertex configuration2= g.addV().property("aai-node-type", "configuration").property("configuration-id", "configuration2")
+                               .property("model-invariant-id-local", "model-invariant-id-y").property("model-version-id-local", "model-version-id-y").next();
+               Vertex configuration3= g.addV().property("aai-node-type", "configuration").property("configuration-id", "configuration3")
+                               .property("model-invariant-id-local", "model-invariant-id-7").property("model-version-id-local", "model-version-id-7").next();
+               edgeSerializer.addPrivateEdge(traversal, configuration3, modelVer7, null);
+               
+               // Add model8/model-ver8
+               Vertex model8 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-8").property("model-type", "widget").next();
+               Vertex modelVer8 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-8")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-8/model-vers/model-ver/model-version-id-8")
+                               .property("model-name", "l3-network").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model8, modelVer8);
+               //l3-network
+               Vertex l3Network1= g.addV().property("aai-node-type", "l3-network").property("network-id", "l3Network1")
+                               .property("model-invariant-id-local", "model-invariant-id-8").property("model-version-id-local", "model-version-id-8").next();
+               Vertex l3Network2= g.addV().property("aai-node-type", "l3-network").property("network-id", "l3Network2")
+                               .property("model-invariant-id-local", "model-invariant-id-y").property("model-version-id-local", "model-version-id-y").next();
+               Vertex l3Network3= g.addV().property("aai-node-type", "l3-network").property("network-id", "l3Network3")
+                               .property("model-invariant-id-local", "model-invariant-id-8").property("model-version-id-local", "model-version-id-8").next();
+               edgeSerializer.addPrivateEdge(traversal, l3Network3, modelVer8, null);
+               
+               // Add model9/model-ver9
+               Vertex model9 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-9").property("model-type", "widget").next();
+               Vertex modelVer9 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-9")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-9/model-vers/model-ver/model-version-id-9")
+                               .property("model-name", "vf-module").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model9, modelVer9);
+               //vf-module
+               Vertex vfModule1= g.addV().property("aai-node-type", "vf-module").property("vf-module-id", "vfModule1")
+                               .property("model-invariant-id-local", "model-invariant-id-9").property("model-version-id-local", "model-version-id-9").next();
+               Vertex vfModule2= g.addV().property("aai-node-type", "vf-module").property("vf-module-id", "vfModule2")
+                               .property("model-invariant-id-local", "model-invariant-id-y").property("model-version-id-local", "model-version-id-y").next();
+               Vertex vfModule3= g.addV().property("aai-node-type", "vf-module").property("vf-module-id", "vfModule3")
+                               .property("model-invariant-id-local", "model-invariant-id-9").property("model-version-id-local", "model-version-id-9").next();
+               edgeSerializer.addPrivateEdge(traversal, vfModule3, modelVer9, null);
+               
+               // Add model10/model-ver10
+               Vertex model10 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-10").property("model-type", "widget").next();
+               Vertex modelVer10 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-10")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-10/model-vers/model-ver/model-version-id-10")
+                               .property("model-name", "collection").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model10, modelVer10);
+               //collection
+               Vertex collection1= g.addV().property("aai-node-type", "collection").property("collection-id", "collection1")
+                               .property("model-invariant-id-local", "model-invariant-id-10").property("model-version-id-local", "model-version-id-10").next();
+               Vertex collection2= g.addV().property("aai-node-type", "collection").property("collection-id", "collection2")
+                               .property("model-invariant-id-local", "model-invariant-id-y").property("model-version-id-local", "model-version-id-y").next();
+               Vertex collection3= g.addV().property("aai-node-type", "collection").property("collection-id", "collection3")
+                               .property("model-invariant-id-local", "model-invariant-id-10").property("model-version-id-local", "model-version-id-10").next();
+               edgeSerializer.addPrivateEdge(traversal, collection3, modelVer10, null);
+               
+               // Add model11/model-ver11
+               Vertex model11 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-11").property("model-type", "widget").next();
+               Vertex modelVer11 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-11")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-11/model-vers/model-ver/model-version-id-11")
+                               .property("model-name", "instance-group").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model11, modelVer11);
+               //instance-group
+               Vertex instanceGroup1= g.addV().property("aai-node-type", "instance-group").property("id", "instanceGroup1")
+                               .property("model-invariant-id-local", "model-invariant-id-11").property("model-version-id-local", "model-version-id-11").next();
+               Vertex instanceGroup2= g.addV().property("aai-node-type", "instance-group").property("id", "instanceGroup2")
+                               .property("model-invariant-id-local", "model-invariant-id-y").property("model-version-id-local", "model-version-id-y").next();
+               Vertex instanceGroup3= g.addV().property("aai-node-type", "instance-group").property("id", "instanceGroup3")
+                               .property("model-invariant-id-local", "model-invariant-id-11").property("model-version-id-local", "model-version-id-11").next();
+               edgeSerializer.addPrivateEdge(traversal, instanceGroup3, modelVer11, null);
+               
+               // Add model12/model-ver12
+               Vertex model12 = g.addV().property("aai-node-type", "model")
+                               .property("model-invariant-id", "model-invariant-id-12").property("model-type", "widget").next();
+               Vertex modelVer12 = g.addV().property("aai-node-type", "model-ver").property("model-version-id", "model-version-id-12")
+                               .property("aai-uri", "/service-design-and-creation/models/model/model-invariant-id-12/model-vers/model-ver/model-version-id-12")
+                               .property("model-name", "allotted-resource").property("model-version", "v1.0").next();
+               edgeSerializer.addTreeEdge(g, model12, modelVer12);
+               //allotted-resource
+               Vertex allottedResource1= g.addV().property("aai-node-type", "allotted-resource").property("id", "allottedResource1")
+                               .property("model-invariant-id-local", "model-invariant-id-12").property("model-version-id-local", "model-version-id-12").next();
+               Vertex allottedResource2= g.addV().property("aai-node-type", "allotted-resource").property("id", "allottedResource2")
+                               .property("model-invariant-id-local", "model-invariant-id-y").property("model-version-id-local", "model-version-id-y").next();
+               Vertex allottedResource3= g.addV().property("aai-node-type", "allotted-resource").property("id", "allottedResource3")
+                               .property("model-invariant-id-local", "model-invariant-id-12").property("model-version-id-local", "model-version-id-12").next();
+               edgeSerializer.addPrivateEdge(traversal, allottedResource3, modelVer12, null);
+               
+               
+               migration = new MigrateModelVer(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+
+	/** Discards all fixture writes (rollback before close) and releases the in-memory graph. */
+	@After
+	public void cleanUp() {
+		tx.rollback();
+		graph.close();
+	}
+
+       @Test
+       public void checkEdgeCreatedForConnector() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "connector").has("resource-instance-id", "connector1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-1").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "connector").has("resource-instance-id", "connector2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-1").hasNext());
+
+               assertEquals("Edge exists to 2 connectors", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-1")
+                                               .in().count().next());
+       }
+
+
+       @Test
+       public void checkEdgeCreatedForSerivceInstance() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "serviceinstance1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-2").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "service-instance").has("service-instance-id", "serviceinstance2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-2").hasNext());
+
+               assertEquals("Edge exists to only 2 service-instances", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-2")
+                                               .in().count().next());
+       }
+       
+       @Test
+       public void checkEdgeCreatedForPnf() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "pnf").has("pnf-name", "pnfName1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-3").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "pnf").has("pnf-name", "pnfName2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-3").hasNext());
+
+               assertEquals("Edge exists to only 2 pnfs", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-3")
+                                               .in().count().next());
+       }
+       
+       @Test
+       public void checkEdgeCreatedForLogicalLink() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "logical-link").has("link-name", "linkName1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-4").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "logical-link").has("link-name", "linkName2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-4").hasNext());
+
+               assertEquals("Edge exists to only 2 logical-link", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-4")
+                                               .in().count().next());
+       }
+       
+       @Test
+       public void checkEdgeCreatedForVnfc() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-5").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "vnfc").has("vnfc-name", "vnfc2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-5").hasNext());
+
+               assertEquals("Edge exists to only 2 logical-link", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-5")
+                                               .in().count().next());
+       }
+       
+       @Test
+       public void checkEdgeCreatedForGenericVnf() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "vnf1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-6").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "generic-vnf").has("vnf-id", "vnf2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-6").hasNext());
+
+               assertEquals("Edge exists to only 2 generic-vnfs", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-6")
+                                               .in().count().next());
+       }
+       
+       @Test
+       public void checkEdgeCreatedForConfiguration() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "configuration").has("configuration-id", "configuration1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-7").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "configuration").has("configuration-id", "configuration2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-7").hasNext());
+
+               assertEquals("Edge exists to only 2 configurations", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-7")
+                                               .in().count().next());
+       }
+       
+       @Test
+       public void checkEdgeCreatedForl3Network() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3Network1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-8").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "l3-network").has("network-id", "l3Network2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-8").hasNext());
+
+               assertEquals("Edge exists to only 2 l3-networks", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-8")
+                                               .in().count().next());
+       }
+       
+       @Test
+       public void checkEdgeCreatedForVfModule() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vfModule1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-9").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "vf-module").has("vf-module-id", "vfModule2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-9").hasNext());
+
+               assertEquals("Edge exists to only 2 vf-modules", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-9")
+                                               .in().count().next());
+       }
+       
+       @Test
+       public void checkEdgeCreatedForCollection() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "collection").has("collection-id", "collection1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-10").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "collection").has("collection-id", "collection2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-10").hasNext());
+
+               assertEquals("Edge exists to only 2 collections", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-10")
+                                               .in().count().next());
+       }
+       
+       @Test
+       public void checkEdgeCreatedForInstanceGroup() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "instance-group").has("id", "instanceGroup1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-11").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "instance-group").has("id", "instanceGroup2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-11").hasNext());
+
+               assertEquals("Edge exists to only 2 instance-groups", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-11")
+                                               .in().count().next());
+       }
+       
+       @Test
+       public void checkEdgeCreatedForAllottedResource() {
+               assertEquals(true,
+                               g.V().has("aai-node-type", "allotted-resource").has("id", "allottedResource1")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-12").hasNext());
+
+               assertEquals( "Edge not created", false,
+                               g.V().has("aai-node-type", "allotted-resource").has("id", "allottedResource2")
+                                               .out()
+                                               .has("aai-node-type", "model-ver").has("model-version-id","model-version-id-12").hasNext());
+
+               assertEquals("Edge exists to only 2 allotted-resource", new Long(2L),
+                               g.V().has("aai-node-type", "model-ver").has("model-version-id", "model-version-id-12")
+                                               .in().count().next());
+       }
+
+}
diff --git a/src/test/java/org/onap/aai/migration/v13/MigratePServerAndPnfEquipTypeTest.java b/src/test/java/org/onap/aai/migration/v13/MigratePServerAndPnfEquipTypeTest.java
new file mode 100644 (file)
index 0000000..adce73e
--- /dev/null
@@ -0,0 +1,138 @@
+/**\r
+ * ============LICENSE_START=======================================================\r
+ * org.onap.aai\r
+ * ================================================================================\r
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.\r
+ * ================================================================================\r
+ * Licensed under the Apache License, Version 2.0 (the "License");\r
+ * you may not use this file except in compliance with the License.\r
+ * You may obtain a copy of the License at\r
+ *\r
+ *    http://www.apache.org/licenses/LICENSE-2.0\r
+ *\r
+ * Unless required by applicable law or agreed to in writing, software\r
+ * distributed under the License is distributed on an "AS IS" BASIS,\r
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+ * See the License for the specific language governing permissions and\r
+ * limitations under the License.\r
+ * ============LICENSE_END=========================================================\r
+ */\r
+package org.onap.aai.migration.v13;\r
+\r
+import static org.junit.Assert.assertEquals;\r
+import static org.mockito.Mockito.spy;\r
+import static org.mockito.Mockito.when;\r
+\r
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;\r
+import org.apache.tinkerpop.gremlin.structure.Vertex;\r
+import org.junit.After;\r
+import org.junit.Before;\r
+import org.junit.Test;\r
+import org.onap.aai.AAISetup;\r
+import org.onap.aai.dbmap.DBConnectionType;\r
+import org.onap.aai.introspection.Loader;\r
+import org.onap.aai.introspection.LoaderFactory;\r
+import org.onap.aai.introspection.ModelType;\r
+import org.onap.aai.setup.SchemaVersions;\r
+import org.onap.aai.setup.SchemaVersion;\r
+import org.onap.aai.serialization.engines.QueryStyle;\r
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;\r
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;\r
+\r
+import org.janusgraph.core.JanusGraphFactory;\r
+import org.janusgraph.core.JanusGraph;\r
+import org.janusgraph.core.JanusGraphTransaction;\r
+\r
+\r
+public class MigratePServerAndPnfEquipTypeTest extends AAISetup{\r
+\r
+    private final static ModelType introspectorFactoryType = ModelType.MOXY;\r
+    private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;\r
+    private final static DBConnectionType type = DBConnectionType.REALTIME;\r
+    private Loader loader;\r
+    private TransactionalGraphEngine dbEngine;\r
+    private JanusGraph graph;\r
+    private MigratePserverAndPnfEquipType migration;\r
+    private GraphTraversalSource g;\r
+    private JanusGraphTransaction tx;\r
+    Vertex pserver1;\r
+    Vertex pserver2;\r
+    Vertex pnf1;\r
+    Vertex pserver3;\r
+    Vertex pnf2;\r
+    Vertex pnf22;\r
+\r
+\r
+    @Before\r
+    public void setUp() throws Exception {\r
+        graph = JanusGraphFactory.build().set("storage.backend", "inmemory").open();\r
+        tx = graph.newTransaction();\r
+        g = tx.traversal();\r
+        loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());\r
+        dbEngine = new JanusGraphDBEngine(\r
+                queryStyle,\r
+                type,\r
+                loader);\r
+         pserver1 = g.addV().property("aai-node-type", MigratePserverAndPnfEquipType.PSERVER_NODE_TYPE)\r
+                .property( MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY, "Server")\r
+                .next();\r
+         \r
+         pserver2 = g.addV().property("aai-node-type", MigratePserverAndPnfEquipType.PSERVER_NODE_TYPE)\r
+                 .property( MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY, "server")\r
+                 .next();\r
+         \r
+         pnf1 = g.addV().property("aai-node-type", MigratePserverAndPnfEquipType.PNF_NODE_TYPE)\r
+                 .property( MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY, "Switch")\r
+                 .next();\r
+         pnf22 = g.addV().property("aai-node-type", MigratePserverAndPnfEquipType.PNF_NODE_TYPE)\r
+                 .property( MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY, "switch")\r
+                 .next();\r
+\r
+         pserver3 = g.addV().property("aai-node-type", MigratePserverAndPnfEquipType.PSERVER_NODE_TYPE)\r
+                 .property( MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY, "server1")\r
+                 .next();\r
+         \r
+         pnf2 = g.addV().property("aai-node-type", MigratePserverAndPnfEquipType.PNF_NODE_TYPE)\r
+                 .property( MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY, "Switch1")\r
+                 .next();\r
+\r
+        TransactionalGraphEngine spy = spy(dbEngine);\r
+        TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());\r
+        GraphTraversalSource traversal = g;\r
+        when(spy.asAdmin()).thenReturn(adminSpy);\r
+        when(adminSpy.getTraversalSource()).thenReturn(traversal);\r
+        migration = new MigratePserverAndPnfEquipType(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);\r
+        migration.run();\r
+    }\r
+\r
+    @After\r
+    public void cleanUp() {\r
+        tx.rollback();\r
+        graph.close();\r
+    }\r
+\r
+\r
+    /***\r
+     * checks if the Equip Type value was changed\r
+     */\r
+\r
+    @Test\r
+    public void confirmEquipTypeChanged() {\r
+\r
+        assertEquals("SERVER",pserver1.property(MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY).value());\r
+        assertEquals("SERVER",pserver2.property(MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY).value());\r
+        assertEquals("SWITCH",pnf1.property(MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY).value());\r
+        assertEquals("SWITCH",pnf22.property(MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY).value());\r
+    }\r
+    \r
+    @Test\r
+    public void verifyEquipTypeIsNotChanged() {\r
+       assertEquals("server1",pserver3.property(MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY).value());\r
+        assertEquals("Switch1",pnf2.property(MigratePserverAndPnfEquipType.EQUIP_TYPE_PROPERTY).value());\r
+    }\r
+    \r
+    \r
+    \r
+\r
+\r
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateVnfcModelInvariantIdTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateVnfcModelInvariantIdTest.java
new file mode 100644 (file)
index 0000000..d546eea
--- /dev/null
@@ -0,0 +1,110 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+public class MigrateVnfcModelInvariantIdTest extends AAISetup{
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private MigrateVnfcModelInvariantId migration;
+       private JanusGraphTransaction tx;
+       private GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               Vertex vnfc1 = g.addV().property("aai-node-type", "vnfc").property("model-invariant-id", "vnfc-invariant-id-1")
+                .property("vnfcName", "vnfc-name-1").property("nfcNamingCode", "naming-code-1")
+                .property("nfcFunction", "function-1")
+                .property("model-version-id", "vnfc-version-id-1").next();
+
+        Vertex vnfc2 = g.addV().property("aai-node-type", "vnfc").property("model-invariant-id-local", "vnfc-invariant-id-2")
+                .property("vnfcName", "vnfc-name-2").property("nfcNamingCode", "naming-code-2")
+                .property("nfcFunction", "function-2")
+                .property("model-version-id-local", "vnfc-version-id-2").next();
+
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when (spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+
+               migration = new MigrateVnfcModelInvariantId(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+
+       @After
+       public void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+
+       @Test
+    public void testIdsUpdated() throws Exception {
+        assertEquals(true,
+                g.V().has("aai-node-type", "vnfc").has("vnfcName", "vnfc-name-1").has("model-invariant-id-local").next().property("model-invariant-id-local").isPresent());
+        assertEquals("model-invariant-id renamed to model-invariant-id-local for vnfc", "vnfc-invariant-id-1",
+                g.V().has("aai-node-type", "vnfc").has("vnfcName", "vnfc-name-1").next().value("model-invariant-id-local").toString());
+    }
+
+    @Test
+    public void testIdsNotUpdated() throws Exception {
+           assertEquals("model-invariant-id-local should not be renamed for vnfc", "vnfc-invariant-id-2",
+                g.V().has("aai-node-type", "vnfc").has("vnfcName", "vnfc-name-2").next().value("model-invariant-id-local").toString());
+    }
+}
diff --git a/src/test/java/org/onap/aai/migration/v13/MigrateVnfcModelVersionIdTest.java b/src/test/java/org/onap/aai/migration/v13/MigrateVnfcModelVersionIdTest.java
new file mode 100644 (file)
index 0000000..b74756e
--- /dev/null
@@ -0,0 +1,112 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.migration.v13;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.when;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.process.traversal.strategy.verification.ReadOnlyStrategy;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.junit.*;
+import org.onap.aai.AAISetup;
+import org.onap.aai.dbmap.DBConnectionType;
+import org.onap.aai.introspection.Loader;
+import org.onap.aai.introspection.LoaderFactory;
+import org.onap.aai.introspection.ModelType;
+import org.onap.aai.setup.SchemaVersions;
+import org.onap.aai.setup.SchemaVersion;
+import org.onap.aai.serialization.engines.QueryStyle;
+import org.onap.aai.serialization.engines.JanusGraphDBEngine;
+import org.onap.aai.serialization.engines.TransactionalGraphEngine;
+
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphTransaction;
+
+public class MigrateVnfcModelVersionIdTest extends AAISetup{
+
+       private final static ModelType introspectorFactoryType = ModelType.MOXY;
+       private final static QueryStyle queryStyle = QueryStyle.TRAVERSAL;
+       private final static DBConnectionType type = DBConnectionType.REALTIME;
+
+       private Loader loader;
+       private TransactionalGraphEngine dbEngine;
+       private JanusGraph graph;
+       private MigrateVnfcModelVersionId migration;
+       private JanusGraphTransaction tx;
+       private GraphTraversalSource g;
+
+       @Before
+       public void setUp() throws Exception {
+               graph = JanusGraphFactory.build().set("storage.backend","inmemory").open();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+               loader = loaderFactory.createLoaderForVersion(introspectorFactoryType, schemaVersions.getDefaultVersion());
+               dbEngine = new JanusGraphDBEngine(
+                               queryStyle,
+                               type,
+                               loader);
+
+               Vertex vnfc1 = g.addV().property("aai-node-type", "vnfc").property("model-version-id", "vnfc-version-id-1")
+                .property("vnfcName", "vnfc-name-1").property("nfcNamingCode", "naming-code-1")
+                .property("nfcFunction", "function-1")
+                .property("model-version-id", "vnfc-version-id-1").next();
+
+        Vertex vnfc2 = g.addV().property("aai-node-type", "vnfc").property("model-version-id-local", "vnfc-version-id-2")
+                .property("vnfcName", "vnfc-name-2").property("nfcNamingCode", "naming-code-1")
+                .property("nfcFunction", "function-2")
+                .property("model-version-id-local", "vnfc-version-id-2").next();
+
+               TransactionalGraphEngine spy = spy(dbEngine);
+               TransactionalGraphEngine.Admin adminSpy = spy(dbEngine.asAdmin());
+
+               GraphTraversalSource traversal = g;
+               GraphTraversalSource readOnly = tx.traversal(GraphTraversalSource.build().with(ReadOnlyStrategy.instance()));
+               when (spy.tx()).thenReturn(tx);
+               when(spy.asAdmin()).thenReturn(adminSpy);
+               when(adminSpy.getTraversalSource()).thenReturn(traversal);
+               when(adminSpy.getReadOnlyTraversalSource()).thenReturn(readOnly);
+
+               migration = new MigrateVnfcModelVersionId(spy, loaderFactory, edgeIngestor, edgeSerializer, schemaVersions);
+               migration.run();
+       }
+
+       @After
+       public void cleanUp() {
+               tx.tx().rollback();
+               graph.close();
+       }
+
+       @Test
+    public void testIdsUpdated() throws Exception {
+        assertEquals(true,
+                g.V().has("aai-node-type", "vnfc").has("vnfcName", "vnfc-name-1").has("model-version-id-local").next().property("model-version-id-local").isPresent());
+        assertEquals("model-version-id renamed to model-version-id-local for vnfc", "vnfc-version-id-1",
+                g.V().has("aai-node-type", "vnfc").has("vnfcName", "vnfc-name-1").next().value("model-version-id-local").toString());
+    }
+
+    @Test
+    public void testIdsNotUpdated() throws Exception {
+        assertEquals("model-version-id-local should not be renamed for vnfc", "vnfc-version-id-2",
+                g.V().has("aai-node-type", "vnfc").has("vnfcName", "vnfc-name-2").next().value("model-version-id-local").toString());
+    }
+}
diff --git a/src/test/java/org/onap/aai/schema/db/ManageSchemaTest.java b/src/test/java/org/onap/aai/schema/db/ManageSchemaTest.java
new file mode 100644 (file)
index 0000000..ddaad21
--- /dev/null
@@ -0,0 +1,111 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.schema.db;
+
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.map.JsonMappingException;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.schema.JanusGraphManagement;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.onap.aai.AAISetup;
+import org.onap.aai.db.schema.DBIndex;
+import org.onap.aai.db.schema.ManageJanusGraphSchema;
+
+import java.io.IOException;
+import java.util.Set;
+
+@Ignore("not ready yet")
+public class ManageSchemaTest extends AAISetup {
+
+       private JanusGraph graph = null;
+
+       @Before
+       public void beforeTest() {
+               graph = JanusGraphFactory.open("bundleconfig-local/etc/appprops/aaiconfig.properties");
+       }
+       
+       /*
+       @Test
+       public void populateEmptyGraph() {
+               ManageJanusGraphSchema schema = new ManageJanusGraphSchema(graph);
+               schema.buildSchema();
+       }
+       
+       @Test
+       public void modifyIndex() {
+               ManageJanusGraphSchema schema = new ManageJanusGraphSchema(graph);
+               schema.buildSchema();
+               Vertex v = graph.addVertex();
+               v.setProperty("aai-node-type", "pserver");
+               v.setProperty("hostname", "test1");
+               v.setProperty("internet-topology", "test2");
+               graph.commit();
+               DBIndex index = new DBIndex();
+               index.setName("internet-topology");
+               index.setUnique(false);
+               schema.updateIndex(index);
+               
+       }
+       */
+       @Test
+       public void closeRunningInstances() {
+               
+               JanusGraphManagement mgmt = graph.openManagement();
+               Set<String> instances = mgmt.getOpenInstances();
+               
+               for (String instance : instances) {
+                       
+                       if (!instance.contains("(current)")) {
+                               mgmt.forceCloseInstance(instance);
+                       }
+               }
+               mgmt.commit();
+               
+               graph.close();
+               
+       }
+       @Test
+       public void addNewIndex() throws JsonParseException, JsonMappingException, IOException {
+               ObjectMapper mapper = new ObjectMapper();
+               String content = " {\r\n" + 
+                               "    \"name\" : \"equipment-name\",\r\n" + 
+                               "    \"unique\" : false,\r\n" + 
+                               "    \"properties\" : [ {\r\n" + 
+                               "      \"name\" : \"equipment-name\",\r\n" + 
+                               "      \"cardinality\" : \"SINGLE\",\r\n" + 
+                               "      \"typeClass\" : \"java.lang.String\"\r\n" + 
+                               "    } ]\r\n" + 
+                               "  }";
+               DBIndex index = mapper.readValue(content, DBIndex.class);
+               ManageJanusGraphSchema schema = new ManageJanusGraphSchema(graph, auditorFactory, schemaVersions);
+               JanusGraphManagement mgmt = graph.openManagement();
+               Set<String> instances = mgmt.getOpenInstances();
+               System.out.println(instances);
+               schema.updateIndex(index);
+               
+               graph.close();
+               
+       }
+       
+}
diff --git a/src/test/java/org/onap/aai/util/SendDeleteMigrationNotificationsTest.java b/src/test/java/org/onap/aai/util/SendDeleteMigrationNotificationsTest.java
new file mode 100644 (file)
index 0000000..5df4a75
--- /dev/null
@@ -0,0 +1,166 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.*;
+import org.junit.runners.MethodSorters;
+import org.mockito.Mockito;
+import org.onap.aai.AAISetup;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.introspection.Introspector;
+import org.onap.aai.migration.EventAction;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
+import java.util.*;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.*;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class SendDeleteMigrationNotificationsTest extends AAISetup {
+
+       private final static String FILE = "./test.txt";
+
+       private static AtomicBoolean graphCreated = new AtomicBoolean(false);
+
+       private JanusGraph graph;
+       private JanusGraphTransaction tx;
+       private GraphTraversalSource g;
+
+       private static final String REALTIME_CONFIG = "./src/main/resources/etc/appprops/janusgraph-realtime.properties";
+
+       @Before
+       public void setUp() throws Exception {
+               System.setProperty("realtime.db.config", REALTIME_CONFIG);
+               AAIGraph.getInstance();
+               graph = AAIGraph.getInstance().getGraph();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+
+               createFile();
+       }
+
+       public void createFile() throws AAIException, IOException {
+               
+               /*String str = "pserver#@#/cloud-infrastructure/pservers/pserver/mtunj102sd9#@#{\"hostname\":\"mtunj102sd9\",\"ptnii-equip-name\":\"mtunj102sd9\",\"equip-type\":\"SERVER\",\"equip-vendor\":\"HP\",\"equip-model\":\"DL380p9-nd\",\"fqdn\":\"mtunjrsv102.mtunj.sbcglobal.net\",\"ipv4-oam-address\":\"10.64.220.7\",\"resource-version\":\"1523039038578\",\"purpose\":\"LCPA-3.0\",\"relationship-list\":{\"relationship\":[{\"related-to\":\"complex\",\"relationship-label\":\"org.onap.relationships.inventory.LocatedIn\",\"related-link\":\"/aai/v14/cloud-infrastructure/complexes/complex/MDTWNJ21A5\",\"relationship-data\":[{\"relationship-key\":\"complex.physical-location-id\",\"relationship-value\":\"MDTWNJ21A5\"}]}]}}";
+               Files.write(Paths.get(FILE), str.getBytes());
+               graphCreated.compareAndSet(false, true);
+               */
+           if(!graphCreated.get()){
+                       Vertex pserver1 = g.addV()
+                                       .property("aai-node-type", "pserver")
+                                       .property("hostname", SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-1")
+                                       .property(AAIProperties.RESOURCE_VERSION, "333")
+                                       .next();
+                       
+                       Vertex pserver2 = g.addV()
+                                       .property("aai-node-type", "pserver")
+                                       .property("hostname", SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-2")
+                                       .property(AAIProperties.RESOURCE_VERSION, "334")
+                                       .next();
+                       
+                       Vertex pserver3 = g.addV()
+                                       .property("aai-node-type", "pserver")
+                                       .property("hostname", SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-3")
+                                       .property(AAIProperties.RESOURCE_VERSION, "335")
+                                       .next();
+                       
+                       Vertex pserver4 = g.addV()
+                                       .property("aai-node-type", "pserver")
+                                       .property("hostname", SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-4")
+                                       .property(AAIProperties.RESOURCE_VERSION, "336")
+                                       .next();
+
+                       tx.commit();
+                       
+                       try{
+                               Files.createFile(Paths.get(FILE));
+                       }catch(Exception e) {
+                               e.printStackTrace();
+                       }
+                       String finalStr = "";
+                       finalStr = "pserver" + "#@#" + "/cloud-infrastructure/pservers/pserver/"+SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-1" + "#@#" + "{\"hostname\":\""+ SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-1\",\"resource-version\":\"333\"}" + "\n";
+                       Files.write(Paths.get(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
+                       finalStr = "pserver" + "#@#" + "/cloud-infrastructure/pservers/pserver/"+SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-2" + "#@#" + "{\"hostname\":\""+ SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-2\",\"resource-version\":\"334\"}" + "\n";
+                       Files.write(Paths.get(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
+                       finalStr = "pserver" + "#@#" + "/cloud-infrastructure/pservers/pserver/"+SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-3" + "#@#" + "{\"hostname\":\""+ SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-3\",\"resource-version\":\"335\"}" + "\n";
+                       Files.write(Paths.get(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
+                       finalStr = "pserver" + "#@#" + "/cloud-infrastructure/pservers/pserver/"+SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-4" + "#@#" + "{\"hostname\":\""+ SendDeleteMigrationNotifications.class.getSimpleName()+"-pserver-4\",\"resource-version\":\"336\"}" + "\n";
+                       Files.write(Paths.get(FILE), finalStr.getBytes(),StandardOpenOption.APPEND);
+                       graphCreated.compareAndSet(false, true);
+               }
+       }
+       @AfterClass
+       public static void cleanUp() throws IOException {
+               Files.delete(Paths.get(FILE));
+       }
+
+       @After
+       public void tearDown() throws IOException {
+               if (tx.isOpen()) {
+                       tx.tx().rollback();
+               }
+       }
+
+       @Test
+       public void processEverything() throws Exception {
+               SendDeleteMigrationNotifications s  = spy(new SendDeleteMigrationNotifications(
+                               loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, Collections.EMPTY_SET, 0, 0, "test", EventAction.DELETE, "DMAAP-LOAD"));
+               doNothing().when(s).trigger();
+               doNothing().when(s).cleanup();
+               s.process("/aai/");
+               assertEquals("1 events are created ", 4, s.notificationHelper.getNotifications().getEvents().size());
+
+       }
+
+       @Test
+       public void processEverythingBatched2() throws Exception {
+               SendDeleteMigrationNotifications s  = spy(new SendDeleteMigrationNotifications(
+                               loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, Collections.EMPTY_SET, 0, 2, "test", EventAction.DELETE, "DMAAP-LOAD"));
+               doNothing().when(s).trigger();
+               doNothing().when(s).cleanup();
+               s.process("/aai/");
+               Mockito.verify(s, times(2)).trigger();
+
+       }
+
+       @Test
+       public void processEverythingBatched3() throws Exception {
+               SendDeleteMigrationNotifications s  = spy(new SendDeleteMigrationNotifications(
+                               loaderFactory, schemaVersions,  REALTIME_CONFIG, FILE, Collections.EMPTY_SET, 0, 3, "test", EventAction.DELETE, "DMAAP-LOAD"));
+               doNothing().when(s).trigger();
+               doNothing().when(s).cleanup();
+               s.process("/aai/");
+               Mockito.verify(s, times(2)).trigger();
+
+       }
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/onap/aai/util/SendMigrationNotificationsTest.java b/src/test/java/org/onap/aai/util/SendMigrationNotificationsTest.java
new file mode 100644 (file)
index 0000000..6719d0f
--- /dev/null
@@ -0,0 +1,184 @@
+/**
+ * ============LICENSE_START=======================================================
+ * org.onap.aai
+ * ================================================================================
+ * Copyright © 2017-2018 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+package org.onap.aai.util;
+
+import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphTransaction;
+import org.junit.*;
+import org.junit.runners.MethodSorters;
+import org.mockito.Mockito;
+import org.onap.aai.AAISetup;
+import org.onap.aai.db.props.AAIProperties;
+import org.onap.aai.dbmap.AAIGraph;
+import org.onap.aai.exceptions.AAIException;
+import org.onap.aai.migration.EventAction;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.*;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.*;
+
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class SendMigrationNotificationsTest extends AAISetup {
+
+       private final static String FILE = "./test.txt";
+
+       private static AtomicBoolean graphCreated = new AtomicBoolean(false);
+
+       private JanusGraph graph;
+       private JanusGraphTransaction tx;
+       private GraphTraversalSource g;
+
+       private static final String REALTIME_CONFIG = "./src/main/resources/etc/appprops/janusgraph-realtime.properties";
+
+       @Before
+       public void setUp() throws Exception {
+               System.setProperty("realtime.db.config", REALTIME_CONFIG);
+               AAIGraph.getInstance();
+               graph = AAIGraph.getInstance().getGraph();
+               tx = graph.newTransaction();
+               g = tx.traversal();
+
+               createGraph();
+       }
+
+       public void createGraph() throws AAIException, IOException {
+           if(!graphCreated.get()){
+                       Vertex pnf1 = g.addV()
+                                       .property("aai-node-type", "pnf")
+                                       .property("pnf-name", SendMigrationNotifications.class.getSimpleName()+"-pnf-1")
+                                       .property(AAIProperties.RESOURCE_VERSION, "123")
+                                       .next();
+
+                       Vertex pnf2 = g.addV()
+                                       .property("aai-node-type", "pnf")
+                                       .property("pnf-name", SendMigrationNotifications.class.getSimpleName()+"-pnf-2")
+                                       .property(AAIProperties.RESOURCE_VERSION, "456")
+                                       .next();
+
+                       Vertex pnf3 = g.addV()
+                                       .property("aai-node-type", "pnf")
+                                       .property("pnf-name", SendMigrationNotifications.class.getSimpleName()+"-pnf-3")
+                                       .property(AAIProperties.RESOURCE_VERSION, "111")
+                                       .next();
+
+                       Vertex pinterface1 = g.addV()
+                                       .property("aai-node-type", "p-interface")
+                                       .property("interface-name", SendMigrationNotifications.class.getSimpleName()+"-pinterface-1")
+                                       .property(AAIProperties.RESOURCE_VERSION, "789")
+                                       .next();
+
+                       Vertex pserver1 = g.addV()
+                                       .property("aai-node-type", "pserver")
+                                       .property("hostname", SendMigrationNotifications.class.getSimpleName()+"-pserver-1")
+                                       .property(AAIProperties.RESOURCE_VERSION, "333")
+                                       .next();
+
+                       edgeSerializer.addTreeEdge(g, pnf1, pinterface1);
+
+                       tx.commit();
+
+                       List<String> list = new ArrayList<>();
+                       list.add(pnf1.id().toString() + "_123"); // valid
+                       list.add(pnf2.id().toString() + "_345"); // invalid: no longer the current resource version
+                       list.add(pnf2.id().toString() + "_456"); // valid: same as above but with the correct resource version
+                       list.add(pinterface1.id().toString() + "_789"); // valid
+                       list.add(pnf3.id().toString() + "_222"); // invalid: wrong resource version
+                       list.add("345_345"); // invalid
+                       list.add(pserver1.id().toString() + "_333"); // valid
+                       Files.write(Paths.get(FILE), (Iterable<String>)list.stream()::iterator);
+                       graphCreated.compareAndSet(false, true);
+               }
+       }
+       @AfterClass
+       public static void cleanUp() throws IOException {
+               Files.delete(Paths.get(FILE));
+       }
+
+       @After
+       public void tearDown() throws IOException {
+               if (tx.isOpen()) {
+                       tx.tx().rollback();
+               }
+       }
+
+       @Test
+       public void processEverything() throws Exception {
+               SendMigrationNotifications s  = spy(new SendMigrationNotifications(
+                               loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, Collections.EMPTY_SET, 0, 0, "test", EventAction.UPDATE, "DMAAP-LOAD"));
+               doNothing().when(s).trigger();
+               doNothing().when(s).cleanup();
+               s.process("/aai/");
+               assertEquals("4 events are created ", 4, s.notificationHelper.getNotifications().getEvents().size());
+
+       }
+
+       @Test
+       public void processOnlyPnfs() throws Exception {
+               SendMigrationNotifications s  = spy(new SendMigrationNotifications(
+                               loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, new HashSet<>(Arrays.asList("pnf")), 0, 0, "test", EventAction.UPDATE, "DMAAP-LOAD"));
+               doNothing().when(s).trigger();
+               doNothing().when(s).cleanup();
+               s.process("/aai/");
+               assertEquals("2 events are created ", 2, s.notificationHelper.getNotifications().getEvents().size());
+
+       }
+
+       @Test
+       public void processOnlyPnfsAndPservers() throws Exception {
+               SendMigrationNotifications s  = spy(new SendMigrationNotifications(
+                               loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, new HashSet<>(Arrays.asList("pserver","pnf")), 0, 0, "test", EventAction.UPDATE, "DMAAP-LOAD"));
+               doNothing().when(s).trigger();
+               doNothing().when(s).cleanup();
+               s.process("/aai/");
+               assertEquals("3 events are created ", 3, s.notificationHelper.getNotifications().getEvents().size());
+
+       }
+
+       @Test
+       public void processEverythingBatched2() throws Exception {
+               SendMigrationNotifications s  = spy(new SendMigrationNotifications(
+                               loaderFactory, schemaVersions, REALTIME_CONFIG, FILE, Collections.EMPTY_SET, 0, 2, "test", EventAction.UPDATE, "DMAAP-LOAD"));
+               doNothing().when(s).trigger();
+               doNothing().when(s).cleanup();
+               s.process("/aai/");
+               Mockito.verify(s, times(2)).trigger();
+
+       }
+
+       @Test
+       public void processEverythingBatched3() throws Exception {
+               SendMigrationNotifications s  = spy(new SendMigrationNotifications(
+                               loaderFactory, schemaVersions,  REALTIME_CONFIG, FILE, Collections.EMPTY_SET, 0, 3, "test", EventAction.UPDATE, "DMAAP-LOAD"));
+               doNothing().when(s).trigger();
+               doNothing().when(s).cleanup();
+               s.process("/aai/");
+               Mockito.verify(s, times(2)).trigger();
+
+       }
+
+}
\ No newline at end of file
diff --git a/src/test/resources/logback.xml b/src/test/resources/logback.xml
new file mode 100644 (file)
index 0000000..6a898f0
--- /dev/null
@@ -0,0 +1,392 @@
+<!--
+
+    ============LICENSE_START=======================================================
+    org.onap.aai
+    ================================================================================
+    Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+    ================================================================================
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+    ============LICENSE_END=========================================================
+
+    ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+-->
+<configuration scan="true" scanPeriod="60 seconds" debug="false">
+       <statusListener class="ch.qos.logback.core.status.NopStatusListener" />
+
+       <property resource="application.properties" />
+
+       <property name="namespace" value="graph-admin"/>
+
+       <property name="AJSC_HOME" value="${AJSC_HOME:-.}" />
+       <jmxConfigurator />
+       <property name="logDirectory" value="${AJSC_HOME}/logs" />
+       <property name="eelfLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+       <property name="eelfAuditLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n|\r\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+       <property name="eelfMetricLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{targetVirtualEntity}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+       <!--  <property name="eelfErrorLogPattern" value="%ecompStartTime|%X{requestId}|%-10t|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%ecompErrorCategory|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n|\r\n', '^'}|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/> -->
+       <property name="eelfErrorLogPattern" value="%ecompStartTime|%X{requestId}|%-10t|%X{serviceName}|%X{partnerName}|%X{targetEntity}|%X{targetServiceName}|%ecompErrorCategory|%ecompResponseCode|%ecompResponseDescription|co=%X{component}:%replace(%replace(%m){'\\|', '!'}){'\r|\n', '^'}%n"/>
+    <property name="eelfTransLogPattern" value="%ecompStartTime|%date{yyyy-MM-dd'T'HH:mm:ss.SSSZ, UTC}|%X{requestId}|%X{serviceInstanceId}|%-10t|%X{serverName}|%X{serviceName}|%X{partnerName}|%X{statusCode}|%X{responseCode}|%replace(%replace(%X{responseDescription}){'\\|', '!'}){'\r|\n', '^'}|%X{instanceUUID}|%level|%X{severity}|%X{serverIpAddress}|%ecompElapsedTime|%X{server}|%X{clientIpAddress}|%eelfClassOfCaller|%X{unused}|%X{processKey}|%X{customField1}|%X{customField2}|%X{customField3}|%X{customField4}|co=%X{partnerName}:%m%n"/>
+
+    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
+    <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
+    <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
+       <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+               <encoder>
+                       <pattern>
+                               %clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}
+                       </pattern>
+               </encoder>
+       </appender>
+
+       <appender name="SANE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <file>${logDirectory}/rest/sane.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/rest/sane.log.%d{yyyy-MM-dd}</fileNamePattern>
+               </rollingPolicy>
+               <encoder>
+                       <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} - %msg%n
+                       </pattern>
+               </encoder>
+       </appender>
+
+       <appender name="asyncSANE" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="SANE" />
+       </appender>
+
+       <appender name="METRIC"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <file>${logDirectory}/rest/metrics.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/rest/metrics.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfMetricLogPattern}</pattern>
+               </encoder>
+       </appender>
+       <appender name="asyncMETRIC" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="METRIC" />
+       </appender>
+
+       <appender name="DEBUG"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <file>${logDirectory}/rest/debug.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/rest/debug.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="asyncDEBUG" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="DEBUG" />
+       </appender>
+
+       <appender name="ERROR"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>WARN</level>
+               </filter>
+               <file>${logDirectory}/rest/error.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/rest/error.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfErrorLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="asyncERROR" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="ERROR" />
+       </appender>
+
+       <appender name="AUDIT"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <file>${logDirectory}/rest/audit.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/rest/audit.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfAuditLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="asyncAUDIT" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="AUDIT" />
+       </appender>
+
+       <appender name="translog"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <file>${logDirectory}/rest/translog.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/rest/translog.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfTransLogPattern}</pattern>
+               </encoder>
+       </appender>
+       
+       <appender name="asynctranslog" class="ch.qos.logback.classic.AsyncAppender">
+               <queueSize>1000</queueSize>
+               <includeCallerData>true</includeCallerData>
+               <appender-ref ref="translog" />
+       </appender>
+
+       <appender name="dmaapAAIEventConsumer"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>WARN</level>
+               </filter>
+               <File>${logDirectory}/dmaapAAIEventConsumer/error.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/error.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+
+       <appender name="dmaapAAIEventConsumerDebug"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>DEBUG</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/dmaapAAIEventConsumer/debug.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/debug.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+       <appender name="dmaapAAIEventConsumerMetric"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.LevelFilter">
+                       <level>INFO</level>
+                       <onMatch>ACCEPT</onMatch>
+                       <onMismatch>DENY</onMismatch>
+               </filter>
+               <File>${logDirectory}/dmaapAAIEventConsumer/metrics.log</File>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/dmaapAAIEventConsumer/metrics.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfMetricLogPattern}</pattern>
+               </encoder>
+       </appender>
+       <appender name="external"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+               <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+                       <level>WARN</level>
+               </filter>
+               <file>${logDirectory}/external/external.log</file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+                       <fileNamePattern>${logDirectory}/external/external.log.%d{yyyy-MM-dd}
+                       </fileNamePattern>
+               </rollingPolicy>
+               <encoder class="org.onap.aai.logging.EcompEncoder">
+                       <pattern>${eelfLogPattern}</pattern>
+               </encoder>
+       </appender>
+       <logger name="org.onap.aai" level="DEBUG" additivity="true">
+               <appender-ref ref="asyncDEBUG" />
+               <appender-ref ref="asyncERROR" />
+               <appender-ref ref="asyncMETRIC" />
+               <appender-ref ref="asyncSANE" />
+       </logger>
+
+       <!-- Spring related loggers -->
+       <logger name="org.springframework" level="WARN" />
+       <logger name="org.springframework.beans" level="WARN" />
+       <logger name="org.springframework.web" level="WARN" />
+       <logger name="com.blog.spring.jms" level="WARN" />
+       <logger name="com.jayway.jsonpath" level="WARN" />
+
+       <!-- AJSC Services (bootstrap services) -->
+       <logger name="ajsc" level="WARN" />
+       <logger name="ajsc.RouteMgmtService" level="WARN" />
+       <logger name="ajsc.ComputeService" level="WARN" />
+       <logger name="ajsc.VandelayService" level="WARN" />
+       <logger name="ajsc.FilePersistenceService" level="WARN" />
+       <logger name="ajsc.UserDefinedJarService" level="WARN" />
+       <logger name="ajsc.UserDefinedBeansDefService" level="WARN" />
+       <logger name="ajsc.LoggingConfigurationService" level="WARN" />
+
+       <!-- AJSC related loggers (DME2 Registration, csi logging, restlet, servlet 
+               logging) -->
+       <logger name="org.codehaus.groovy" level="WARN" />
+       <logger name="com.att.scamper" level="WARN" />
+       <logger name="ajsc.utils" level="WARN" />
+       <logger name="ajsc.utils.DME2Helper" level="WARN" />
+       <logger name="ajsc.filters" level="WARN" />
+       <logger name="ajsc.beans.interceptors" level="WARN" />
+       <logger name="ajsc.restlet" level="WARN" />
+       <logger name="ajsc.servlet" level="WARN" />
+       <logger name="com.att.ajsc" level="WARN" />
+       <logger name="com.att.ajsc.csi.logging" level="WARN" />
+       <logger name="com.att.ajsc.filemonitor" level="WARN" />
+       <logger name="com.netflix.loadbalancer" level="WARN" />
+
+       <logger name="org.apache.zookeeper" level="WARN" />
+
+       <!-- Other Loggers that may help troubleshoot -->
+       <logger name="net.sf" level="WARN" />
+       <logger name="org.apache.commons.httpclient" level="WARN" />
+       <logger name="org.apache.commons" level="WARN" />
+       <logger name="org.apache.coyote" level="WARN" />
+       <logger name="org.apache.jasper" level="WARN" />
+
+       <!-- Camel Related Loggers (including restlet/servlet/jaxrs/cxf logging. 
+               May aid in troubleshooting) -->
+       <logger name="org.apache.camel" level="WARN" />
+       <logger name="org.apache.cxf" level="WARN" />
+       <logger name="org.apache.camel.processor.interceptor" level="WARN" />
+       <logger name="org.apache.cxf.jaxrs.interceptor" level="WARN" />
+       <logger name="org.apache.cxf.service" level="WARN" />
+       <logger name="org.restlet" level="WARN" />
+       <logger name="org.apache.camel.component.restlet" level="WARN" />
+
+       <logger name="org.hibernate.validator" level="WARN" />
+       <logger name="org.hibernate" level="WARN" />
+       <logger name="org.hibernate.ejb" level="OFF" />
+
+       <!-- logback internals logging -->
+       <logger name="ch.qos.logback.classic" level="WARN" />
+       <logger name="ch.qos.logback.core" level="WARN" />
+
+       <logger name="org.eclipse.jetty" level="WARN" />
+
+       <!-- logback jms appenders & loggers definition starts here -->
+       <appender name="auditLogs"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<!-- NOTE(review): this ThresholdFilter declares no <level>, so it never
+		     starts and filters nothing; set a level (e.g. INFO) or remove it -->
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter" />
+               <file>${logDirectory}/perf-audit/Audit-${lrmRVer}-${lrmRO}-${Pid}.log
+               </file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+                       <fileNamePattern>${logDirectory}/perf-audit/Audit-${lrmRVer}-${lrmRO}-${Pid}.%i.log.zip
+                       </fileNamePattern>
+                       <minIndex>1</minIndex>
+                       <maxIndex>9</maxIndex>
+               </rollingPolicy>
+               <triggeringPolicy
+                       class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+                       <maxFileSize>5MB</maxFileSize>
+               </triggeringPolicy>
+               <encoder>
+			<pattern>%d [%thread] %-5level %logger{1024} - %msg%n</pattern>
+               </encoder>
+       </appender>
+       <appender name="perfLogs"
+               class="ch.qos.logback.core.rolling.RollingFileAppender">
+		<!-- NOTE(review): this ThresholdFilter declares no <level>, so it never
+		     starts and filters nothing; set a level (e.g. INFO) or remove it -->
+		<filter class="ch.qos.logback.classic.filter.ThresholdFilter" />
+               <file>${logDirectory}/perf-audit/Perform-${lrmRVer}-${lrmRO}-${Pid}.log
+               </file>
+               <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+                       <fileNamePattern>${logDirectory}/perf-audit/Perform-${lrmRVer}-${lrmRO}-${Pid}.%i.log.zip
+                       </fileNamePattern>
+                       <minIndex>1</minIndex>
+                       <maxIndex>9</maxIndex>
+               </rollingPolicy>
+               <triggeringPolicy
+                       class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+                       <maxFileSize>5MB</maxFileSize>
+               </triggeringPolicy>
+               <encoder>
+			<pattern>%d [%thread] %-5level %logger{1024} - %msg%n</pattern>
+               </encoder>
+       </appender>
+       <logger name="AuditRecord" level="INFO" additivity="false">
+               <appender-ref ref="auditLogs" />
+       </logger>
+       <logger name="AuditRecord_DirectCall" level="INFO" additivity="false">
+               <appender-ref ref="auditLogs" />
+       </logger>
+       <logger name="PerfTrackerRecord" level="INFO" additivity="false">
+               <appender-ref ref="perfLogs" />
+       </logger>
+       <!-- logback jms appenders & loggers definition ends here -->
+
+       <logger name="org.onap.aai.interceptors.post" level="DEBUG"
+                       additivity="false">
+               <appender-ref ref="asynctranslog" />
+       </logger>
+
+       <logger name="org.onap.aai.interceptors.pre.SetLoggingContext" level="DEBUG">
+               <appender-ref ref="asyncAUDIT"/>
+       </logger>
+
+       <logger name="org.onap.aai.interceptors.post.ResetLoggingContext" level="DEBUG">
+               <appender-ref ref="asyncAUDIT"/>
+       </logger>
+
+       <logger name="org.onap.aai.dmaap" level="DEBUG" additivity="false">
+               <appender-ref ref="dmaapAAIEventConsumer" />
+               <appender-ref ref="dmaapAAIEventConsumerDebug" />
+               <appender-ref ref="dmaapAAIEventConsumerMetric" />
+       </logger>
+
+       <logger name="org.apache" level="WARN" />
+	<!-- NOTE(review): "org.zookeeper" is likely a typo for "org.apache.zookeeper"
+	     (already configured above); as written this logger name matches nothing -->
+	<logger name="org.zookeeper" level="WARN" />
+       <logger name="org.janusgraph" level="WARN" />
+       <logger name="com.att.aft.dme2" level="WARN" />
+
+       <!-- ============================================================================ -->
+       <!-- General EELF logger -->
+       <!-- ============================================================================ -->
+       <logger name="com.att.eelf" level="WARN" additivity="false">
+               <appender-ref ref="asyncDEBUG" />
+               <appender-ref ref="asyncERROR" />
+               <appender-ref ref="asyncMETRIC" />
+       </logger>
+
+       <root level="DEBUG">
+               <appender-ref ref="external" />
+               <appender-ref ref="STDOUT" />
+       </root>
+</configuration>
diff --git a/src/test/resources/payloads/templates/cloud-region-with-linterface.json b/src/test/resources/payloads/templates/cloud-region-with-linterface.json
new file mode 100644 (file)
index 0000000..54fc44d
--- /dev/null
@@ -0,0 +1,57 @@
+{
+  "cloud-owner": "${cloud-owner}",
+  "cloud-region-id": "${cloud-region-id}",
+  "tenants": {
+    "tenant": [
+      {
+        "tenant-id": "${tenant-id}",
+        "tenant-name": "${tenant-name}",
+        "vservers": {
+          "vserver": [
+            {
+              "vserver-id": "${vserver-id}",
+              "vserver-name": "${vserver-name}",
+              "vserver-name2": "vserver-name-2",
+              "vserver-selflink": "vserver-selflink-1",
+              "in-maint": false,
+              "is-closed-loop-disabled": false,
+              "volumes": {},
+              "l-interfaces": {
+                "l-interface": [
+                  {
+                    "interface-name": "${interface-name}",
+                    "interface-role": "role",
+                    "v6-wan-link-ip": "test-v6-wan-link-ip",
+                    "selflink": "example-selflink-val-10559",
+                    "interface-id": "example-interface-id-val",
+                    "l3-interface-ipv4-address-list" : [ {
+                      "l3-interface-ipv4-address" : "${ipv4-address}",
+                      "l3-interface-ipv4-prefix-length" : 849,
+                      "vlan-id-inner" : 174,
+                      "vlan-id-outer" : 120,
+                      "is-floating" : true
+                    }],
+                    "l3-interface-ipv6-address-list" : [ {
+                      "l3-interface-ipv6-address" : "${ipv6-address}",
+                      "l3-interface-ipv6-prefix-length" : 509,
+                      "vlan-id-inner" : 890,
+                      "vlan-id-outer" : 192,
+                      "is-floating" : false
+                    }],
+                    "vlans" : {
+                      "vlan" : [ {
+                        "vlan-interface" : "${vlan-interface}",
+                        "vlan-id-inner" : 948,
+                        "vlan-id-outer" : 722,
+                        "speed-value" : "oR8Zpuk5",
+                        "speed-units" : "lZsf2I"
+                      }]}
+                  }]}
+
+            }
+          ]
+        }
+      }
+    ]
+  }
+}
diff --git a/src/test/resources/payloads/templates/cloud-region-with-vserver.json b/src/test/resources/payloads/templates/cloud-region-with-vserver.json
new file mode 100644 (file)
index 0000000..08d5b02
--- /dev/null
@@ -0,0 +1,32 @@
+{
+  "cloud-owner": "${cloud-owner}",
+  "cloud-region-id": "${cloud-region-id}",
+  "tenants": {
+    "tenant": [
+      {
+        "tenant-id": "${tenant-id}",
+        "tenant-name": "${tenant-name}",
+        "vservers": {
+          "vserver": [
+            {
+              "vserver-id": "${vserver-id}",
+              "vserver-name": "${vserver-name}",
+              "vserver-name2": "vserver-name-2",
+              "vserver-selflink": "vserver-selflink-1",
+              "in-maint": false,
+              "is-closed-loop-disabled": false,
+              "relationship-list": {
+                "relationship": [
+                  {
+                    "related-to": "pserver",
+                    "related-link": "${pserver-uri}"
+                  }
+                ]
+              }
+            }
+          ]
+        }
+      }
+    ]
+  }
+}
diff --git a/src/test/resources/payloads/templates/custom-query.json b/src/test/resources/payloads/templates/custom-query.json
new file mode 100644 (file)
index 0000000..6875a99
--- /dev/null
@@ -0,0 +1,4 @@
+{
+  "start": [ "${start}" ],
+  "query": "query/${query}"
+}
\ No newline at end of file
diff --git a/src/test/resources/payloads/templates/dsl-query.json b/src/test/resources/payloads/templates/dsl-query.json
new file mode 100644 (file)
index 0000000..60057b4
--- /dev/null
@@ -0,0 +1,4 @@
+{
+  "dsl": "${dsl-query}"
+}
+
diff --git a/src/test/resources/payloads/templates/generic-vnf.json b/src/test/resources/payloads/templates/generic-vnf.json
new file mode 100644 (file)
index 0000000..7b59251
--- /dev/null
@@ -0,0 +1,7 @@
+{
+  "vnf-id": "${vnf-id}",
+  "vnf-type": "someval",
+  "vnf-name": "someval",
+  "model-invariant-id": "${model-invariant-id}",
+  "model-version-id": "${model-version-id}"
+}
diff --git a/src/test/resources/payloads/templates/gremlin-query.json b/src/test/resources/payloads/templates/gremlin-query.json
new file mode 100644 (file)
index 0000000..b163261
--- /dev/null
@@ -0,0 +1,4 @@
+{
+  "gremlin": "${gremlin-query}"
+}
+
diff --git a/src/test/resources/payloads/templates/model-ver.json b/src/test/resources/payloads/templates/model-ver.json
new file mode 100644 (file)
index 0000000..e20e0a3
--- /dev/null
@@ -0,0 +1,5 @@
+{
+  "model-version-id": "${model-version-id}",
+  "model-name": "${model-name}",
+  "model-version": "${model-version}"
+}
\ No newline at end of file
diff --git a/src/test/resources/payloads/templates/model.json b/src/test/resources/payloads/templates/model.json
new file mode 100644 (file)
index 0000000..7eaa4c6
--- /dev/null
@@ -0,0 +1,4 @@
+{
+  "model-invariant-id": "${model-invariant-id}",
+  "model-type": "some-type"
+}
\ No newline at end of file
diff --git a/src/test/resources/payloads/templates/pserver.json b/src/test/resources/payloads/templates/pserver.json
new file mode 100644 (file)
index 0000000..dbaf2a9
--- /dev/null
@@ -0,0 +1,3 @@
+{
+  "hostname": "${hostname}"
+}
diff --git a/src/test/resources/schema-ingest.properties b/src/test/resources/schema-ingest.properties
new file mode 100644 (file)
index 0000000..a1949e6
--- /dev/null
@@ -0,0 +1,3 @@
+schema.configuration.location=N/A
+schema.nodes.location=src/main/resources/etc/oxm/
+schema.edges.location=src/main/resources/etc/dbedgerules/
diff --git a/src/test/resources/updateEdgeTestRules.json b/src/test/resources/updateEdgeTestRules.json
new file mode 100644 (file)
index 0000000..4da2b59
--- /dev/null
@@ -0,0 +1,60 @@
+{
+    "rules": [
+    {
+    "from": "generic-vnf",
+    "to": "l-interface",
+    "label": "hasLInterface",
+    "direction": "OUT",
+    "multiplicity": "MANY2MANY",
+    "contains-other-v": "!${direction}",
+    "delete-other-v": "!${direction}",
+    "SVC-INFRA": "${direction}",
+    "prevent-delete": "NONE",
+    "description": "An edge comment",
+    "default": "true",
+    "newProperty": "newValue"
+    },
+    {
+    "from": "pserver",
+    "to": "vnfc",
+    "label": "tosca.relationships.HostedOn",
+    "direction": "OUT",
+    "multiplicity": "MANY2MANY",
+    "contains-other-v": "!${direction}",
+    "delete-other-v": "!${direction}",
+    "SVC-INFRA": "${direction}",
+    "prevent-delete": "NONE",
+    "description": "A pserver/vnfc edge description",
+    "default": "true",
+    "newProperty": "newValue"
+    },
+    {
+    "from": "l-interface",
+    "to": "logical-link",
+    "label": "usesLogicalLink",
+    "direction": "OUT",
+    "multiplicity": "MANY2MANY",
+    "contains-other-v": "!${direction}",
+    "delete-other-v": "!${direction}",
+    "SVC-INFRA": "${direction}",
+    "prevent-delete": "NONE",
+    "description": "A l-interface/logical-link(1) edge description",
+    "default": "true",
+    "newProperty": "newValue"
+    },
+    {
+    "from": "l-interface",
+    "to": "logical-link",
+    "label": "sourceLInterface",
+    "direction": "OUT",
+    "multiplicity": "MANY2MANY",
+    "contains-other-v": "!${direction}",
+    "delete-other-v": "!${direction}",
+    "SVC-INFRA": "${direction}",
+    "prevent-delete": "NONE",
+    "description": "A l-interface/logical-link(2) edge description",
+    "default": "true",
+    "newProperty": "newValue"
+    }
+    ]
+}
\ No newline at end of file