Reformat asdctool 35/119935/2
author vasraz <vasyl.razinkov@est.tech>
Sat, 27 Mar 2021 15:43:05 +0000 (15:43 +0000)
committer Christophe Closset <christophe.closset@intl.att.com>
Mon, 29 Mar 2021 07:13:25 +0000 (07:13 +0000)
Signed-off-by: Vasyl Razinkov <vasyl.razinkov@est.tech>
Issue-ID: SDC-3449
Change-Id: Ide3e6fe0488c700b9197fb8e7e0f10a9282bf9df

97 files changed:
asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLIToolData.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLIUtils.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/SpringCLITool.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/SdcSchemaFileImportConfiguration.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/VrfObjectFixConfiguration.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifecycleStateEnum.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ComponentInstanceRow.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ComponentRow.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/JanusGraphInitializer.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ValidationToolBL.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutor.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/IArtifactValidatorExecutor.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/NodeToscaArtifactsValidatorExecutor.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ServiceToscaArtifactsValidatorExecutor.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/TopologyTemplateValidatorExecutor.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/VFToscaArtifactValidatorExecutor.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ValidatorExecutor.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/report/FileType.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/report/Report.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/report/ReportFile.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/report/ReportFileWriter.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/ServiceValidationTask.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/TopologyTemplateValidationTask.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/VfValidationTask.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactsVertexResult.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ServiceArtifactValidationTask.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/VfArtifactValidationTask.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/module/json/ModuleJsonTask.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/VertexResult.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/VfModuleArtifactPayloadEx.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/RemoveUtils.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ValidationTool.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/VrfObjectFixMenu.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/CambriaHandlerMock.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineClusterHealthMock.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DmaapHealthCheckMock.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/PortalHealthCheckBuilderMock.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/MigrationException.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/SdcMigrationTool.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutionResult.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutor.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/execution/MigrationExecutorImpl.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/IMigrationStage.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/Migration.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/MigrationResult.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksAccessor.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/OutputHandler.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/EntryPoint.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportJanusGraphServlet.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportCassandraTableTool.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportTableConfig.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEnvironment.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEvnironmentImportHandler.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/utils/ConsoleWriter.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/utils/ReportWriter.java

index b433357..db541a8 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool;
 
 import org.eclipse.jetty.server.Server;
@@ -26,41 +25,38 @@ import org.eclipse.jetty.servlet.ServletHolder;
 
 /**
  * Hello world!
- *
  */
 public class App {
-       public static void main(String[] args) {
-
-               String asdcToolPort = "8087";
-
-               ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
-               context.setContextPath("/asdctool");
-
-               Server jettyServer = new Server(Integer.valueOf(asdcToolPort));
-               jettyServer.setHandler(context);
 
-               ServletHolder jerseyServlet = context.addServlet(org.glassfish.jersey.servlet.ServletContainer.class, "/*");
-               jerseyServlet.setInitOrder(0);
-
-               // Tells the Jersey Servlet which REST service/class to load.
-               // jerseyServlet.setInitParameter("jersey.config.server.provider.classnames",
-               // EntryPoint.class.getCanonicalName());
-               jerseyServlet.setInitParameter("jersey.config.server.provider.packages", "org.openecomp.sdc.asdctool.servlets");
-               jerseyServlet.setInitParameter("jersey.config.server.provider.classnames",
-                               "org.glassfish.jersey.media.multipart.MultiPartFeature");
-
-               try {
-                       jettyServer.start();
-
-                       System.out.println("Server was started on port " + asdcToolPort);
-
-                       jettyServer.join();
-
-               } catch (Exception e) {
-                       e.printStackTrace();
-                       System.exit(1);
-               } finally {
-                       jettyServer.destroy();
-               }
-       }
+    public static void main(String[] args) {
+        String asdcToolPort = "8087";
+        ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+        context.setContextPath("/asdctool");
+        Server jettyServer = new Server(Integer.valueOf(asdcToolPort));
+        jettyServer.setHandler(context);
+        ServletHolder jerseyServlet = context.addServlet(org.glassfish.jersey.servlet.ServletContainer.class, "/*");
+        jerseyServlet.setInitOrder(0);
+
+        // Tells the Jersey Servlet which REST service/class to load.
+        // jerseyServlet.setInitParameter("jersey.config.server.provider.classnames",
+        // EntryPoint.class.getCanonicalName());
+        jerseyServlet.setInitParameter("jersey.config.server.provider.packages", "org.openecomp.sdc.asdctool.servlets");
+        jerseyServlet.setInitParameter("jersey.config.server.provider.classnames",
+            "org.glassfish.jersey.media.multipart.MultiPartFeature");
+
+        try {
+            jettyServer.start();
+
+            System.out.println("Server was started on port " + asdcToolPort);
+
+            jettyServer.join();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            System.exit(1);
+        } finally {
+            jettyServer.destroy();
+        }
+    }
 }
+
index 0f44968..5debda2 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool;
 
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.ResponseBuilder;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.configuration.Configuration;
 import org.apache.tinkerpop.gremlin.structure.Element;
@@ -29,18 +34,10 @@ import org.janusgraph.core.JanusGraph;
 import org.janusgraph.core.JanusGraphFactory;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Optional;
-
 public class Utils {
 
-    private static final Logger log = Logger.getLogger(Utils.class.getName());
-
     public static final String NEW_LINE = System.getProperty("line.separator");
+    private static final Logger log = Logger.getLogger(Utils.class.getName());
 
     private Utils() {
     }
@@ -75,8 +72,9 @@ public class Utils {
     }
 
     public static boolean vertexLeftContainsRightProps(Map<String, Object> leftProps, Map<String, Object> rightProps) {
-        if (rightProps == null)
+        if (rightProps == null) {
             return true;
+        }
         for (Entry<String, Object> entry : rightProps.entrySet()) {
             String key = entry.getKey();
             Object leftValue = leftProps.get(key);
@@ -111,15 +109,11 @@ public class Utils {
 
     public static Map<String, Object> getProperties(Element element) {
         Map<String, Object> result = new HashMap<>();
-
         if (element != null && CollectionUtils.isNotEmpty(element.keys())) {
-            Map<String, Property> propertyMap = ElementHelper.propertyMap(element,
-                    element.keys().toArray(new String[element.keys().size()]));
-
+            Map<String, Property> propertyMap = ElementHelper.propertyMap(element, element.keys().toArray(new String[element.keys().size()]));
             for (Entry<String, Property> entry : propertyMap.entrySet()) {
                 String key = entry.getKey();
                 Object value = entry.getValue().value();
-
                 result.put(key, value);
             }
         }
index d4ebff6..dce10bd 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.cli;
 
 import org.apache.commons.cli.CommandLine;
@@ -45,10 +44,9 @@ public abstract class CLITool {
         Options options = buildCmdLineOptions();
         CommandLineParser parser = new DefaultParser();
         try {
-            return parser.parse( options, args );
-        }
-        catch( ParseException exp ) {
-            LOGGER.error("Parsing failed.  Reason: " + exp.getMessage() );
+            return parser.parse(options, args);
+        } catch (ParseException exp) {
+            LOGGER.error("Parsing failed.  Reason: " + exp.getMessage());
             usageAndExit(options);
             return null;
         }
@@ -56,21 +54,17 @@ public abstract class CLITool {
 
     private void usageAndExit(Options options) {
         HelpFormatter formatter = new HelpFormatter();
-        formatter.printHelp(commandName(), options );
+        formatter.printHelp(commandName(), options);
         System.exit(1);
     }
 
     /**
-     *
      * @return all command line options required by this command line tool
      */
     protected abstract Options buildCmdLineOptions();
 
     /**
-     *
      * @return the command name
      */
     protected abstract String commandName();
-
-
 }
index e75f866..eec5bd5 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.cli;
 
 import org.apache.commons.cli.CommandLine;
index 379c8dd..8e33bd3 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.cli;
 
 import org.apache.commons.cli.Option;
@@ -27,15 +26,11 @@ public class CLIUtils {
     static final String CONFIG_PATH_SHORT_OPT = "c";
     private static final String CONFIG_PATH_LONG_OPT = "configFolderPath";
 
-    private CLIUtils(){}
+    private CLIUtils() {
+    }
 
     public static Option getConfigurationPathOption() {
-        return Option.builder(CONFIG_PATH_SHORT_OPT)
-                .longOpt(CONFIG_PATH_LONG_OPT)
-                .required()
-                .hasArg()
-                .desc("path to sdc configuration folder - required")
-                .build();
+        return Option.builder(CONFIG_PATH_SHORT_OPT).longOpt(CONFIG_PATH_LONG_OPT).required().hasArg()
+            .desc("path to sdc configuration folder - required").build();
     }
-
 }
index 471b931..ad8bb07 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.cli;
 
 import org.apache.commons.cli.Options;
@@ -45,7 +44,6 @@ public abstract class SpringCLITool extends CLITool {
     }
 
     /**
-     *
      * @return the {@code Class} which holds all the spring bean declaration needed by this cli tool
      */
     protected abstract Class<?> getSpringConfigurationClass();
index 9923436..1eb4b2a 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.configuration;
 
-
 import org.openecomp.sdc.asdctool.impl.ArtifactUuidFix;
 import org.openecomp.sdc.be.config.CatalogModelSpringConfig;
 import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
@@ -38,15 +36,12 @@ import org.springframework.context.annotation.Import;
 @Configuration
 @Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
 @ComponentScan({"org.openecomp.sdc.asdctool.migration.tasks",//migration tasks
-        "org.openecomp.sdc.asdctool.migration.config.mocks"
-                })
+    "org.openecomp.sdc.asdctool.migration.config.mocks"})
 public class ArtifactUUIDFixConfiguration {
 
     @Bean
-    public ArtifactUuidFix artifactUuidFix(JanusGraphDao janusGraphDao,
-        ToscaOperationFacade toscaOperationFacade, ToscaExportHandler toscaExportHandler,
-        ArtifactCassandraDao artifactCassandraDao, CsarUtils csarUtils) {
+    public ArtifactUuidFix artifactUuidFix(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade,
+                                           ToscaExportHandler toscaExportHandler, ArtifactCassandraDao artifactCassandraDao, CsarUtils csarUtils) {
         return new ArtifactUuidFix(janusGraphDao, toscaOperationFacade, toscaExportHandler, artifactCassandraDao, csarUtils);
     }
-    
 }
index 975066f..e1bffea 100644 (file)
  * Modifications copyright (c) 2019 Nokia
  * ================================================================================
  */
-
 package org.openecomp.sdc.asdctool.configuration;
 
+import java.io.File;
 import org.openecomp.sdc.be.config.ConfigurationManager;
 import org.openecomp.sdc.common.api.ConfigurationSource;
 import org.openecomp.sdc.common.impl.ExternalConfiguration;
 import org.openecomp.sdc.common.impl.FSConfigurationSource;
 
-import java.io.File;
-
 public class ConfigurationUploader {
 
     public static void uploadConfigurationFiles(String appConfigDir) {
@@ -41,7 +39,7 @@ public class ConfigurationUploader {
 
     private static String buildArtifactGeneratorPath(String appConfigDir) {
         StringBuilder artifactGeneratorPath = new StringBuilder(appConfigDir);
-        if(!appConfigDir.endsWith(File.separator)){
+        if (!appConfigDir.endsWith(File.separator)) {
             artifactGeneratorPath.append(File.separator);
         }
         artifactGeneratorPath.append(ConfigurationManager.getConfigurationManager().getConfiguration().getArtifactGeneratorConfig());
index ee4d2c1..a55d965 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.configuration;
 
 import org.openecomp.sdc.asdctool.impl.internal.tool.CsarGenerator;
@@ -36,17 +35,12 @@ import org.springframework.context.annotation.Import;
 
 @Configuration
 @Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
-@ComponentScan({"org.openecomp.sdc.asdctool.migration.config.mocks"
-            })
+@ComponentScan({"org.openecomp.sdc.asdctool.migration.config.mocks"})
 public class CsarGeneratorConfiguration {
 
     @Bean
-    public CsarGenerator csarGenerator(JanusGraphDao janusGraphDao, CsarUtils csarUtils,
-        ToscaOperationFacade toscaOperationFacade,
-        ArtifactCassandraDao artifactCassandraDao, ToscaExportHandler toscaExportHandler) {
-        return new CsarGenerator(janusGraphDao, csarUtils, toscaOperationFacade,
-            artifactCassandraDao, toscaExportHandler);
+    public CsarGenerator csarGenerator(JanusGraphDao janusGraphDao, CsarUtils csarUtils, ToscaOperationFacade toscaOperationFacade,
+                                       ArtifactCassandraDao artifactCassandraDao, ToscaExportHandler toscaExportHandler) {
+        return new CsarGenerator(janusGraphDao, csarUtils, toscaOperationFacade, artifactCassandraDao, toscaExportHandler);
     }
-
-
 }
index 1b09b2a..c0d65b2 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.configuration;
 
 import org.openecomp.sdc.asdctool.impl.internal.tool.DeleteComponentHandler;
@@ -33,13 +32,10 @@ import org.springframework.context.annotation.Import;
 @Configuration
 @Import({DAOSpringConfig.class, CatalogModelSpringConfig.class})
 public class InternalToolConfiguration {
-    
+
     @Bean
-    public DeleteComponentHandler deleteComponentHandler(
-        JanusGraphDao janusGraphDao,
-        NodeTypeOperation nodeTypeOperation,
-        TopologyTemplateOperation topologyTemplateOperation) {
+    public DeleteComponentHandler deleteComponentHandler(JanusGraphDao janusGraphDao, NodeTypeOperation nodeTypeOperation,
+                                                         TopologyTemplateOperation topologyTemplateOperation) {
         return new DeleteComponentHandler(janusGraphDao, nodeTypeOperation, topologyTemplateOperation);
     }
-   
 }
index 0cfd894..fe12ef1 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.configuration;
 
 import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
@@ -28,14 +27,13 @@ import org.springframework.context.annotation.Configuration;
 @Configuration
 public class SdcSchemaFileImportConfiguration {
 
+    @Bean(name = "cassandra-client")
+    public CassandraClient cassandraClient() {
+        return new CassandraClient();
+    }
 
-       @Bean(name = "cassandra-client")
-       public CassandraClient cassandraClient() {
-               return new CassandraClient();
-       }
-       @Bean(name = "sdc-schema-files-cassandra-dao")
-       public SdcSchemaFilesCassandraDao sdcSchemaFilesCassandraDao() {
-               return new SdcSchemaFilesCassandraDao(cassandraClient());
-       }
-
+    @Bean(name = "sdc-schema-files-cassandra-dao")
+    public SdcSchemaFilesCassandraDao sdcSchemaFilesCassandraDao() {
+        return new SdcSchemaFilesCassandraDao(cassandraClient());
+    }
 }
index af18d63..4df895f 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.configuration;
 
 import org.openecomp.sdc.asdctool.impl.VrfObjectFixHandler;
@@ -34,25 +33,23 @@ import org.springframework.context.annotation.Primary;
 public class VrfObjectFixConfiguration {
 
     @Bean(name = "janusgraph-dao")
-    public JanusGraphDao janusGraphDao(@Qualifier("janusgraph-client") JanusGraphClient janusGraphClient){
+    public JanusGraphDao janusGraphDao(@Qualifier("janusgraph-client") JanusGraphClient janusGraphClient) {
         return new JanusGraphDao(janusGraphClient);
     }
 
     @Bean(name = "janusgraph-client")
     @Primary
-    public JanusGraphClient janusGraphClient(@Qualifier("dao-client-strategy")
-                                            JanusGraphClientStrategy janusGraphClientStrategy) {
+    public JanusGraphClient janusGraphClient(@Qualifier("dao-client-strategy") JanusGraphClientStrategy janusGraphClientStrategy) {
         return new JanusGraphClient(janusGraphClientStrategy);
     }
 
-    @Bean(name ="dao-client-strategy")
+    @Bean(name = "dao-client-strategy")
     public JanusGraphClientStrategy janusGraphClientStrategy() {
         return new DAOJanusGraphStrategy();
     }
 
     @Bean
-    public VrfObjectFixHandler vrfObjectFixHandler(@Qualifier("janusgraph-dao")
-                                                       JanusGraphDao janusGraphDao){
+    public VrfObjectFixHandler vrfObjectFixHandler(@Qualifier("janusgraph-dao") JanusGraphDao janusGraphDao) {
         return new VrfObjectFixHandler(janusGraphDao);
     }
 }
index e376d17..9f01557 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.enums;
 
 public enum LifecycleStateEnum {
-
-    READY_FOR_CERTIFICATION,
-    CERTIFICATION_IN_PROGRESS,
-    CERTIFIED,
-    NOT_CERTIFIED_CHECKIN,
-    NOT_CERTIFIED_CHECKOUT;
-
-}
\ No newline at end of file
+    READY_FOR_CERTIFICATION, CERTIFICATION_IN_PROGRESS, CERTIFIED, NOT_CERTIFIED_CHECKIN, NOT_CERTIFIED_CHECKOUT;
+}
index a764b8d..ac5c01f 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.enums;
 
 import static org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum.SchemaZipConstants.DATA_IMPORT_LIST;
@@ -30,7 +29,7 @@ import lombok.Getter;
 @Getter
 @AllArgsConstructor
 public enum SchemaZipFileEnum {
-
+    // @formatter:off
     DATA("data", "data-types", "dataTypes", "data_types", EMPTY_IMPORT_LIST),
     GROUPS("groups", "group-types", "groupTypes", "group_types", DATA_IMPORT_LIST),
     POLICIES("policies", "policy-types", "policyTypes", "policy_types", DATA_IMPORT_LIST),
@@ -39,6 +38,7 @@ public enum SchemaZipFileEnum {
     ARTIFACTS("artifacts", "artifact-types", "artifactTypes", "artifact_types", DATA_IMPORT_LIST),
     CAPABILITIES("capabilities", "capability-types", "capabilityTypes", "capability_types", DATA_IMPORT_LIST),
     INTERFACES("interfaces", "interface-lifecycle-types", "interfaceLifecycleTypes", "interface_types", DATA_IMPORT_LIST);
+    // @formatter:on
 
     private final String fileName;
     private final String sourceFolderName;
@@ -47,9 +47,9 @@ public enum SchemaZipFileEnum {
     private final String[] importFileList;
 
     static class SchemaZipConstants {
-        static final String [] EMPTY_IMPORT_LIST =  new String[]{};
-        static final String [] DATA_IMPORT_LIST =  new String[]{"data.yml"};
-        static final String [] RELATIONSHIPS_TYPES_IMPORT_LIST = new String[]{"capabilities.yml", "data.yml", "interfaces.yml"};
-    }
 
+        static final String[] EMPTY_IMPORT_LIST = new String[]{};
+        static final String[] DATA_IMPORT_LIST = new String[]{"data.yml"};
+        static final String[] RELATIONSHIPS_TYPES_IMPORT_LIST = new String[]{"capabilities.yml", "data.yml", "interfaces.yml"};
+    }
 }
index 2a77b25..b256f12 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
@@ -95,9 +94,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 public class ArtifactUuidFix {
 
     private static final String MIGRATION1707_ARTIFACT_UUID_FIX = "Migration1707ArtifactUuidFix  fix group:  group name {} correct artifactId {} artifactUUID {} ";
-
     private static final String FAILED_TO_FETCH_VF_RESOURCES = "Failed to fetch vf resources ";
-
     private static Logger log = Logger.getLogger(ArtifactUuidFix.class.getName());
     private JanusGraphDao janusGraphDao;
     private ToscaOperationFacade toscaOperationFacade;
@@ -106,8 +103,7 @@ public class ArtifactUuidFix {
     private CsarUtils csarUtils;
 
     @Autowired
-    public ArtifactUuidFix(JanusGraphDao janusGraphDao,
-                           ToscaOperationFacade toscaOperationFacade, ToscaExportHandler toscaExportUtils,
+    public ArtifactUuidFix(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade, ToscaExportHandler toscaExportUtils,
                            ArtifactCassandraDao artifactCassandraDao, CsarUtils csarUtils) {
         this.janusGraphDao = janusGraphDao;
         this.toscaOperationFacade = toscaOperationFacade;
@@ -122,11 +118,8 @@ public class ArtifactUuidFix {
         Map<String, List<Component>> nodeToFixTosca = new HashMap<>();
         Map<String, List<Component>> vfToFixTosca = new HashMap<>();
         Map<String, List<Component>> serviceToFixTosca = new HashMap<>();
-
         long time = System.currentTimeMillis();
-
         doFixTosca(nodeToFixTosca, vfToFixTosca, serviceToFixTosca);
-
         if ("vf_only".equals(fixComponent)) {
             if (!fetchFaultVf(vfLst, time)) {
                 return false;
@@ -152,7 +145,6 @@ public class ArtifactUuidFix {
             }
             log.info("Mode {}. Fix finished with success", runMode);
         }
-
         return true;
     }
 
@@ -161,15 +153,12 @@ public class ArtifactUuidFix {
         String fileName = "fault_" + time + ".csv";
         try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
             writer.write("vf name, vf id, state, version\n");
-
             Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
             hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
             hasProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.VF.name());
-
             Map<GraphPropertyEnum, Object> hasNotProps = new EnumMap<>(GraphPropertyEnum.class);
             hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
             log.info("Try to fetch resources with properties {} and not {}", hasProps, hasNotProps);
-
             Either<List<GraphVertex>, JanusGraphOperationStatus> servicesByCriteria = janusGraphDao
                 .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps, hasNotProps, JsonParseFlagEnum.ParseAll);
             if (servicesByCriteria.isRight()) {
@@ -182,14 +171,11 @@ public class ArtifactUuidFix {
                 filter.setIgnoreComponentInstances(false);
                 filter.setIgnoreArtifacts(false);
                 filter.setIgnoreGroups(false);
-
-                Either<Resource, StorageOperationStatus> toscaElement = toscaOperationFacade
-                    .getToscaElement(gv.getUniqueId());
+                Either<Resource, StorageOperationStatus> toscaElement = toscaOperationFacade.getToscaElement(gv.getUniqueId());
                 if (toscaElement.isRight()) {
                     log.info("Failed to fetch resources {} {}", gv.getUniqueId(), toscaElement.right().value());
                     continue;
                 }
-
                 Resource resource = toscaElement.left().value();
                 String resourceName = resource.getName();
                 Map<String, ArtifactDefinition> deploymentArtifacts = resource.getDeploymentArtifacts();
@@ -212,7 +198,6 @@ public class ArtifactUuidFix {
                 }
                 janusGraphDao.commit();
             }
-
         } catch (Exception e) {
             log.info(FAILED_TO_FETCH_VF_RESOURCES, e);
             return false;
@@ -242,11 +227,9 @@ public class ArtifactUuidFix {
                         filter.setIgnoreComponentInstances(false);
                         filter.setIgnoreArtifacts(false);
                         filter.setIgnoreGroups(false);
-                        Either<Resource, StorageOperationStatus> toscaElement = toscaOperationFacade
-                            .getToscaElement(ci.getComponentUid(), filter);
+                        Either<Resource, StorageOperationStatus> toscaElement = toscaOperationFacade.getToscaElement(ci.getComponentUid(), filter);
                         if (toscaElement.isRight()) {
-                            log.info("Failed to fetch resource {} {}", ci.getComponentUid(),
-                                toscaElement.right().value());
+                            log.info("Failed to fetch resource {} {}", ci.getComponentUid(), toscaElement.right().value());
                             continue;
                         }
                         Resource resource = toscaElement.left().value();
@@ -254,7 +237,6 @@ public class ArtifactUuidFix {
                             vfLst.add(resource);
                             writeModuleResultToFile(writer, resource, service);
                             writer.flush();
-
                         }
                         janusGraphDao.commit();
                     }
@@ -275,18 +257,15 @@ public class ArtifactUuidFix {
         String fileName = "problemService_" + time + ".csv";
         try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
             writer.write("service name, service id, state, version\n");
-
             Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
             hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
             if ("distributed_only".equals(fixServices)) {
                 hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
                 hasProps.put(GraphPropertyEnum.DISTRIBUTION_STATUS, DistributionStatusEnum.DISTRIBUTED.name());
             }
-
             Map<GraphPropertyEnum, Object> hasNotProps = new EnumMap<>(GraphPropertyEnum.class);
             hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
             log.info("Try to fetch services with properties {} and not {}", hasProps, hasNotProps);
-
             Either<List<GraphVertex>, JanusGraphOperationStatus> servicesByCriteria = janusGraphDao
                 .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps, hasNotProps, JsonParseFlagEnum.ParseAll);
             if (servicesByCriteria.isRight()) {
@@ -299,25 +278,19 @@ public class ArtifactUuidFix {
                 filter.setIgnoreComponentInstances(false);
                 filter.setIgnoreArtifacts(false);
                 filter.setIgnoreGroups(false);
-
-                Either<Service, StorageOperationStatus> toscaElement = toscaOperationFacade
-                    .getToscaElement(gv.getUniqueId(), filter);
+                Either<Service, StorageOperationStatus> toscaElement = toscaOperationFacade.getToscaElement(gv.getUniqueId(), filter);
                 if (toscaElement.isRight()) {
                     log.info("Failed to fetch service {} {}", gv.getUniqueId(), toscaElement.right().value());
                     continue;
                 }
                 Service service = toscaElement.left().value();
-
                 String serviceName = (String) gv.getMetadataProperty(GraphPropertyEnum.NAME);
-
                 boolean isProblematic = isProblematicService(service, serviceName);
                 if (isProblematic) {
                     serviceList.add(service);
                     writeModuleResultToFile(writer, service, null);
                     writer.flush();
-
                 }
-
                 janusGraphDao.commit();
             }
             log.info("output file with list of services : {}", fileName);
@@ -331,9 +304,7 @@ public class ArtifactUuidFix {
     }
 
     private boolean isProblematicService(Service service, String serviceName) {
-
         List<ComponentInstance> componentInstances = service.getComponentInstances();
-
         if (componentInstances == null) {
             log.info("No instances for service {} ", service.getUniqueId());
             return false;
@@ -347,19 +318,16 @@ public class ArtifactUuidFix {
             Map<String, ArtifactDefinition> deploymentArtifacts = ci.getDeploymentArtifacts();
             List<GroupInstance> groupInstances = ci.getGroupInstances();
             if (groupInstances == null || groupInstances.isEmpty()) {
-                log.info("No instance groups for instance {} in service {} id {} ", ci.getName(), serviceName,
-                    service.getUniqueId());
+                log.info("No instance groups for instance {} in service {} id {} ", ci.getName(), serviceName, service.getUniqueId());
                 continue;
             }
             List<VfModuleArtifactPayloadEx> vfModules = null;
             if (isCheckVFModules) {
                 Optional<ArtifactDefinition> optionalVfModuleArtifact = deploymentArtifacts.values().stream()
                     .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.VF_MODULES_METADATA.getType())).findAny();
-
                 if (!optionalVfModuleArtifact.isPresent()) {
                     return true;
                 }
-
                 ArtifactDefinition vfModuleArtifact = optionalVfModuleArtifact.get();
                 Either<List<VfModuleArtifactPayloadEx>, StorageOperationStatus> vfModulesEither = parseVFModuleJson(vfModuleArtifact);
                 if (vfModulesEither.isRight()) {
@@ -372,7 +340,6 @@ public class ArtifactUuidFix {
                     return true;
                 }
             }
-
             for (GroupInstance gi : groupInstances) {
                 if (gi.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) {
                     VfModuleArtifactPayloadEx vfModule = null;
@@ -390,17 +357,14 @@ public class ArtifactUuidFix {
                     }
                 }
             }
-
         }
         return false;
     }
 
-    private boolean isProblematicGroup(GroupDefinition gr, String resourceName,
-                                       Map<String, ArtifactDefinition> deploymentArtifacts) {
+    private boolean isProblematicGroup(GroupDefinition gr, String resourceName, Map<String, ArtifactDefinition> deploymentArtifacts) {
         List<String> artifacts = gr.getArtifacts();
         List<String> artifactsUuid = gr.getArtifactsUuid();
         Set<String> artifactsSet = new HashSet<>();
-
         if ((artifactsUuid == null || artifactsUuid.isEmpty()) && (artifacts == null || artifacts.isEmpty())) {
             log.info("No groups in resource {} ", resourceName);
             return true;
@@ -410,42 +374,36 @@ public class ArtifactUuidFix {
             log.info(" artifactsSet.size() < artifacts.size() group {} in resource {} ", gr.getName(), resourceName);
             return true;
         }
-
         if ((artifactsUuid != null) && (artifacts.size() < artifactsUuid.size())) {
             log.info(" artifacts.size() < artifactsUuid.size() group {} in resource {} ", gr.getName(), resourceName);
             return true;
         }
         if (!artifacts.isEmpty() && (artifactsUuid == null || artifactsUuid.isEmpty())) {
-            log.info(
-                " artifacts.size() > 0 && (artifactsUuid == null || artifactsUuid.isEmpty() group {} in resource {} ",
-                gr.getName(), resourceName);
+            log.info(" artifacts.size() > 0 && (artifactsUuid == null || artifactsUuid.isEmpty() group {} in resource {} ", gr.getName(),
+                resourceName);
             return true;
         }
         if (artifactsUuid != null && artifactsUuid.contains(null)) {
             log.info(" artifactsUuid.contains(null) group {} in resource {} ", gr.getName(), resourceName);
             return true;
         }
-
         for (String artifactId : artifacts) {
             String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
             ArtifactDefinition artifactDefinition = deploymentArtifacts.get(artifactlabel);
             if (artifactDefinition == null) {
-                log.info(" artifactDefinition == null label {} group {} in resource {} ", artifactlabel, gr.getName(),
-                    resourceName);
+                log.info(" artifactDefinition == null label {} group {} in resource {} ", artifactlabel, gr.getName(), resourceName);
                 return true;
             }
             ArtifactTypeEnum artifactType = ArtifactTypeEnum.parse(artifactDefinition.getArtifactType());
             if (artifactType != ArtifactTypeEnum.HEAT_ENV) {
                 if (!artifactId.equals(artifactDefinition.getUniqueId())) {
-                    log.info(
-                        " !artifactId.equals(artifactDefinition.getUniqueId() artifact {}  artId {} group {} in resource {} ",
-                        artifactlabel, artifactId, gr.getName(), resourceName);
+                    log.info(" !artifactId.equals(artifactDefinition.getUniqueId() artifact {}  artId {} group {} in resource {} ", artifactlabel,
+                        artifactId, gr.getName(), resourceName);
                     return true;
                 }
                 if (artifactsUuid != null && !artifactsUuid.contains(artifactDefinition.getArtifactUUID())) {
-                    log.info(
-                        " artifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} group {} in resource {} ",
-                        artifactlabel, gr.getName(), resourceName);
+                    log.info(" artifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} group {} in resource {} ", artifactlabel,
+                        gr.getName(), resourceName);
                     return true;
                 }
             }
@@ -456,7 +414,6 @@ public class ArtifactUuidFix {
                 return true;
             }
         }
-
         return false;
     }
 
@@ -468,7 +425,6 @@ public class ArtifactUuidFix {
         List<String> instArtifactsId = gi.getGroupInstanceArtifacts();
         Set<String> instArtifatIdSet = new HashSet<>();
         Set<String> artifactsSet = new HashSet<>();
-
         log.info("check group {} for instance {} ", gi.getGroupName(), instName);
         if ((artifactsUuid == null || artifactsUuid.isEmpty()) && (artifacts == null || artifacts.isEmpty())) {
             log.info("No instance groups for instance {} in service {} ", instName, servicename);
@@ -479,19 +435,15 @@ public class ArtifactUuidFix {
             log.info(" artifactsSet.size() < artifacts.size() group {} in resource {} ", instName, servicename);
             return true;
         }
-
         if (instArtifactsId != null && !instArtifactsId.isEmpty()) {
             instArtifatIdSet.addAll(instArtifactsId);
         }
-
         if ((artifactsUuid != null) && (artifacts.size() < artifactsUuid.size())) {
             log.info(" artifacts.size() < artifactsUuid.size() inst {} in service {} ", instName, servicename);
             return true;
         }
         if (!artifacts.isEmpty() && (artifactsUuid == null || artifactsUuid.isEmpty())) {
-            log.info(
-                " artifacts.size() > 0 && (artifactsUuid == null || artifactsUuid.isEmpty() inst {} in service {} ",
-                instName, servicename);
+            log.info(" artifacts.size() > 0 && (artifactsUuid == null || artifactsUuid.isEmpty() inst {} in service {} ", instName, servicename);
             return true;
         }
         if (artifactsUuid != null && artifactsUuid.contains(null)) {
@@ -502,46 +454,37 @@ public class ArtifactUuidFix {
             log.info(" instArtifatIdSet.size() < instArtifactsId.size() inst {} in service {} ", instName, servicename);
             return true;
         }
-
-        if ((instArtifactsId != null && instArtifactsUuid != null)
-            && instArtifactsId.size() != instArtifactsUuid.size()) {
-            log.info(" instArtifactsId.size() != instArtifactsUuid.size() inst {} in service {} ", instName,
-                servicename);
+        if ((instArtifactsId != null && instArtifactsUuid != null) && instArtifactsId.size() != instArtifactsUuid.size()) {
+            log.info(" instArtifactsId.size() != instArtifactsUuid.size() inst {} in service {} ", instName, servicename);
             return true;
         }
-
         for (String artifactId : artifacts) {
             String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
             ArtifactDefinition artifactDefinition = deploymentArtifacts.get(artifactlabel);
             if (artifactDefinition == null) {
-                log.info(" artifactDefinition == null label {} inst {} in service {} ", artifactlabel, instName,
-                    servicename);
+                log.info(" artifactDefinition == null label {} inst {} in service {} ", artifactlabel, instName, servicename);
                 return true;
             }
             ArtifactTypeEnum artifactType = ArtifactTypeEnum.parse(artifactDefinition.getArtifactType());
             if (artifactType != ArtifactTypeEnum.HEAT_ENV) {
                 if (!artifactId.equals(artifactDefinition.getUniqueId())) {
-                    log.info(
-                        " !artifactId.equals(artifactDefinition.getUniqueId() artifact {}  artId {} inst {} in service {} ",
-                        artifactlabel, artifactId, instName, servicename);
+                    log.info(" !artifactId.equals(artifactDefinition.getUniqueId() artifact {}  artId {} inst {} in service {} ", artifactlabel,
+                        artifactId, instName, servicename);
                     return true;
                 }
                 if (artifactsUuid != null && !artifactsUuid.contains(artifactDefinition.getArtifactUUID())) {
-                    log.info(
-                        " artifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} inst {} in service {} ",
-                        artifactlabel, instName, servicename);
+                    log.info(" artifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} inst {} in service {} ", artifactlabel, instName,
+                        servicename);
                     return true;
                 }
             } else {
                 if (instArtifactsUuid == null || instArtifactsUuid.isEmpty()) {
-                    log.info(" instArtifactsUuid empty. label {} inst {} in service {} ", artifactlabel, instName,
-                        servicename);
+                    log.info(" instArtifactsUuid empty. label {} inst {} in service {} ", artifactlabel, instName, servicename);
                     return true;
                 }
                 if (!instArtifactsUuid.contains(artifactDefinition.getArtifactUUID())) {
-                    log.info(
-                        " instArtifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} inst {} in service {} ",
-                        artifactlabel, instName, servicename);
+                    log.info(" instArtifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} inst {} in service {} ", artifactlabel,
+                        instName, servicename);
                     return true;
                 }
             }
@@ -555,12 +498,10 @@ public class ArtifactUuidFix {
         if (vfModule != null && artifactsUuid != null) {
             return isProblematicVFModule(vfModule, artifactsUuid, instArtifactsUuid);
         }
-
         return false;
     }
 
-    private boolean isProblematicVFModule(VfModuleArtifactPayloadEx vfModule, List<String> artifactsUuid,
-                                          List<String> instArtifactsUuid) {
+    private boolean isProblematicVFModule(VfModuleArtifactPayloadEx vfModule, List<String> artifactsUuid, List<String> instArtifactsUuid) {
         log.info(" isProblematicVFModule  {}  ", vfModule.getVfModuleModelName());
         List<String> vfModuleArtifacts = vfModule.getArtifacts();
         List<String> allArtifacts = new ArrayList<>();
@@ -588,41 +529,28 @@ public class ArtifactUuidFix {
         return false;
     }
 
-
     private boolean fix(List<Resource> vfLst, List<Service> serviceList, Map<String, List<Component>> nodesToFixTosca,
                         Map<String, List<Component>> vfToFixTosca, Map<String, List<Component>> servicesToFixTosca) {
         boolean res = true;
         log.info(" Fix started ***** ");
         if (vfLst != null && !vfLst.isEmpty()) {
             res = fixVf(vfLst);
-
         }
-
         if (res && serviceList != null && !serviceList.isEmpty()) {
             res = fixServices(serviceList);
-
         }
-
         Set<String> fixedIds = new HashSet<>();
-
         long time = System.currentTimeMillis();
         String fileName = "FailedGenerateTosca" + "_" + time + ".csv";
-
         try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
             writer.write("componentType, name, version, UID, UUID, invariantUUID, state\n");
             List<Component> failedList = new ArrayList<>();
-
             if (res && nodesToFixTosca != null && !nodesToFixTosca.isEmpty()) {
-
                 generateAndSaveToscaArtifacts(nodesToFixTosca, fixedIds, null, failedList);
-
             }
             if (vfToFixTosca != null && !vfToFixTosca.isEmpty()) {
-
                 generateAndSaveToscaArtifacts(vfToFixTosca, fixedIds, vfLst, failedList);
-
             }
-
             for (Component component : vfLst) {
                 res = generateToscaPerComponent(fixedIds, component);
                 if (res) {
@@ -631,57 +559,47 @@ public class ArtifactUuidFix {
                     res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.GROUPS, EdgeLabelEnum.GROUPS, groups);
                     if (res) {
                         Map<String, ArtifactDataDefinition> arifacts = topologyTemplate.getDeploymentArtifacts();
-                        res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.DEPLOYMENT_ARTIFACTS,
-                            EdgeLabelEnum.DEPLOYMENT_ARTIFACTS, arifacts);
+                        res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.DEPLOYMENT_ARTIFACTS, EdgeLabelEnum.DEPLOYMENT_ARTIFACTS,
+                            arifacts);
                     }
                     if (res) {
                         Map<String, ArtifactDataDefinition> arifacts = topologyTemplate.getToscaArtifacts();
-                        res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS,
-                            EdgeLabelEnum.TOSCA_ARTIFACTS, arifacts);
+                        res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, EdgeLabelEnum.TOSCA_ARTIFACTS, arifacts);
                     }
                     janusGraphDao.commit();
                 } else {
                     failedList.add(component);
                 }
             }
-
             if (servicesToFixTosca != null && !servicesToFixTosca.isEmpty()) {
                 generateAndSaveToscaArtifacts(servicesToFixTosca, fixedIds, serviceList, failedList);
-
             }
-
             for (Component component : serviceList) {
                 res = generateToscaPerComponent(fixedIds, component);
                 if (res) {
                     TopologyTemplate topologyTemplate = ModelConverter.convertToToscaElement(component);
                     Map<String, MapGroupsDataDefinition> groups = topologyTemplate.getInstGroups();
-                    res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.INST_GROUPS, EdgeLabelEnum.INST_GROUPS,
-                        groups);
-
+                    res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.INST_GROUPS, EdgeLabelEnum.INST_GROUPS, groups);
                     if (res) {
-                        Map<String, MapArtifactDataDefinition> artifacts = topologyTemplate
-                            .getInstDeploymentArtifacts();
+                        Map<String, MapArtifactDataDefinition> artifacts = topologyTemplate.getInstDeploymentArtifacts();
                         res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.INST_DEPLOYMENT_ARTIFACTS,
                             EdgeLabelEnum.INST_DEPLOYMENT_ARTIFACTS, artifacts);
                     }
                     if (res) {
                         Map<String, ArtifactDataDefinition> arifacts = topologyTemplate.getToscaArtifacts();
-                        res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS,
-                            EdgeLabelEnum.TOSCA_ARTIFACTS, arifacts);
+                        res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, EdgeLabelEnum.TOSCA_ARTIFACTS, arifacts);
                     }
                     janusGraphDao.commit();
                 } else {
                     failedList.add(component);
                 }
-
             }
             if (!failedList.isEmpty()) {
                 for (Component component : failedList) {
                     StringBuilder sb = new StringBuilder(component.getComponentType().getValue());
-                    sb.append(",").append(component.getName()).append(",").append(component.getVersion()).append(",")
-                        .append(component.getUniqueId()).append(",").append(component.getUUID()).append(",")
-                        .append(component.getInvariantUUID()).append(",").append(component.getLifecycleState());
-
+                    sb.append(",").append(component.getName()).append(",").append(component.getVersion()).append(",").append(component.getUniqueId())
+                        .append(",").append(component.getUUID()).append(",").append(component.getInvariantUUID()).append(",")
+                        .append(component.getLifecycleState());
                     sb.append("\n");
                     writer.write(sb.toString());
                 }
@@ -701,11 +619,9 @@ public class ArtifactUuidFix {
         boolean res = true;
         log.debug("Migration1707ArtifactUuidFix  generateAndSaveToscaArtifacts started ");
         for (Map.Entry<String, List<Component>> entry : nodesToFixTosca.entrySet()) {
-
             List<Component> component = entry.getValue();
             for (Component c : component) {
-                log.debug("Migration1707ArtifactUuidFix  fix tosca on component : id {},  name {} ", c.getUniqueId(),
-                    c.getName());
+                log.debug("Migration1707ArtifactUuidFix  fix tosca on component : id {},  name {} ", c.getUniqueId(), c.getName());
                 if (componentsWithFailedGroups != null) {
                     Optional<Component> op = (Optional<Component>) componentsWithFailedGroups.stream()
                         .filter(cg -> cg.getUniqueId().equals(c.getUniqueId())).findAny();
@@ -718,13 +634,11 @@ public class ArtifactUuidFix {
                 if (res) {
                     ToscaElement topologyTemplate = ModelConverter.convertToToscaElement(c);
                     Map<String, ArtifactDataDefinition> arifacts = topologyTemplate.getToscaArtifacts();
-                    res = fixDataOnGraph(c.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, EdgeLabelEnum.TOSCA_ARTIFACTS,
-                        arifacts);
+                    res = fixDataOnGraph(c.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, EdgeLabelEnum.TOSCA_ARTIFACTS, arifacts);
                     janusGraphDao.commit();
                 } else {
                     failedList.add(c);
                 }
-
             }
         }
         log.debug("Migration1707ArtifactUuidFix  generateAndSaveToscaArtifacts finished with res {} ", res);
@@ -733,11 +647,9 @@ public class ArtifactUuidFix {
 
     private boolean generateToscaPerComponent(Set<String> fixedIds, Component c) {
         boolean res = true;
-        log.debug("Migration1707ArtifactUuidFix  generateToscaPerComponent started component name {} id {}",
-            c.getName(), c.getUniqueId());
+        log.debug("Migration1707ArtifactUuidFix  generateToscaPerComponent started component name {} id {}", c.getName(), c.getUniqueId());
         try {
-            Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade
-                .getToscaFullElement(c.getUniqueId());
+            Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade.getToscaFullElement(c.getUniqueId());
             if (toscaElement.isRight()) {
                 log.info("Failed to fetch resources {} {}", c.getUniqueId(), toscaElement.right().value());
                 return false;
@@ -748,21 +660,16 @@ public class ArtifactUuidFix {
             List<ComponentInstance> ciList = c.getComponentInstances();
             if (ciListFull != null && !ciListFull.isEmpty()) {
                 ciListFull.forEach(ciFull -> {
-                    ComponentInstance compInst = ciList.stream()
-                        .filter(ci -> ci.getUniqueId().equals(ciFull.getUniqueId())).findAny().get();
+                    ComponentInstance compInst = ciList.stream().filter(ci -> ci.getUniqueId().equals(ciFull.getUniqueId())).findAny().get();
                     ciFull.setGroupInstances(compInst.getGroupInstances());
                 });
             }
-
             Either<Component, ToscaError> either = generateToscaArtifact(toscaElementFull);
-
             if (either.isRight()) {
-                log.error("Couldn't generate and save tosca template component  unique id {}, name {} error: {}",
-                    toscaElementFull.getUniqueId(), toscaElementFull.getName(), either.right().value());
+                log.error("Couldn't generate and save tosca template component  unique id {}, name {} error: {}", toscaElementFull.getUniqueId(),
+                    toscaElementFull.getName(), either.right().value());
                 res = false;
-
             }
-
             if (res) {
                 c.setToscaArtifacts(either.left().value().getToscaArtifacts());
                 fixedIds.add(toscaElementFull.getUniqueId());
@@ -774,29 +681,25 @@ public class ArtifactUuidFix {
                 janusGraphDao.rollback();
             }
         }
-        log.debug("Migration1707ArtifactUuidFix  generateToscaPerComponent finished  component name {} id {} res {}",
-            c.getName(), c.getUniqueId(), res);
+        log.debug("Migration1707ArtifactUuidFix  generateToscaPerComponent finished  component name {} id {} res {}", c.getName(), c.getUniqueId(),
+            res);
         return res;
     }
 
-    private <T extends ToscaDataDefinition> boolean fixDataOnGraph(String componentId, VertexTypeEnum vertexTypeEnum,
-                                                                   EdgeLabelEnum edgeLabelEnum, Map<String, T> groups) {
-        log.debug("amount groups to update: VertexTypeEnum {} EdgeLabelEnum {} data size {}", vertexTypeEnum.getName(),
-            edgeLabelEnum, groups.size());
+    private <T extends ToscaDataDefinition> boolean fixDataOnGraph(String componentId, VertexTypeEnum vertexTypeEnum, EdgeLabelEnum edgeLabelEnum,
+                                                                   Map<String, T> groups) {
+        log.debug("amount groups to update: VertexTypeEnum {} EdgeLabelEnum {} data size {}", vertexTypeEnum.getName(), edgeLabelEnum, groups.size());
         boolean res = true;
-        Either<GraphVertex, JanusGraphOperationStatus> getResponse = janusGraphDao.getVertexById(componentId,
-            JsonParseFlagEnum.NoParse);
+        Either<GraphVertex, JanusGraphOperationStatus> getResponse = janusGraphDao.getVertexById(componentId, JsonParseFlagEnum.NoParse);
         if (getResponse.isRight()) {
             log.debug("Couldn't fetch component  unique id {}, error: {}", componentId, getResponse.right().value());
             res = false;
-
         }
         if (res) {
             GraphVertex componentVertex = getResponse.left().value();
-
             GraphVertex toscaDataVertex = null;
-            Either<GraphVertex, JanusGraphOperationStatus> groupVertexEither = janusGraphDao.getChildVertex(componentVertex,
-                edgeLabelEnum, JsonParseFlagEnum.ParseJson);
+            Either<GraphVertex, JanusGraphOperationStatus> groupVertexEither = janusGraphDao
+                .getChildVertex(componentVertex, edgeLabelEnum, JsonParseFlagEnum.ParseJson);
             if (groupVertexEither.isRight() && groupVertexEither.right().value() == JanusGraphOperationStatus.NOT_FOUND) {
                 log.debug("no child {}  vertex for component  unique id {}, error: {}", edgeLabelEnum, componentId,
                     groupVertexEither.right().value());
@@ -804,38 +707,33 @@ public class ArtifactUuidFix {
             }
             if (groupVertexEither.isRight()) {
                 res = false;
-                log.debug("failed to get child {}  vertex for component  unique id {}, error: {}", edgeLabelEnum,
-                    componentId, groupVertexEither.right().value());
+                log.debug("failed to get child {}  vertex for component  unique id {}, error: {}", edgeLabelEnum, componentId,
+                    groupVertexEither.right().value());
             }
             if (res) {
                 toscaDataVertex = groupVertexEither.left().value();
                 toscaDataVertex.setJson(groups);
                 Either<GraphVertex, JanusGraphOperationStatus> updatevertexEither = janusGraphDao.updateVertex(toscaDataVertex);
                 if (updatevertexEither.isRight()) {
-                    log.debug("failed to update vertex for component  unique id {}, error: {}", componentId,
-                        updatevertexEither.right().value());
+                    log.debug("failed to update vertex for component  unique id {}, error: {}", componentId, updatevertexEither.right().value());
                     janusGraphDao.rollback();
                     return false;
                 }
             }
         }
-        log.debug("Fix data on graph finished: VertexTypeEnum {} EdgeLabelEnum {} res {}", vertexTypeEnum.getName(),
-            res);
+        log.debug("Fix data on graph finished: VertexTypeEnum {} EdgeLabelEnum {} res {}", vertexTypeEnum.getName(), res);
         return res;
     }
 
     private boolean fixServices(List<Service> serviceList) {
         for (Service service : serviceList) {
-            log.debug("Migration1707ArtifactUuidFix  fix service: id {},  name {} ", service.getUniqueId(),
-                service.getName());
+            log.debug("Migration1707ArtifactUuidFix  fix service: id {},  name {} ", service.getUniqueId(), service.getName());
             List<ComponentInstance> instances = service.getComponentInstances();
             for (ComponentInstance instance : instances) {
                 fixComponentInstances(service, instance);
             }
-
         }
         return true;
-
     }
 
     private void fixComponentInstances(Service service, ComponentInstance instance) {
@@ -845,16 +743,12 @@ public class ArtifactUuidFix {
             List<GroupInstance> groupsToDelete = new ArrayList<>();
             for (GroupInstance group : groupsList) {
                 fixGroupInstances(service, artifactsMap, groupsToDelete, group);
-
             }
-
             if (!groupsToDelete.isEmpty()) {
-                log.debug("Migration1707ArtifactUuidFix  delete group:  resource id {}, group instance to delete {} ",
-                    service.getUniqueId(), groupsToDelete);
+                log.debug("Migration1707ArtifactUuidFix  delete group:  resource id {}, group instance to delete {} ", service.getUniqueId(),
+                    groupsToDelete);
                 groupsList.removeAll(groupsToDelete);
-
             }
-
             Optional<ArtifactDefinition> optionalVfModuleArtifact = artifactsMap.values().stream()
                 .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.VF_MODULES_METADATA.getType())).findAny();
             ArtifactDefinition vfModuleArtifact;
@@ -868,11 +762,10 @@ public class ArtifactUuidFix {
         }
     }
 
-    private void fixGroupInstances(Service service, Map<String, ArtifactDefinition> artifactsMap,
-                                   List<GroupInstance> groupsToDelete, GroupInstance group) {
+    private void fixGroupInstances(Service service, Map<String, ArtifactDefinition> artifactsMap, List<GroupInstance> groupsToDelete,
+                                   GroupInstance group) {
         if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) {
-            log.debug("Migration1707ArtifactUuidFix  fix group:  resource id {}, group name {} ", service.getUniqueId(),
-                group.getName());
+            log.debug("Migration1707ArtifactUuidFix  fix group:  resource id {}, group name {} ", service.getUniqueId(), group.getName());
             if (group.getArtifacts() != null) {
                 Set<String> groupArtifactsSet = new HashSet<>(group.getArtifacts());
                 if (group.getGroupInstanceArtifacts() != null) {
@@ -880,16 +773,13 @@ public class ArtifactUuidFix {
                     groupArtifactsSet.addAll(groupInsArtifacts);
                 }
                 List<String> groupArtifacts = new ArrayList<>(groupArtifactsSet);
-
                 clearGroupInstanceArtifacts(group);
-
                 for (String artifactId : groupArtifacts) {
                     fixArtifactUndergroupInstances(artifactsMap, group, groupArtifacts, artifactId);
                 }
             }
             if (group.getArtifacts() == null || group.getArtifacts().isEmpty()) {
-                log.debug(
-                    "Migration1707ArtifactUuidFix  fix groupInstance add to delete list:  resource id {} name {} , group name {} ",
+                log.debug("Migration1707ArtifactUuidFix  fix groupInstance add to delete list:  resource id {} name {} , group name {} ",
                     service.getUniqueId(), service.getName(), group.getName());
                 groupsToDelete.add(group);
             }
@@ -919,11 +809,11 @@ public class ArtifactUuidFix {
         }
     }
 
-    private void fixArtifactUndergroupInstances(Map<String, ArtifactDefinition> artifactsMap, GroupInstance group,
-                                                List<String> groupArtifacts, String artifactId) {
+    private void fixArtifactUndergroupInstances(Map<String, ArtifactDefinition> artifactsMap, GroupInstance group, List<String> groupArtifacts,
+                                                String artifactId) {
         String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
-        log.debug("Migration1707ArtifactUuidFix  fix group:  group name {} artifactId for fix {} artifactlabel {} ",
-            group.getName(), artifactId, artifactlabel);
+        log.debug("Migration1707ArtifactUuidFix  fix group:  group name {} artifactId for fix {} artifactlabel {} ", group.getName(), artifactId,
+            artifactlabel);
         if (!artifactlabel.isEmpty() && artifactsMap.containsKey(artifactlabel)) {
             ArtifactDefinition artifact = artifactsMap.get(artifactlabel);
             ArtifactTypeEnum artifactType = ArtifactTypeEnum.parse(artifact.getArtifactType());
@@ -936,18 +826,14 @@ public class ArtifactUuidFix {
                     artifact.setArtifactType(ArtifactTypeEnum.OTHER.getType());
                 }
                 if (isAddToGroup) {
-                    log.debug(
-                        MIGRATION1707_ARTIFACT_UUID_FIX,
-                        group.getName(), correctArtifactId, correctArtifactUUID);
+                    log.debug(MIGRATION1707_ARTIFACT_UUID_FIX, group.getName(), correctArtifactId, correctArtifactUUID);
                     group.getArtifacts().add(correctArtifactId);
                     if (correctArtifactUUID != null && !correctArtifactUUID.isEmpty()) {
                         group.getArtifactsUuid().add(correctArtifactUUID);
                     }
                 }
             } else {
-                log.debug(
-                    MIGRATION1707_ARTIFACT_UUID_FIX,
-                    group.getName(), correctArtifactId, correctArtifactUUID);
+                log.debug(MIGRATION1707_ARTIFACT_UUID_FIX, group.getName(), correctArtifactId, correctArtifactUUID);
                 Set<String> tmpSet = new HashSet<>(group.getGroupInstanceArtifacts());
                 tmpSet.add(correctArtifactId);
                 group.setGroupInstanceArtifacts(new ArrayList<>(tmpSet));
@@ -962,8 +848,7 @@ public class ArtifactUuidFix {
 
     private boolean fixVf(List<Resource> vfLst) {
         for (Resource resource : vfLst) {
-            log.debug("Migration1707ArtifactUuidFix  fix resource: id {},  name {} ", resource.getUniqueId(),
-                resource.getName());
+            log.debug("Migration1707ArtifactUuidFix  fix resource: id {},  name {} ", resource.getUniqueId(), resource.getName());
             Map<String, ArtifactDefinition> artifactsMap = resource.getDeploymentArtifacts();
             List<GroupDefinition> groupsList = resource.getGroups();
             List<GroupDefinition> groupsToDelete = new ArrayList<>();
@@ -972,45 +857,37 @@ public class ArtifactUuidFix {
                     if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) && group.getArtifacts() != null) {
                         fixVfGroup(resource, artifactsMap, group);
                     }
-                    if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)
-                        && (group.getArtifacts() == null || group.getArtifacts().isEmpty())) {
-                        log.debug(
-                            "Migration1707ArtifactUuidFix  add group to delete list fix resource: id {},  name {} ",
-                            resource.getUniqueId(), resource.getName(), group.getName());
+                    if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) && (group.getArtifacts() == null || group.getArtifacts()
+                        .isEmpty())) {
+                        log.debug("Migration1707ArtifactUuidFix  add group to delete list fix resource: id {},  name {} ", resource.getUniqueId(),
+                            resource.getName(), group.getName());
                         groupsToDelete.add(group);
                     }
                 }
-
                 if (!groupsToDelete.isEmpty()) {
                     groupsList.removeAll(groupsToDelete);
-
                 }
             }
-
         }
-
         return true;
     }
 
     private void fixVfGroup(Resource resource, Map<String, ArtifactDefinition> artifactsMap, GroupDefinition group) {
-        log.debug("Migration1707ArtifactUuidFix  fix group:  resource id {}, group name {} ", resource.getUniqueId(),
-            group.getName());
+        log.debug("Migration1707ArtifactUuidFix  fix group:  resource id {}, group name {} ", resource.getUniqueId(), group.getName());
         Set<String> groupArtifactsSet = new HashSet<>(group.getArtifacts());
         List<String> groupArtifacts = new ArrayList<>(groupArtifactsSet);
         group.getArtifacts().clear();
         group.getArtifactsUuid().clear();
-
         for (String artifactId : groupArtifacts) {
             fixArtifactUnderGroup(artifactsMap, group, groupArtifacts, artifactId);
         }
     }
 
-    private void fixArtifactUnderGroup(Map<String, ArtifactDefinition> artifactsMap, GroupDefinition group,
-                                       List<String> groupArtifacts, String artifactId) {
-
+    private void fixArtifactUnderGroup(Map<String, ArtifactDefinition> artifactsMap, GroupDefinition group, List<String> groupArtifacts,
+                                       String artifactId) {
         String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
-        log.debug("Migration1707ArtifactUuidFix  fix group:  group name {} artifactId for fix {} artifactlabel {} ",
-            group.getName(), artifactId, artifactlabel);
+        log.debug("Migration1707ArtifactUuidFix  fix group:  group name {} artifactId for fix {} artifactlabel {} ", group.getName(), artifactId,
+            artifactlabel);
         if (!artifactlabel.isEmpty() && artifactsMap.containsKey(artifactlabel)) {
             ArtifactDefinition artifact = artifactsMap.get(artifactlabel);
             String correctArtifactId = artifact.getUniqueId();
@@ -1024,21 +901,17 @@ public class ArtifactUuidFix {
                 }
             }
             if (isAddToGroup) {
-                log.debug(
-                    MIGRATION1707_ARTIFACT_UUID_FIX,
-                    group.getName(), correctArtifactId, correctArtifactUUID);
+                log.debug(MIGRATION1707_ARTIFACT_UUID_FIX, group.getName(), correctArtifactId, correctArtifactUUID);
                 group.getArtifacts().add(correctArtifactId);
                 if (correctArtifactUUID != null && !correctArtifactUUID.isEmpty()) {
                     group.getArtifactsUuid().add(correctArtifactUUID);
                 }
             }
-
         }
     }
 
     private String findArtifactLabelFromArtifactId(String artifactId) {
         String artifactLabel = "";
-
         int index = artifactId.lastIndexOf('.');
         if (index > 0 && index + 1 < artifactId.length()) {
             artifactLabel = artifactId.substring(index + 1);
@@ -1046,8 +919,7 @@ public class ArtifactUuidFix {
         return artifactLabel;
     }
 
-    private void writeModuleResultToFile(Writer writer, org.openecomp.sdc.be.model.Component component,
-                                         Service service) {
+    private void writeModuleResultToFile(Writer writer, org.openecomp.sdc.be.model.Component component, Service service) {
         try {
             // "service name, service id, state, version
             StringBuilder sb = new StringBuilder(component.getName());
@@ -1068,52 +940,40 @@ public class ArtifactUuidFix {
             // "service name, service id, state, version
             for (Component component : components) {
                 StringBuilder sb = new StringBuilder(component.getName());
-                sb.append(",").append(component.getUniqueId()).append(",").append(component.getInvariantUUID())
-                    .append(",").append(component.getLifecycleState()).append(",").append(component.getVersion());
-
+                sb.append(",").append(component.getUniqueId()).append(",").append(component.getInvariantUUID()).append(",")
+                    .append(component.getLifecycleState()).append(",").append(component.getVersion());
                 sb.append("\n");
                 writer.write(sb.toString());
             }
         } catch (IOException e) {
-
             log.error(e.getMessage());
         }
     }
 
     public boolean doFixTosca(Map<String, List<Component>> nodeToFix, Map<String, List<Component>> vfToFix,
                               Map<String, List<Component>> serviceToFix) {
-
         Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
         hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
         hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
-
         Map<String, List<Component>> vertices = getVerticesToValidate(VertexTypeEnum.NODE_TYPE, hasProps);
         validateTosca(vertices, nodeToFix, "RESOURCE_TOSCA_ARTIFACTS");//
-
         hasProps.clear();
         hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
         hasProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.VF);
         hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
-
         vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps);
         validateTosca(vertices, vfToFix, "VF_TOSCA_ARTIFACTS");
-
         hasProps.clear();
         hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
         hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
-
         vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps);
         return validateTosca(vertices, serviceToFix, "SERVICE_TOSCA_ARTIFACTS");
     }
 
-    public Map<String, List<Component>> getVerticesToValidate(VertexTypeEnum type,
-                                                              Map<GraphPropertyEnum, Object> hasProps) {
-
+    public Map<String, List<Component>> getVerticesToValidate(VertexTypeEnum type, Map<GraphPropertyEnum, Object> hasProps) {
         Map<String, List<Component>> result = new HashMap<>();
         try {
-
-            Either<List<GraphVertex>, JanusGraphOperationStatus> resultsEither = janusGraphDao
-                .getByCriteria(type, hasProps);
+            Either<List<GraphVertex>, JanusGraphOperationStatus> resultsEither = janusGraphDao.getByCriteria(type, hasProps);
             if (resultsEither.isRight()) {
                 log.error("getVerticesToValidate failed {} ", resultsEither.right().value());
                 return result;
@@ -1127,34 +987,25 @@ public class ArtifactUuidFix {
                     result.put(ivariantUuid, compList);
                 }
                 List<Component> compList = result.get(ivariantUuid);
-
                 ComponentParametersView filter = new ComponentParametersView(true);
                 filter.setIgnoreArtifacts(false);
-
-                Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade
-                    .getToscaElement(vertex.getUniqueId(), filter);
+                Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade.getToscaElement(vertex.getUniqueId(), filter);
                 if (toscaElement.isRight()) {
-                    log.error("getVerticesToValidate: failed to find element {}  staus is {}", vertex.getUniqueId()
-                        , toscaElement.right().value());
+                    log.error("getVerticesToValidate: failed to find element {}  staus is {}", vertex.getUniqueId(), toscaElement.right().value());
                 } else {
                     compList.add(toscaElement.left().value());
                 }
                 janusGraphDao.commit();
-
             });
-
         } catch (Exception e) {
             log.info(FAILED_TO_FETCH_VF_RESOURCES, e);
-
         } finally {
             janusGraphDao.commit();
-
         }
         return result;
     }
 
-    public boolean validateTosca(Map<String, List<Component>> vertices, Map<String, List<Component>> compToFix,
-                                 String name) {
+    public boolean validateTosca(Map<String, List<Component>> vertices, Map<String, List<Component>> compToFix, String name) {
         boolean result = true;
         long time = System.currentTimeMillis();
         String fileName = name + "_" + time + ".csv";
@@ -1173,8 +1024,7 @@ public class ArtifactUuidFix {
                         writer.flush();
                         break;
                     } else {
-                        artifactEsId.addAll(toscaArtifacts.values().stream().map(ArtifactDefinition::getEsId)
-                            .collect(Collectors.toList()));
+                        artifactEsId.addAll(toscaArtifacts.values().stream().map(ArtifactDefinition::getEsId).collect(Collectors.toList()));
                     }
                 }
                 if (!result) {
@@ -1184,9 +1034,7 @@ public class ArtifactUuidFix {
                         filter.setIgnoreComponentInstances(false);
                         filter.setIgnoreArtifacts(false);
                         filter.setIgnoreGroups(false);
-
-                        Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade
-                            .getToscaElement(c.getUniqueId(), filter);
+                        Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade.getToscaElement(c.getUniqueId(), filter);
                         if (toscaElement.isRight()) {
                             log.debug("getVerticesToValidate: failed to find element {} status is {}", c.getUniqueId(), toscaElement.right().value());
                         } else {
@@ -1194,13 +1042,10 @@ public class ArtifactUuidFix {
                         }
                         this.janusGraphDao.commit();
                     }
-
                     compToFix.put(entry.getKey(), compListfull);
                     result = true;
                 }
-
             }
-
         } catch (Exception e) {
             log.info(FAILED_TO_FETCH_VF_RESOURCES, e);
             return false;
@@ -1214,88 +1059,63 @@ public class ArtifactUuidFix {
         log.debug("tosca artifact generation");
         try {
             Map<String, ArtifactDefinition> toscaArtifacts = parent.getToscaArtifacts();
-
             ArtifactDefinition toscaArtifact = null;
             Optional<ArtifactDefinition> op = toscaArtifacts.values().stream()
                 .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.TOSCA_TEMPLATE.getType())).findAny();
-
             if (op.isPresent()) {
                 toscaArtifact = op.get();
             }
             if (toscaArtifact != null) {
-                log.debug("Migration1707ArtifactUuidFix  generateToscaPerComponent artifact name {} id {} esId {}",
-                    toscaArtifact.getArtifactName(), toscaArtifact.getUniqueId(), toscaArtifact.getEsId());
-
+                log.debug("Migration1707ArtifactUuidFix  generateToscaPerComponent artifact name {} id {} esId {}", toscaArtifact.getArtifactName(),
+                    toscaArtifact.getUniqueId(), toscaArtifact.getEsId());
                 Either<ToscaRepresentation, ToscaError> exportComponent = toscaExportUtils.exportComponent(parent);
                 if (exportComponent.isRight()) {
-                    log.debug("Failed export tosca yaml for component {} error {}", parent.getUniqueId(),
-                        exportComponent.right().value());
-
+                    log.debug("Failed export tosca yaml for component {} error {}", parent.getUniqueId(), exportComponent.right().value());
                     return Either.right(exportComponent.right().value());
                 }
                 log.debug("Tosca yaml exported for component {} ", parent.getUniqueId());
-
                 toscaArtifact.setPayload(exportComponent.left().value().getMainYaml());
                 byte[] decodedPayload = toscaArtifact.getPayloadData();
-
-                String uniqueId = UniqueIdBuilder.buildPropertyUniqueId(parent.getUniqueId(),
-                    toscaArtifact.getArtifactLabel());
+                String uniqueId = UniqueIdBuilder.buildPropertyUniqueId(parent.getUniqueId(), toscaArtifact.getArtifactLabel());
                 toscaArtifact.setUniqueId(uniqueId);
                 toscaArtifact.setEsId(toscaArtifact.getUniqueId());
-
                 toscaArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
                 DAOArtifactData artifactData = new DAOArtifactData(toscaArtifact.getEsId(), decodedPayload);
                 artifactCassandraDao.saveArtifact(artifactData);
-
                 log.debug("Tosca yaml artifact esId  {} ", toscaArtifact.getEsId());
             }
             ArtifactDefinition csarArtifact = null;
-            op = toscaArtifacts.values().stream()
-                .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.TOSCA_CSAR.getType())).findAny();
-
+            op = toscaArtifacts.values().stream().filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.TOSCA_CSAR.getType())).findAny();
             if (op.isPresent()) {
                 csarArtifact = op.get();
             }
-
             if (csarArtifact != null) {
                 Either<byte[], ResponseFormat> generated = csarUtils.createCsar(parent, true, true);
-
                 if (generated.isRight()) {
-                    log.debug("Failed to export tosca csar for component {} error {}", parent.getUniqueId(),
-                        generated.right().value());
-
+                    log.debug("Failed to export tosca csar for component {} error {}", parent.getUniqueId(), generated.right().value());
                     return Either.right(ToscaError.GENERAL_ERROR);
                 }
                 byte[] value = generated.left().value();
                 csarArtifact.setPayload(value);
                 byte[] decodedPayload = csarArtifact.getPayloadData();
-
-                String uniqueId = UniqueIdBuilder.buildPropertyUniqueId(parent.getUniqueId(),
-                    csarArtifact.getArtifactLabel());
+                String uniqueId = UniqueIdBuilder.buildPropertyUniqueId(parent.getUniqueId(), csarArtifact.getArtifactLabel());
                 csarArtifact.setUniqueId(uniqueId);
                 csarArtifact.setEsId(csarArtifact.getUniqueId());
-
                 csarArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
                 DAOArtifactData artifactData = new DAOArtifactData(csarArtifact.getEsId(), decodedPayload);
                 artifactCassandraDao.saveArtifact(artifactData);
                 log.debug("Tosca csar artifact esId  {} ", csarArtifact.getEsId());
-
             }
-
         } catch (Exception ex) {
-            log.error("Failed to generate tosca atifact component id {} component name {} error {}",
-                parent.getUniqueId(), parent.getName(), ex.getMessage());
-
+            log.error("Failed to generate tosca atifact component id {} component name {} error {}", parent.getUniqueId(), parent.getName(),
+                ex.getMessage());
             return Either.right(ToscaError.GENERAL_ERROR);
         }
-
         return Either.left(parent);
     }
 
     private ArtifactDefinition createVfModuleArtifact(ComponentInstance currVF) {
-
         ArtifactDefinition vfModuleArtifactDefinition = new ArtifactDefinition();
-
         vfModuleArtifactDefinition.setDescription("Auto-generated VF Modules information artifact");
         vfModuleArtifactDefinition.setArtifactDisplayName("Vf Modules Metadata");
         vfModuleArtifactDefinition.setArtifactType(ArtifactTypeEnum.VF_MODULES_METADATA.getType());
@@ -1303,61 +1123,44 @@ public class ArtifactUuidFix {
         vfModuleArtifactDefinition.setArtifactLabel("vfModulesMetadata");
         vfModuleArtifactDefinition.setTimeout(0);
         vfModuleArtifactDefinition.setArtifactName(currVF.getNormalizedName() + "_modules.json");
-
         return vfModuleArtifactDefinition;
     }
 
-
     private void fillVfModuleInstHeatEnvPayload(Component parent, ComponentInstance instance, List<GroupInstance> groupsForCurrVF,
                                                 ArtifactDefinition vfModuleArtifact) {
         log.debug("generate new vf module for component. name  {}, id {}, Version {}", instance.getName(), instance.getUniqueId());
-
         String uniqueId = UniqueIdBuilder
             .buildInstanceArtifactUniqueId(parent.getUniqueId(), instance.getUniqueId(), vfModuleArtifact.getArtifactLabel());
-
         vfModuleArtifact.setUniqueId(uniqueId);
         vfModuleArtifact.setEsId(vfModuleArtifact.getUniqueId());
-
         List<VfModuleArtifactPayload> vfModulePayloadForCurrVF = new ArrayList<>();
         if (groupsForCurrVF != null) {
             for (GroupInstance groupInstance : groupsForCurrVF) {
                 VfModuleArtifactPayload modulePayload = new VfModuleArtifactPayload(groupInstance);
                 vfModulePayloadForCurrVF.add(modulePayload);
             }
-            Collections.sort(vfModulePayloadForCurrVF,
-                (art1, art2) -> VfModuleArtifactPayload.compareByGroupName(art1, art2));
-
+            Collections.sort(vfModulePayloadForCurrVF, (art1, art2) -> VfModuleArtifactPayload.compareByGroupName(art1, art2));
             final Gson gson = new GsonBuilder().setPrettyPrinting().create();
-
             String vfModulePayloadString = gson.toJson(vfModulePayloadForCurrVF);
             log.debug("vfModulePayloadString {}", vfModulePayloadString);
             if (vfModulePayloadString != null) {
-                String newCheckSum = GeneralUtility
-                    .calculateMD5Base64EncodedByByteArray(vfModulePayloadString.getBytes());
+                String newCheckSum = GeneralUtility.calculateMD5Base64EncodedByByteArray(vfModulePayloadString.getBytes());
                 vfModuleArtifact.setArtifactChecksum(newCheckSum);
-
-                DAOArtifactData artifactData = new DAOArtifactData(vfModuleArtifact.getEsId(),
-                    vfModulePayloadString.getBytes());
+                DAOArtifactData artifactData = new DAOArtifactData(vfModuleArtifact.getEsId(), vfModulePayloadString.getBytes());
                 artifactCassandraDao.saveArtifact(artifactData);
-
             }
-
         }
-
     }
 
     private Either<List<VfModuleArtifactPayloadEx>, StorageOperationStatus> parseVFModuleJson(ArtifactDefinition vfModuleArtifact) {
         log.info("Try to get vfModule json from cassandra {}", vfModuleArtifact.getEsId());
         Either<DAOArtifactData, CassandraOperationStatus> vfModuleData = artifactCassandraDao.getArtifact(vfModuleArtifact.getEsId());
-
         if (vfModuleData.isRight()) {
             CassandraOperationStatus resourceUploadStatus = vfModuleData.right().value();
             StorageOperationStatus storageResponse = DaoStatusConverter.convertCassandraStatusToStorageStatus(resourceUploadStatus);
             log.error("failed to fetch vfModule json {} from cassandra. Status is {}", vfModuleArtifact.getEsId(), storageResponse);
             return Either.right(storageResponse);
-
         }
-
         DAOArtifactData daoArtifactData = vfModuleData.left().value();
         String gsonData = new String(daoArtifactData.getDataAsArray());
         final Gson gson = new GsonBuilder().setPrettyPrinting().create();
@@ -1368,11 +1171,7 @@ public class ArtifactUuidFix {
             VfModuleArtifactPayloadEx vfModule = ComponentsUtils.parseJsonToObject(je.toString(), VfModuleArtifactPayloadEx.class);
             vfModules.add(vfModule);
         });
-
         log.debug("parse vf module finish {}", gsonData);
         return Either.left(vfModules);
-
     }
 }
-
-
index f2ee154..fa13cfc 100644 (file)
@@ -20,6 +20,7 @@
 package org.openecomp.sdc.asdctool.impl;
 
 public class ComponentInstanceRow {
+
     private String uniqueId;
     private String name;
     private String originUid;
index 795810c..960b0f3 100644 (file)
@@ -20,6 +20,7 @@
 package org.openecomp.sdc.asdctool.impl;
 
 public class ComponentRow {
+
     private String uniqueId;
     private String type;
     private String name;
index 2b96ba3..78372c7 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl;
 
-
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.openecomp.sdc.common.log.wrappers.Logger;
-
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
@@ -31,6 +27,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Stream;
+import org.openecomp.sdc.common.log.wrappers.Logger;
 
 /**
  * simple util class to verify that the janusgraph export json graph is not corrupted
@@ -47,7 +44,7 @@ public class GraphJsonValidator {
             stream.forEach(line -> {
                 try {
                     verifyJsonLine(objectMapper, atomicInteger, line);
-                } catch (RuntimeException  | IOException e) {
+                } catch (RuntimeException | IOException e) {
                     logInvalidJsonRow(atomicInteger, line, e);
                     invalidRows.add(atomicInteger.get());
                 }
@@ -57,14 +54,14 @@ public class GraphJsonValidator {
     }
 
     private void verifyJsonLine(ObjectMapper objectMapper, AtomicInteger atomicInteger, String line) throws IOException {
-        log.info("verifying line: " +  atomicInteger.get());
+        log.info("verifying line: " + atomicInteger.get());
         objectMapper.readTree(line);
         atomicInteger.incrementAndGet();
     }
 
     private void logInvalidJsonRow(AtomicInteger atomicInteger, String line, Exception e) {
         log.error("Invalid Json!!!!!!!!!!!!!!!!!!!!", e);
-        log.info("line number: " +  atomicInteger.get());
+        log.info("line number: " + atomicInteger.get());
         log.info("line value: " + line);
     }
 
@@ -75,5 +72,4 @@ public class GraphJsonValidator {
         }
         return true;
     }
-
 }
index d0eea9a..09be2a8 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl;
 
+import static org.openecomp.sdc.asdctool.Utils.getProperties;
+
 import com.google.gson.Gson;
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
 import org.apache.commons.configuration.BaseConfiguration;
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.tinkerpop.gremlin.structure.Graph;
@@ -37,61 +50,39 @@ import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
 import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import static org.openecomp.sdc.asdctool.Utils.getProperties;
-
 public class GraphMLConverter {
 
     private static final String STORAGE_BACKEND = "storage.backend";
-
     private static final String INMEMORY = "inmemory";
-
     private static final String EXPORT_GRAPH = "exportGraph.";
-
     private static final String DOT_JSON = ".json";
-
     private static final String EXPORTED_FILE = "Exported file={}";
-
     private static final String NODE_LABEL = "nodeLabel";
-
+    private static final String LOG_FORMATTER = "{} {}";
     private static Logger log = Logger.getLogger(GraphMLConverter.class.getName());
-
     private Gson gson = new Gson();
-    private static final String LOG_FORMATTER = "{} {}";
 
-    public boolean importGraph(String[] args) {
+    private static GraphSONMapper newGraphSONMapper(final Graph graph) {
+        final GraphSONMapper.Builder builder = graph.io(IoCore.graphson()).mapper();
+        return builder.create();
+    }
 
+    public boolean importGraph(String[] args) {
         JanusGraph graph = null;
         try {
             String janusGraphFileLocation = args[1];
             String inputFile = args[2];
             graph = openGraph(janusGraphFileLocation);
-
             List<ImmutablePair<String, String>> propertiesCriteriaToDelete = new ArrayList<>();
             ImmutablePair<String, String> immutablePair1 = new ImmutablePair<>("healthcheckis", "GOOD");
             ImmutablePair<String, String> immutablePair2 = new ImmutablePair<>(NODE_LABEL, "user");
             ImmutablePair<String, String> immutablePair3 = new ImmutablePair<>(NODE_LABEL, "resourceCategory");
             ImmutablePair<String, String> immutablePair4 = new ImmutablePair<>(NODE_LABEL, "serviceCategory");
-
             propertiesCriteriaToDelete.add(immutablePair1);
             propertiesCriteriaToDelete.add(immutablePair2);
             propertiesCriteriaToDelete.add(immutablePair3);
             propertiesCriteriaToDelete.add(immutablePair4);
-
             return importJsonGraph(graph, inputFile, propertiesCriteriaToDelete);
-
         } catch (Exception e) {
             log.info("import graph failed ", e);
             return false;
@@ -100,23 +91,18 @@ public class GraphMLConverter {
                 graph.close();
             }
         }
-
     }
 
     public boolean exportGraph(String[] args) {
-
         JanusGraph graph = null;
         try {
             String janusGraphFileLocation = args[1];
             String outputDirectory = args[2];
             graph = openGraph(janusGraphFileLocation);
-
             String result = exportJsonGraph(graph, outputDirectory);
-
             if (result == null) {
                 return false;
             }
-
             log.info(LOG_FORMATTER, EXPORTED_FILE, result);
         } catch (Exception e) {
             log.info("export graph failed ", e);
@@ -126,21 +112,17 @@ public class GraphMLConverter {
                 graph.close();
             }
         }
-
         return true;
     }
 
     public String exportGraphMl(String[] args) {
-
         JanusGraph graph = null;
         String result;
         try {
             String janusGraphFileLocation = args[1];
             String outputDirectory = args[2];
             graph = openGraph(janusGraphFileLocation);
-
             result = exportGraphMl(graph, outputDirectory);
-
             log.info(LOG_FORMATTER, EXPORTED_FILE, result);
         } catch (Exception e) {
             log.info("export exportGraphMl failed ", e);
@@ -150,24 +132,19 @@ public class GraphMLConverter {
                 graph.close();
             }
         }
-
         return result;
     }
 
     public boolean findErrorInJsonGraph(String[] args) {
-
         JanusGraph graph = null;
         try {
             String janusGraphFileLocation = args[1];
             String outputDirectory = args[2];
             graph = openGraph(janusGraphFileLocation);
-
             String result = findErrorInJsonGraph(graph, outputDirectory);
-
             if (result == null) {
                 return false;
             }
-
             log.info(LOG_FORMATTER, EXPORTED_FILE, result);
         } catch (Exception e) {
             log.info("find Error In Json Graph failed ", e);
@@ -177,7 +154,6 @@ public class GraphMLConverter {
                 graph.close();
             }
         }
-
         return true;
     }
 
@@ -186,35 +162,26 @@ public class GraphMLConverter {
     }
 
     public String exportJsonGraph(JanusGraph graph, String outputDirectory) {
-
         String result = null;
-
         String outputFile = outputDirectory + File.separator + EXPORT_GRAPH + System.currentTimeMillis() + DOT_JSON;
-
         try (final OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile))) {
-
             final GraphSONWriter.Builder builder = GraphSONWriter.build();
             final GraphSONMapper mapper = newGraphSONMapper(graph);
             builder.mapper(mapper);
             final GraphSONWriter writer = builder.create();
             writer.writeGraph(out, graph);
-
             graph.tx().commit();
-
             result = outputFile;
-
         } catch (Exception e) {
             log.info("export Json Graph failed ", e);
             graph.tx().rollback();
         }
         return result;
-
     }
 
     public String exportGraphMl(JanusGraph graph, String outputDirectory) {
         String result = null;
-        String outputFile =
-            outputDirectory + File.separator + EXPORT_GRAPH + System.currentTimeMillis() + ".graphml";
+        String outputFile = outputDirectory + File.separator + EXPORT_GRAPH + System.currentTimeMillis() + ".graphml";
         try {
             try (final OutputStream os = new BufferedOutputStream(new FileOutputStream(outputFile))) {
                 graph.io(IoCore.graphml()).writer().normalize(true).create().writeGraph(os, graph);
@@ -226,108 +193,72 @@ public class GraphMLConverter {
             log.info("export Graph Ml failed ", e);
         }
         return result;
-
     }
 
-    private static GraphSONMapper newGraphSONMapper(final Graph graph) {
-        final GraphSONMapper.Builder builder = graph.io(IoCore.graphson()).mapper();
-        return builder.create();
-    }
-
-    public boolean importJsonGraph(JanusGraph graph, String graphJsonFile,
-                                   List<ImmutablePair<String, String>> propertiesCriteriaToDelete) {
-
+    public boolean importJsonGraph(JanusGraph graph, String graphJsonFile, List<ImmutablePair<String, String>> propertiesCriteriaToDelete) {
         boolean result = false;
-
         if (propertiesCriteriaToDelete != null) {
             for (Entry<String, String> entry : propertiesCriteriaToDelete) {
-
                 String key = entry.getKey();
                 String value = entry.getValue();
                 for (JanusGraphVertex janusGraphVertex : graph.query().has(key, value).vertices()) {
                     janusGraphVertex.remove();
                     log.info("Remove vertex of type {} and value {}", key, value);
                 }
-
             }
         }
         File file = new File(graphJsonFile);
         if (!file.isFile()) {
-            log.info("File {} cannot be found.", graphJsonFile );
+            log.info("File {} cannot be found.", graphJsonFile);
             return false;
         }
-
         try (final InputStream is = new BufferedInputStream(new FileInputStream(graphJsonFile))) {
-
             log.info("Before importing file {}", graphJsonFile);
-
             GraphSONReader create = GraphSONReader.build().create();
             create.readGraph(is, graph);
-
             graph.tx().commit();
-
             result = true;
-
         } catch (Exception e) {
             log.info("Failed to import graph ", e);
             graph.tx().rollback();
         }
         return result;
-
     }
 
     public String findErrorInJsonGraph(JanusGraph graph, String outputDirectory) {
-
         String result = null;
         String outputFile = outputDirectory + File.separator + EXPORT_GRAPH + System.currentTimeMillis() + DOT_JSON;
-
         try (final OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile))) {
-
             graph.query().has(GraphPropertiesDictionary.HEALTH_CHECK.getProperty(), "GOOD").vertices();
-
             BaseConfiguration conf = new BaseConfiguration();
             conf.setProperty(STORAGE_BACKEND, INMEMORY);
             for (NodeTypeEnum nodeTypeEnum : NodeTypeEnum.values()) {
                 removeNodesByLabel(graph, nodeTypeEnum.getName());
             }
-
             GraphSONWriter create = GraphSONWriter.build().create();
             create.writeGraph(out, graph);
-
             graph.tx().rollback();
-
             result = outputFile;
-
         } catch (Exception e) {
             log.info("Find error In Json Graph ", e);
             graph.tx().rollback();
         }
-
         return result;
-
     }
 
     private void removeNodesByLabel(JanusGraph graph, String label) {
-        Iterable<JanusGraphVertex> vertices =
-                graph.query().has(GraphPropertiesDictionary.LABEL.getProperty(), label).vertices();
+        Iterable<JanusGraphVertex> vertices = graph.query().has(GraphPropertiesDictionary.LABEL.getProperty(), label).vertices();
         for (Vertex vertex : vertices) {
             vertex.remove();
         }
     }
 
     public String exportUsers(JanusGraph graph, String outputDirectory) {
-
         List<Map<String, Object>> users = new ArrayList<>();
         String result = null;
-
         String outputFile = outputDirectory + File.separator + "users." + System.currentTimeMillis() + DOT_JSON;
-
-        JanusGraphQuery graphQuery =
-            graph.query().has(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.User.getName());
-
-        @SuppressWarnings("unchecked")
-        Iterable<JanusGraphVertex> vertices = graphQuery.vertices();
-
+        JanusGraphQuery graphQuery = graph.query().has(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.User.getName());
+        @SuppressWarnings("unchecked") Iterable<JanusGraphVertex> vertices = graphQuery.vertices();
         if (vertices != null) {
             for (Vertex v : vertices) {
                 Map<String, Object> properties = getProperties(v);
@@ -335,40 +266,28 @@ public class GraphMLConverter {
                 users.add(properties);
             }
         }
-
         graph.tx().commit();
-
         String jsonUsers = gson.toJson(users);
-
         try (final FileWriter fileWriter = new FileWriter(outputFile)) {
-
             fileWriter.write(jsonUsers);
-
             result = outputFile;
-
         } catch (Exception e) {
             log.info("Export users failed because ", e);
             graph.tx().rollback();
         }
-
         return result;
-
     }
 
     public boolean exportUsers(String[] args) {
-
         JanusGraph graph = null;
         try {
             String janusGraphFileLocation = args[1];
             String outputDirectory = args[2];
             graph = openGraph(janusGraphFileLocation);
-
             String result = exportUsers(graph, outputDirectory);
-
             if (result == null) {
                 return false;
             }
-
             log.info(EXPORTED_FILE, result);
         } catch (Exception e) {
             log.info("Export users failed because", e);
@@ -378,7 +297,6 @@ public class GraphMLConverter {
                 graph.close();
             }
         }
-
         return true;
     }
 }
index d8642eb..bbfcbd3 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl;
 
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 import javax.xml.XMLConstants;
 import org.apache.poi.hssf.usermodel.HSSFWorkbook;
 import org.apache.poi.ss.usermodel.Row;
@@ -34,25 +40,14 @@ import org.jdom2.util.IteratorIterable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
 public class GraphMLDataAnalyzer {
 
-    private static Logger log = LoggerFactory.getLogger(GraphMLDataAnalyzer.class);
-
-    private static final String[] COMPONENT_SHEET_HEADER = {"uniqueId", "type", "name", "toscaResourceName",
-        "resourceType", "version", "deleted", "hasNonCalculatedReqCap"};
-    private static final String[] COMPONENT_INSTANCES_SHEET_HEADER =
-        {"uniqueId", "name", "originUid", "originType", "containerUid"};
-
     public static final String GRAPH_ML_EXTENSION = ".graphml";
     public static final String EXCEL_EXTENSION = ".xls";
+    private static final String[] COMPONENT_SHEET_HEADER = {"uniqueId", "type", "name", "toscaResourceName", "resourceType", "version", "deleted",
+        "hasNonCalculatedReqCap"};
+    private static final String[] COMPONENT_INSTANCES_SHEET_HEADER = {"uniqueId", "name", "originUid", "originType", "containerUid"};
+    private static Logger log = LoggerFactory.getLogger(GraphMLDataAnalyzer.class);
 
     public String analyzeGraphMLData(String[] args) {
         String result;
@@ -72,13 +67,10 @@ public class GraphMLDataAnalyzer {
         SAXBuilder builder = new SAXBuilder();
         builder.setProperty(XMLConstants.ACCESS_EXTERNAL_DTD, "");
         builder.setProperty(XMLConstants.ACCESS_EXTERNAL_SCHEMA, "");
-
         File xmlFile = new File(mlFileLocation);
         Document document = builder.build(xmlFile);
-
         // XLS data file name
         String outputFile = mlFileLocation.replace(GRAPH_ML_EXTENSION, EXCEL_EXTENSION);
-
         try (Workbook wb = new HSSFWorkbook(); FileOutputStream fileOut = new FileOutputStream(outputFile)) {
             writeComponents(wb, document);
             writeComponentInstances(wb, document);
@@ -95,7 +87,6 @@ public class GraphMLDataAnalyzer {
         for (int i = 0; i < COMPONENT_SHEET_HEADER.length; i++) {
             currentRow.createCell(i).setCellValue(COMPONENT_SHEET_HEADER[i]);
         }
-
         List<ComponentRow> components = getComponents(document);
         int rowNum = 1;
         for (ComponentRow row : components) {
@@ -106,8 +97,7 @@ public class GraphMLDataAnalyzer {
             currentRow.createCell(3).setCellValue(row.getToscaResourceName());
             currentRow.createCell(4).setCellValue(row.getResourceType());
             currentRow.createCell(5).setCellValue(row.getVersion());
-            currentRow.createCell(6)
-                .setCellValue(row.getIsDeleted() != null ? row.getIsDeleted().toString() : "false");
+            currentRow.createCell(6).setCellValue(row.getIsDeleted() != null ? row.getIsDeleted().toString() : "false");
             currentRow.createCell(7).setCellValue(row.getHasNonCalculatedReqCap());
         }
     }
@@ -151,7 +141,6 @@ public class GraphMLDataAnalyzer {
                 }
             }
         }
-
         filter = new ElementFilter("node");
         IteratorIterable<Element> nodes = graph.getDescendants(filter);
         filter = new ElementFilter("data");
@@ -236,6 +225,7 @@ public class GraphMLDataAnalyzer {
             }
             if (isComponentInst) {
                 // Assuming the uid is in standard form of
+
                 // <container>.<origin>.<name>
                 String uniqueId = componentInstRow.getUniqueId();
                 if (uniqueId != null) {
@@ -250,5 +240,4 @@ public class GraphMLDataAnalyzer {
         }
         return res;
     }
-
 }
index 33fbf29..77c6049 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl;
 
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
 import org.apache.tinkerpop.gremlin.structure.Edge;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.janusgraph.core.JanusGraph;
@@ -43,166 +45,161 @@ import org.openecomp.sdc.be.resources.data.UserData;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-
 public class JanusGraphInitializer {
 
-       private static Logger logger = LoggerFactory.getLogger(JanusGraphInitializer.class.getName());
-       private static JanusGraph graph;
-
-       public static boolean createGraph(String janusGraphCfgFile) {
-               logger.info("** createGraph with {}", janusGraphCfgFile);
-               try {
-                       logger.info("createGraph : try to load file {}", janusGraphCfgFile);
-                       graph = JanusGraphFactory.open(janusGraphCfgFile);
-                       if (graph.isClosed()) {
-                               return false;
-                       }
-
-               } catch (JanusGraphException e) {
-                       logger.info("createGraph : failed to open JanusGraph graph with configuration file: {}", janusGraphCfgFile, e);
-                       return false;
-               }
-
-               createIndexesAndDefaults();
-
-               logger.info("** JanusGraph graph created ");
-
-               return true;
-       }
-
-       private static boolean isVertexExist(Map<String, Object> properties) {
-               JanusGraphQuery query = graph.query();
-
-               if (properties != null && !properties.isEmpty()) {
-                       for (Map.Entry<String, Object> entry : properties.entrySet()) {
-                               query = query.has(entry.getKey(), entry.getValue());
-                       }
-               }
-               Iterable<Vertex> vertecies = query.vertices();
-               java.util.Iterator<Vertex> iterator = vertecies.iterator();
-               if (iterator.hasNext()) {
-                       return true;
-               }
-               return false;
-       }
-
-       private static boolean isVertexNotExist(Map<String, Object> properties) {
-               return !isVertexExist(properties);
-       }
-
-       private static void createDefaultAdminUser() {
-               createUser(getDefaultUserAdmin());
-               graph.tx().commit();
-
-       }
-
-       private static void createUser(UserData user) {
-               Map<String, Object> checkedProperties = new HashMap<>();
-               checkedProperties.put(GraphPropertiesDictionary.USERID.getProperty(), user.getUserId());
-               checkedProperties.put(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.User.getName());
-               Map<String, Object> properties = null;
-               if (!isVertexExist(checkedProperties)) {
-                       Vertex vertex = graph.addVertex();
-                       vertex.property(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.User.getName());
-                       properties = user.toGraphMap();
-                       for (Map.Entry<String, Object> entry : properties.entrySet()) {
-                               vertex.property(entry.getKey(), entry.getValue());
-                       }
-               }
-       }
-
-       private static UserData getDefaultUserAdmin() {
-               UserData userData = new UserData();
-               userData.setAction(ActionEnum.Create);
-               userData.setElementType(GraphElementTypeEnum.Node);
-               userData.setUserId("jh0003");
-               userData.setEmail("admin@sdc.com");
-               userData.setFirstName("Jimmy");
-               userData.setLastName("Hendrix");
-               userData.setRole("ADMIN");
-               userData.setStatus(UserStatusEnum.ACTIVE.name());
-               userData.setLastLoginTime(0L);
-               return userData;
-       }
-
-       private static void createVertexIndixes() {
-               logger.info("** createVertexIndixes started");
-
-               JanusGraphManagement graphMgt = graph.openManagement();
-               JanusGraphIndex index = null;
-               for (GraphPropertiesDictionary prop : GraphPropertiesDictionary.values()) {
-                       PropertyKey propKey = null;
-                       if (!graphMgt.containsPropertyKey(prop.getProperty())) {
-                               Class<?> clazz = prop.getClazz();
-                               if (!clazz.isAssignableFrom(ArrayList.class) && !clazz.isAssignableFrom(HashMap.class)) {
-                                       propKey = graphMgt.makePropertyKey(prop.getProperty()).dataType(prop.getClazz()).make();
-                               }
-                       } else {
-                               propKey = graphMgt.getPropertyKey(prop.getProperty());
-                       }
-                       if (prop.isIndexed()) {
-                               if (!graphMgt.containsGraphIndex(prop.getProperty())) {
-                                       if (prop.isUnique()) {
-                                               index = graphMgt.buildIndex(prop.getProperty(), Vertex.class).addKey(propKey).unique().buildCompositeIndex();
-
-                                               graphMgt.setConsistency(propKey, ConsistencyModifier.LOCK); // Ensures
-                                                                                                                                                                       // only
-                                                                                                                                                                       // one
-                                                                                                                                                                       // name
-                                                                                                                                                                       // per
-                                                                                                                                                                       // vertex
-                                               graphMgt.setConsistency(index, ConsistencyModifier.LOCK); // Ensures
-                                                                                                                                                                       // name
-                                                                                                                                                                       // uniqueness
-                                                                                                                                                                       // in
-                                                                                                                                                                       // the
-                                                                                                                                                                       // graph
-
-                                       } else {
-                                               graphMgt.buildIndex(prop.getProperty(), Vertex.class).addKey(propKey).buildCompositeIndex();
-                                       }
-                               }
-                       }
-               }
-               graphMgt.commit();
-               logger.info("** createVertexIndixes ended");
-
-       }
-
-       private static void createEdgeIndixes() {
-               logger.info("** createEdgeIndixes started");
-               JanusGraphManagement graphMgt = graph.openManagement();
-               for (GraphEdgePropertiesDictionary prop : GraphEdgePropertiesDictionary.values()) {
-                       if (!graphMgt.containsGraphIndex(prop.getProperty())) {
-                               PropertyKey propKey = graphMgt.makePropertyKey(prop.getProperty()).dataType(prop.getClazz()).make();
-                               graphMgt.buildIndex(prop.getProperty(), Edge.class).addKey(propKey).buildCompositeIndex();
-
-                       }
-               }
-               graphMgt.commit();
-               logger.info("** createEdgeIndixes ended");
-       }
-
-       private static void createIndexesAndDefaults() {
-               createVertexIndixes();
-               createEdgeIndixes();
-               createDefaultAdminUser();
-               createRootVertex(VertexTypeEnum.CATALOG_ROOT);
-               createRootVertex(VertexTypeEnum.ARCHIVE_ROOT);
-       }
-
-       private static void createRootVertex(VertexTypeEnum vertexTypeEnum) {
-               Map<String, Object> checkedProperties = new HashMap<>();
-               checkedProperties.put(GraphPropertiesDictionary.LABEL.getProperty(), vertexTypeEnum.getName());
-               if (isVertexNotExist(checkedProperties)) {
-                       Vertex vertex = graph.addVertex();
-                       vertex.property(GraphPropertyEnum.UNIQUE_ID.getProperty(), IdBuilderUtils.generateUniqueId());
-                       vertex.property(GraphPropertyEnum.LABEL.getProperty(), vertexTypeEnum.getName());
-                       graph.tx().commit();
-               }
-       }
-
+    private static Logger logger = LoggerFactory.getLogger(JanusGraphInitializer.class.getName());
+    private static JanusGraph graph;
+
+    public static boolean createGraph(String janusGraphCfgFile) {
+        logger.info("** createGraph with {}", janusGraphCfgFile);
+        try {
+            logger.info("createGraph : try to load file {}", janusGraphCfgFile);
+            graph = JanusGraphFactory.open(janusGraphCfgFile);
+            if (graph.isClosed()) {
+                return false;
+            }
+        } catch (JanusGraphException e) {
+            logger.info("createGraph : failed to open JanusGraph graph with configuration file: {}", janusGraphCfgFile, e);
+            return false;
+        }
+        createIndexesAndDefaults();
+        logger.info("** JanusGraph graph created ");
+        return true;
+    }
+
+    /**
+     * Checks whether at least one vertex matching all of the given properties exists.
+     *
+     * @param properties property name/value pairs a matching vertex must carry; may be null or
+     *                   empty, in which case the query matches any vertex at all
+     * @return true when a matching vertex exists
+     */
+    private static boolean isVertexExist(Map<String, Object> properties) {
+        JanusGraphQuery query = graph.query();
+        if (properties != null && !properties.isEmpty()) {
+            for (Map.Entry<String, Object> entry : properties.entrySet()) {
+                query = query.has(entry.getKey(), entry.getValue());
+            }
+        }
+        // Only existence matters, so probing the iterator once is enough; no need to
+        // materialise the result set or the misspelled intermediate locals.
+        return query.vertices().iterator().hasNext();
+    }
+
+    /**
+     * Convenience negation of {@link #isVertexExist(Map)}.
+     *
+     * @param properties property name/value pairs a matching vertex must carry
+     * @return true when no matching vertex exists
+     */
+    private static boolean isVertexNotExist(Map<String, Object> properties) {
+        return !isVertexExist(properties);
+    }
+
+    /**
+     * Creates the built-in default admin user vertex (if absent) and commits the transaction.
+     */
+    private static void createDefaultAdminUser() {
+        createUser(getDefaultUserAdmin());
+        graph.tx().commit();
+    }
+
+    /**
+     * Creates a vertex for the given user unless a user vertex with the same USERID already
+     * exists. The caller is responsible for committing the transaction.
+     *
+     * @param user the user data to persist as vertex properties
+     */
+    private static void createUser(UserData user) {
+        Map<String, Object> checkedProperties = new HashMap<>();
+        checkedProperties.put(GraphPropertiesDictionary.USERID.getProperty(), user.getUserId());
+        checkedProperties.put(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.User.getName());
+        // Use the class's isVertexNotExist helper for consistency with createRootVertex.
+        if (isVertexNotExist(checkedProperties)) {
+            Vertex vertex = graph.addVertex();
+            vertex.property(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.User.getName());
+            // Declare the map where it is used instead of pre-initialising it to null above.
+            Map<String, Object> properties = user.toGraphMap();
+            for (Map.Entry<String, Object> entry : properties.entrySet()) {
+                vertex.property(entry.getKey(), entry.getValue());
+            }
+        }
+    }
+
+    /**
+     * Builds the hard-coded default admin user ("jh0003") that is seeded into a fresh graph.
+     *
+     * @return a fully populated UserData instance with ADMIN role and ACTIVE status
+     */
+    private static UserData getDefaultUserAdmin() {
+        UserData userData = new UserData();
+        userData.setAction(ActionEnum.Create);
+        userData.setElementType(GraphElementTypeEnum.Node);
+        userData.setUserId("jh0003");
+        userData.setEmail("admin@sdc.com");
+        userData.setFirstName("Jimmy");
+        userData.setLastName("Hendrix");
+        userData.setRole("ADMIN");
+        userData.setStatus(UserStatusEnum.ACTIVE.name());
+        // 0L marks "never logged in".
+        userData.setLastLoginTime(0L);
+        return userData;
+    }
+
+    /**
+     * Creates property keys and composite indexes for every vertex property declared in
+     * GraphPropertiesDictionary. Unique properties additionally get LOCK consistency on both
+     * the key and the index. (Method name typo "Indixes" kept — it is referenced by callers.)
+     */
+    private static void createVertexIndixes() {
+        logger.info("** createVertexIndixes started");
+        JanusGraphManagement graphMgt = graph.openManagement();
+        JanusGraphIndex index = null;
+        for (GraphPropertiesDictionary prop : GraphPropertiesDictionary.values()) {
+            PropertyKey propKey = null;
+            if (!graphMgt.containsPropertyKey(prop.getProperty())) {
+                Class<?> clazz = prop.getClazz();
+                // Collection-typed properties (List/Map) are skipped: no plain property key is made.
+                if (!clazz.isAssignableFrom(ArrayList.class) && !clazz.isAssignableFrom(HashMap.class)) {
+                    propKey = graphMgt.makePropertyKey(prop.getProperty()).dataType(prop.getClazz()).make();
+                }
+            } else {
+                propKey = graphMgt.getPropertyKey(prop.getProperty());
+            }
+            if (prop.isIndexed()) {
+                if (!graphMgt.containsGraphIndex(prop.getProperty())) {
+                    // NOTE(review): propKey can still be null here for a collection-typed indexed
+                    // property — addKey(null) would fail; confirm the dictionary never declares one.
+                    if (prop.isUnique()) {
+                        index = graphMgt.buildIndex(prop.getProperty(), Vertex.class).addKey(propKey).unique().buildCompositeIndex();
+                        graphMgt.setConsistency(propKey, ConsistencyModifier.LOCK); // Ensures only one value per vertex
+                        graphMgt.setConsistency(index, ConsistencyModifier.LOCK); // Ensures value uniqueness in the graph
+                    } else {
+                        graphMgt.buildIndex(prop.getProperty(), Vertex.class).addKey(propKey).buildCompositeIndex();
+                    }
+                }
+            }
+        }
+        graphMgt.commit();
+        logger.info("** createVertexIndixes ended");
+    }
+
+    /**
+     * Creates property keys and composite edge indexes for every edge property declared in
+     * GraphEdgePropertiesDictionary, skipping any index that already exists.
+     */
+    private static void createEdgeIndixes() {
+        logger.info("** createEdgeIndixes started");
+        JanusGraphManagement graphMgt = graph.openManagement();
+        for (GraphEdgePropertiesDictionary prop : GraphEdgePropertiesDictionary.values()) {
+            if (!graphMgt.containsGraphIndex(prop.getProperty())) {
+                PropertyKey propKey = graphMgt.makePropertyKey(prop.getProperty()).dataType(prop.getClazz()).make();
+                graphMgt.buildIndex(prop.getProperty(), Edge.class).addKey(propKey).buildCompositeIndex();
+            }
+        }
+        graphMgt.commit();
+        logger.info("** createEdgeIndixes ended");
+    }
+
+    /**
+     * One-shot initialisation of a freshly opened graph: vertex/edge indexes, the default
+     * admin user, and the catalog/archive root vertices.
+     */
+    private static void createIndexesAndDefaults() {
+        createVertexIndixes();
+        createEdgeIndixes();
+        createDefaultAdminUser();
+        createRootVertex(VertexTypeEnum.CATALOG_ROOT);
+        createRootVertex(VertexTypeEnum.ARCHIVE_ROOT);
+    }
+
+    /**
+     * Creates (and commits) a root vertex of the given type unless one with that label already
+     * exists in the graph.
+     *
+     * @param vertexTypeEnum the root vertex type (e.g. CATALOG_ROOT, ARCHIVE_ROOT)
+     */
+    private static void createRootVertex(VertexTypeEnum vertexTypeEnum) {
+        Map<String, Object> checkedProperties = new HashMap<>();
+        checkedProperties.put(GraphPropertiesDictionary.LABEL.getProperty(), vertexTypeEnum.getName());
+        if (isVertexNotExist(checkedProperties)) {
+            Vertex vertex = graph.addVertex();
+            vertex.property(GraphPropertyEnum.UNIQUE_ID.getProperty(), IdBuilderUtils.generateUniqueId());
+            vertex.property(GraphPropertyEnum.LABEL.getProperty(), vertexTypeEnum.getName());
+            graph.tx().commit();
+        }
+    }
 }
index d757d81..64a78af 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl;
 
 import java.util.ArrayList;
@@ -55,8 +54,7 @@ public class ProductLogic {
         Transaction transac = null;
         try (JanusGraph graph = JanusGraphFactory.open(janusGraphFile)) {
             transac = graph.tx();
-            Iterable vertices = graph.query()
-                    .has(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.Product.getName()).vertices();
+            Iterable vertices = graph.query().has(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.Product.getName()).vertices();
             if (vertices != null) {
                 for (Vertex vertex : (Iterable<Vertex>) vertices) {
                     String id = vertex.value(GraphPropertiesDictionary.UNIQUE_ID.getProperty());
index 3332131..84501de 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl;
 
 import java.util.Properties;
@@ -32,12 +31,10 @@ import org.openecomp.sdc.common.log.wrappers.Logger;
 public class RestUtils {
 
     static final String DELETE_PRODUCT = "http://%s:%s/sdc2/rest/v1/catalog/products/%s";
-
     private static Logger log = Logger.getLogger(RestUtils.class.getName());
 
     public Integer deleteProduct(String productUid, String beHost, String bePort, String adminUser) {
         String url = String.format(DELETE_PRODUCT, beHost, bePort, productUid);
-
         Properties headers = new Properties();
         headers.put("USER_ID", adminUser);
         try {
index 7f9064f..7807a56 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl;
 
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.janusgraph.core.JanusGraph;
 import org.janusgraph.core.JanusGraphFactory;
@@ -29,142 +32,98 @@ import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
 import org.openecomp.sdc.be.model.LifecycleStateEnum;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
 public class UpdatePropertyOnVertex {
 
-       private static Logger log = Logger.getLogger(UpdatePropertyOnVertex.class.getName());
-
-       public Integer updatePropertyOnServiceAtLeastCertified(String janusGraphFile, Map<String, Object> keyValueToSet,
-                       List<Map<String, Object>> orCriteria) {
-
-               JanusGraph graph = null;
-
-               Integer numberOfUpdatedVertexes = 0;
-
-               try {
-                       graph = openGraph(janusGraphFile);
-
-                       if (orCriteria != null && false == orCriteria.isEmpty()) {
-
-                               for (Map<String, Object> criteria : orCriteria) {
-
-                                       JanusGraphQuery<? extends JanusGraphQuery> query = graph.query();
-
-                                       if (criteria != null && !criteria.isEmpty()) {
-                                               for (Map.Entry<String, Object> entry : criteria.entrySet()) {
-                                                       query = query.has(entry.getKey(), entry.getValue());
-                                               }
-                                       }
-
-                                       Iterator iterator = query
-                                                       .has(GraphPropertiesDictionary.STATE.getProperty(), LifecycleStateEnum.CERTIFIED.name())
-                                                       .vertices().iterator();
-
-                                       boolean isFoundAtLeastOneCertifiedService = false;
-                                       while (iterator.hasNext()) {
-                                               Vertex vertex = (Vertex) iterator.next();
-
-                                               Map<String, Object> leftProps = Utils.getProperties(vertex);
-                                               boolean vertexLeftContainsRightProps = Utils.vertexLeftContainsRightProps(leftProps, criteria);
-                                               if (false == vertexLeftContainsRightProps) {
-                                                       log.debug("Ignore vertex since properties it does not contains properties {}. Vertex properties are {}",criteria,leftProps);
-                                                       continue;
-                                               }
-
-                                               isFoundAtLeastOneCertifiedService = true;
-                                               break;
-                                       }
-
-                                       if (true == isFoundAtLeastOneCertifiedService) {
-
-                                               Integer currentNumberOfUpdates = updateVertexes(keyValueToSet, graph, criteria);
-
-                                               if (currentNumberOfUpdates != null) {
-                                                       numberOfUpdatedVertexes += currentNumberOfUpdates;
-                                               }
-
-                                       } else {
-                                               log.debug("No certified service was found for criteria {}",criteria);
-                                       }
-                               }
-
-                       }
-
-                       graph.tx().commit();
-
-                       return numberOfUpdatedVertexes;
-
-               } catch (Exception e) {
-                       e.printStackTrace();
-                       graph.tx().rollback();
-
-                       return null;
-
-               } finally {
-                       if (graph != null) {
-                               graph.close();
-                       }
-               }
-
-       }
-
-       private Integer updateVertexes(Map<String, Object> keyValueToSet, JanusGraph graph, Map<String, Object> criteria) {
-               Integer numberOfUpdatedVertexesPerService = 0;
-
-               JanusGraphQuery<? extends JanusGraphQuery> updateQuery = graph.query();
-
-               if (criteria != null && !criteria.isEmpty()) {
-                       for (Map.Entry<String, Object> entry : criteria.entrySet()) {
-                               updateQuery = updateQuery.has(entry.getKey(), entry.getValue());
-                       }
-               }
-               Iterator updateIterator = updateQuery.vertices().iterator();
-
-               while (updateIterator.hasNext()) {
-
-                       Vertex vertex = (Vertex) updateIterator.next();
-
-                       Map<String, Object> leftProps = Utils.getProperties(vertex);
-
-                       boolean vertexLeftContainsRightProps = Utils.vertexLeftContainsRightProps(leftProps, criteria);
-                       if (false == vertexLeftContainsRightProps) {
-                               log.debug("Ignore vertex since properties it does not contains properties {}. Vertex properties are {}",criteria,leftProps);
-                               continue;
-                       }
-
-                       if (keyValueToSet != null) {
-                               for (Entry<String, Object> entry : keyValueToSet.entrySet()) {
-                                       String key = entry.getKey();
-                                       Object value = entry.getValue();
-
-                                       vertex.property(key, value);
-                                       
-                                       if (log.isDebugEnabled()){
-                                               log.debug("After setting vertex {} {} with key value {},{}",  
-                                                       vertex.property(GraphPropertiesDictionary.NAME.getProperty()),
-                                                       vertex.property(GraphPropertiesDictionary.VERSION.getProperty()),key,value);
-                                       }
-                                       numberOfUpdatedVertexesPerService++;
-                               }
-                       }
-
-               }
-
-               log.info(
-                               "The number of updated services for criteria " + criteria + " is " + numberOfUpdatedVertexesPerService);
-               return numberOfUpdatedVertexesPerService;
-       }
-
-       public JanusGraph openGraph(String janusGraphFileLocation) {
-
-               JanusGraph graph = JanusGraphFactory.open(janusGraphFileLocation);
-
-               return graph;
-
-       }
-
+    private static Logger log = Logger.getLogger(UpdatePropertyOnVertex.class.getName());
+
+    public Integer updatePropertyOnServiceAtLeastCertified(String janusGraphFile, Map<String, Object> keyValueToSet,
+                                                           List<Map<String, Object>> orCriteria) {
+        JanusGraph graph = null;
+        Integer numberOfUpdatedVertexes = 0;
+        try {
+            graph = openGraph(janusGraphFile);
+            if (orCriteria != null && false == orCriteria.isEmpty()) {
+                for (Map<String, Object> criteria : orCriteria) {
+                    JanusGraphQuery<? extends JanusGraphQuery> query = graph.query();
+                    if (criteria != null && !criteria.isEmpty()) {
+                        for (Map.Entry<String, Object> entry : criteria.entrySet()) {
+                            query = query.has(entry.getKey(), entry.getValue());
+                        }
+                    }
+                    Iterator iterator = query.has(GraphPropertiesDictionary.STATE.getProperty(), LifecycleStateEnum.CERTIFIED.name()).vertices()
+                        .iterator();
+                    boolean isFoundAtLeastOneCertifiedService = false;
+                    while (iterator.hasNext()) {
+                        Vertex vertex = (Vertex) iterator.next();
+                        Map<String, Object> leftProps = Utils.getProperties(vertex);
+                        boolean vertexLeftContainsRightProps = Utils.vertexLeftContainsRightProps(leftProps, criteria);
+                        if (false == vertexLeftContainsRightProps) {
+                            log.debug("Ignore vertex since properties it does not contains properties {}. Vertex properties are {}", criteria,
+                                leftProps);
+                            continue;
+                        }
+                        isFoundAtLeastOneCertifiedService = true;
+                        break;
+                    }
+                    if (true == isFoundAtLeastOneCertifiedService) {
+                        Integer currentNumberOfUpdates = updateVertexes(keyValueToSet, graph, criteria);
+                        if (currentNumberOfUpdates != null) {
+                            numberOfUpdatedVertexes += currentNumberOfUpdates;
+                        }
+                    } else {
+                        log.debug("No certified service was found for criteria {}", criteria);
+                    }
+                }
+            }
+            graph.tx().commit();
+            return numberOfUpdatedVertexes;
+        } catch (Exception e) {
+            e.printStackTrace();
+            graph.tx().rollback();
+            return null;
+        } finally {
+            if (graph != null) {
+                graph.close();
+            }
+        }
+    }
+
+    /**
+     * Sets the given key/value pairs on every vertex matching the criteria map. Updates are
+     * counted per key, per vertex.
+     *
+     * @return the number of property updates performed
+     */
+    private Integer updateVertexes(Map<String, Object> keyValueToSet, JanusGraph graph, Map<String, Object> criteria) {
+        Integer numberOfUpdatedVertexesPerService = 0;
+        JanusGraphQuery<? extends JanusGraphQuery> updateQuery = graph.query();
+        if (criteria != null && !criteria.isEmpty()) {
+            for (Map.Entry<String, Object> entry : criteria.entrySet()) {
+                updateQuery = updateQuery.has(entry.getKey(), entry.getValue());
+            }
+        }
+        Iterator updateIterator = updateQuery.vertices().iterator();
+        while (updateIterator.hasNext()) {
+            Vertex vertex = (Vertex) updateIterator.next();
+            Map<String, Object> leftProps = Utils.getProperties(vertex);
+            // Re-check the criteria explicitly before mutating the vertex.
+            if (!Utils.vertexLeftContainsRightProps(leftProps, criteria)) {
+                log.debug("Ignore vertex since properties it does not contains properties {}. Vertex properties are {}", criteria, leftProps);
+                continue;
+            }
+            if (keyValueToSet != null) {
+                for (Entry<String, Object> entry : keyValueToSet.entrySet()) {
+                    String key = entry.getKey();
+                    Object value = entry.getValue();
+                    vertex.property(key, value);
+                    if (log.isDebugEnabled()) {
+                        log.debug("After setting vertex {} {} with key value {},{}", vertex.property(GraphPropertiesDictionary.NAME.getProperty()),
+                            vertex.property(GraphPropertiesDictionary.VERSION.getProperty()), key, value);
+                    }
+                    numberOfUpdatedVertexesPerService++;
+                }
+            }
+        }
+        // Parameterized logging instead of string concatenation.
+        log.info("The number of updated services for criteria {} is {}", criteria, numberOfUpdatedVertexesPerService);
+        return numberOfUpdatedVertexesPerService;
+    }
+
+    public JanusGraph openGraph(String janusGraphFileLocation) {
+        JanusGraph graph = JanusGraphFactory.open(janusGraphFileLocation);
+        return graph;
+    }
 }
index 3633be7..cadab7f 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl;
 
+import static java.util.Collections.emptyList;
+import static java.util.stream.Collectors.toList;
+
+import java.io.IOException;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.collections.MapUtils;
 import org.apache.tinkerpop.gremlin.structure.Direction;
@@ -41,31 +49,15 @@ import org.openecomp.sdc.be.model.jsonjanusgraph.enums.JsonConstantKeysEnum;
 import org.openecomp.sdc.be.model.operations.StorageException;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
-import java.io.IOException;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import static java.util.Collections.emptyList;
-import static java.util.stream.Collectors.toList;
-
 @org.springframework.stereotype.Component("vrfObjectFixHandler")
 public class VrfObjectFixHandler {
 
     private static final Logger log = Logger.getLogger(VrfObjectFixHandler.class);
     private static final String VALID_TOSCA_NAME = "org.openecomp.nodes.VRFObject";
-    private static final Object[] outputTableTitle =
-            new String[]{"VRF OBJECT VERSION",
-                    "CONTAINER NAME",
-                    "CONTAINER UNIQUE ID",
-                    "INSTANCE NAME",
-                    "INSTANCE UNIQUE ID"};
-
-    private XlsOutputHandler outputHandler;
+    private static final Object[] outputTableTitle = new String[]{"VRF OBJECT VERSION", "CONTAINER NAME", "CONTAINER UNIQUE ID", "INSTANCE NAME",
+        "INSTANCE UNIQUE ID"};
     private final String sheetName = this.getClass().getSimpleName() + "Report";
-
+    private XlsOutputHandler outputHandler;
     private JanusGraphDao janusGraphDao;
 
     public VrfObjectFixHandler(JanusGraphDao janusGraphDao) {
@@ -74,24 +66,24 @@ public class VrfObjectFixHandler {
 
     public boolean handle(String mode, String outputPath) {
         outputHandler = new XlsOutputHandler(outputPath, sheetName, outputTableTitle);
-        switch (mode){
-            case "detect" :
+        switch (mode) {
+            case "detect":
                 return detectCorruptedData();
             case "fix":
                 return fixCorruptedData();
-            default :
+            default:
                 log.debug("#handle - The invalid mode parameter has been received: {}", mode);
                 return false;
         }
     }
 
-    private boolean fixCorruptedData(){
-        try{
-            Map<GraphVertex,Map<Vertex, List<ComponentInstanceDataDefinition>>> corruptedData = fetchCorruptedData();
+    private boolean fixCorruptedData() {
+        try {
+            Map<GraphVertex, Map<Vertex, List<ComponentInstanceDataDefinition>>> corruptedData = fetchCorruptedData();
             corruptedData.forEach(this::fixCorruptedVfrObjectAndRelatedInstances);
             janusGraphDao.commit();
             writeOutput(corruptedData);
-        } catch (Exception e){
+        } catch (Exception e) {
             janusGraphDao.rollback();
             log.debug("#fixCorruptedData - Failed to detect corrupted data. The exception occurred: ", e);
             return false;
@@ -99,11 +91,11 @@ public class VrfObjectFixHandler {
         return true;
     }
 
-    private boolean detectCorruptedData(){
-        try{
-            Map<GraphVertex,Map<Vertex, List<ComponentInstanceDataDefinition>>> corruptedData = fetchCorruptedData();
+    private boolean detectCorruptedData() {
+        try {
+            Map<GraphVertex, Map<Vertex, List<ComponentInstanceDataDefinition>>> corruptedData = fetchCorruptedData();
             writeOutput(corruptedData);
-        } catch (Exception e){
+        } catch (Exception e) {
             log.debug("#detectCorruptedData - Failed to detect corrupted data. The exception occurred: ", e);
             return false;
         }
@@ -120,10 +112,10 @@ public class VrfObjectFixHandler {
         janusGraphDao.updateVertex(vfrObjectV).left().on(this::rightOnUpdate);
     }
 
-    private Map<GraphVertex,Map<Vertex,List<ComponentInstanceDataDefinition>>> fetchCorruptedData(){
-        Map<GraphVertex,Map<Vertex, List<ComponentInstanceDataDefinition>>> corruptedData = new HashMap<>();
+    private Map<GraphVertex, Map<Vertex, List<ComponentInstanceDataDefinition>>> fetchCorruptedData() {
+        Map<GraphVertex, Map<Vertex, List<ComponentInstanceDataDefinition>>> corruptedData = new HashMap<>();
         List<GraphVertex> vrfObjectsV = getCorruptedVrfObjects();
-        vrfObjectsV.forEach(vrfObjectV-> fillCorruptedData(vrfObjectV, corruptedData));
+        vrfObjectsV.forEach(vrfObjectV -> fillCorruptedData(vrfObjectV, corruptedData));
         return corruptedData;
     }
 
@@ -137,27 +129,23 @@ public class VrfObjectFixHandler {
         Map<Vertex, List<ComponentInstanceDataDefinition>> corruptedInstances = new HashMap<>();
         findToUpdate.put(vrfObjectV, corruptedInstances);
         Iterator<Edge> instanceEdges = vrfObjectV.getVertex().edges(Direction.IN, EdgeLabelEnum.INSTANCE_OF.name());
-        while(instanceEdges.hasNext()){
+        while (instanceEdges.hasNext()) {
             Edge edge = instanceEdges.next();
-            putCorruptedInstances(corruptedInstances, edge, (List<String>) janusGraphDao
-                .getProperty(edge, EdgePropertyEnum.INSTANCES));
+            putCorruptedInstances(corruptedInstances, edge, (List<String>) janusGraphDao.getProperty(edge, EdgePropertyEnum.INSTANCES));
         }
     }
 
     private void putCorruptedInstances(Map<Vertex, List<ComponentInstanceDataDefinition>> corruptedInstances, Edge edge, List<String> ids) {
-        if(CollectionUtils.isNotEmpty(ids)){
+        if (CollectionUtils.isNotEmpty(ids)) {
             Vertex container = edge.outVertex();
             Map<String, ? extends ToscaDataDefinition> jsonObj = getJsonMap(container);
-            CompositionDataDefinition composition = (CompositionDataDefinition)jsonObj.get(JsonConstantKeysEnum.COMPOSITION.getValue());
-            corruptedInstances.put(container, composition.getComponentInstances()
-                    .values()
-                    .stream()
-                    .filter(i->ids.contains(i.getUniqueId()))
-                    .collect(toList()));
+            CompositionDataDefinition composition = (CompositionDataDefinition) jsonObj.get(JsonConstantKeysEnum.COMPOSITION.getValue());
+            corruptedInstances
+                .put(container, composition.getComponentInstances().values().stream().filter(i -> ids.contains(i.getUniqueId())).collect(toList()));
         }
     }
 
-    private void fixCorruptedContainerInstances(Vertex container, List<ComponentInstanceDataDefinition> corruptedInstances){
+    private void fixCorruptedContainerInstances(Vertex container, List<ComponentInstanceDataDefinition> corruptedInstances) {
         try {
             Map jsonObj = getJsonMap(container);
             fixComponentToscaName(corruptedInstances, jsonObj);
@@ -169,29 +157,22 @@ public class VrfObjectFixHandler {
     }
 
     private void fixComponentToscaName(List<ComponentInstanceDataDefinition> corruptedInstances, Map<String, ? extends ToscaDataDefinition> jsonObj) {
-        List<String> ids = corruptedInstances
-                .stream()
-                .map(ComponentInstanceDataDefinition::getUniqueId)
-                .collect(toList());
-
-        CompositionDataDefinition composition = (CompositionDataDefinition)jsonObj.get(JsonConstantKeysEnum.COMPOSITION.getValue());
-        composition.getComponentInstances()
-                .values()
-                .stream()
-                .filter(i->ids.contains(i.getUniqueId()))
-                .forEach(i->i.setToscaComponentName(VALID_TOSCA_NAME));
+        List<String> ids = corruptedInstances.stream().map(ComponentInstanceDataDefinition::getUniqueId).collect(toList());
+        CompositionDataDefinition composition = (CompositionDataDefinition) jsonObj.get(JsonConstantKeysEnum.COMPOSITION.getValue());
+        composition.getComponentInstances().values().stream().filter(i -> ids.contains(i.getUniqueId()))
+            .forEach(i -> i.setToscaComponentName(VALID_TOSCA_NAME));
     }
 
     private Map getJsonMap(Vertex container) {
-        String json = (String)container.property(GraphPropertyEnum.JSON.getProperty()).value();
+        String json = (String) container.property(GraphPropertyEnum.JSON.getProperty()).value();
         Map<GraphPropertyEnum, Object> properties = janusGraphDao.getVertexProperties(container);
         VertexTypeEnum label = VertexTypeEnum.getByName((String) (properties.get(GraphPropertyEnum.LABEL)));
         return JsonParserUtils.toMap(json, label != null ? label.getClassOfJson() : null);
     }
 
     private void writeOutput(Map<GraphVertex, Map<Vertex, List<ComponentInstanceDataDefinition>>> corruptedData) {
-        if(outputHandler.getOutputPath() != null){
-            if(MapUtils.isNotEmpty(corruptedData)){
+        if (outputHandler.getOutputPath() != null) {
+            if (MapUtils.isNotEmpty(corruptedData)) {
                 corruptedData.forEach(this::addVrfObjectRecord);
             } else {
                 outputHandler.addRecord("CORRUPTED VRF OBJECT NOT FOUND");
@@ -201,11 +182,12 @@ public class VrfObjectFixHandler {
     }
 
     private List<GraphVertex> rightOnGet(JanusGraphOperationStatus status) {
-        if(status == JanusGraphOperationStatus.NOT_FOUND){
+        if (status == JanusGraphOperationStatus.NOT_FOUND) {
             return emptyList();
         }
         throw new StorageException(status);
     }
+
     private GraphVertex rightOnUpdate(JanusGraphOperationStatus status) {
         throw new StorageException(status);
     }
@@ -216,7 +198,8 @@ public class VrfObjectFixHandler {
     }
 
     private void addVrfObjectInstances(Vertex container, List<ComponentInstanceDataDefinition> instances) {
-        outputHandler.addRecord("", container.property(GraphPropertyEnum.NAME.getProperty()).value().toString(), container.property(GraphPropertyEnum.UNIQUE_ID.getProperty()).value().toString());
-        instances.forEach(i->outputHandler.addRecord("","","",i.getName(),i.getUniqueId()));
+        outputHandler.addRecord("", container.property(GraphPropertyEnum.NAME.getProperty()).value().toString(),
+            container.property(GraphPropertyEnum.UNIQUE_ID.getProperty()).value().toString());
+        instances.forEach(i -> outputHandler.addRecord("", "", "", i.getName(), i.getUniqueId()));
     }
 }
index 0adaf51..c3364f1 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  */
 package org.openecomp.sdc.asdctool.impl.internal.tool;
 
+import java.io.IOException;
+import java.util.Map;
 import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
 import org.openecomp.sdc.asdctool.utils.ReportWriter;
 import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
 
-import java.io.IOException;
-import java.util.Map;
-
 public abstract class CommonInternalTool {
+
     protected ReportWriter reportWriter;
     private String reportType;
-    
-    CommonInternalTool(String reportType){
+
+    CommonInternalTool(String reportType) {
         this.reportType = reportType;
     }
-    protected ReportWriter getReportWriter() throws IOException{
-        if ( reportWriter == null ){
-            reportWriter = new ReportWriter(reportType); 
+
+    protected ReportWriter getReportWriter() throws IOException {
+        if (reportWriter == null) {
+            reportWriter = new ReportWriter(reportType);
         }
         return reportWriter;
     }
+
     public void closeAll() {
         try {
             getReportWriter().close();
         } catch (IOException e) {
             ConsoleWriter.dataLine("\nFailed to close report file.");
-       }
+        }
     }
+
     protected void printComponentInfo(Map<GraphPropertyEnum, Object> metadataProperties) {
         ConsoleWriter.dataLine("component from type", metadataProperties.get(GraphPropertyEnum.COMPONENT_TYPE));
         ConsoleWriter.dataLine("component name", metadataProperties.get(GraphPropertyEnum.NAME));
index 751801f..2b655b0 100644 (file)
@@ -62,8 +62,7 @@ public class CsarGenerator extends CommonInternalTool {
     private ToscaExportHandler toscaExportHandler;
 
     @Autowired
-    public CsarGenerator(JanusGraphDao janusGraphDao, CsarUtils csarUtils,
-                         ToscaOperationFacade toscaOperationFacade,
+    public CsarGenerator(JanusGraphDao janusGraphDao, CsarUtils csarUtils, ToscaOperationFacade toscaOperationFacade,
                          ArtifactCassandraDao artifactCassandraDao, ToscaExportHandler toscaExportHandler) {
         super("generate");
         this.janusGraphDao = janusGraphDao;
@@ -75,26 +74,21 @@ public class CsarGenerator extends CommonInternalTool {
 
     public void generateCsar(String uuid, Scanner scanner) {
         JanusGraphOperationStatus status = JanusGraphOperationStatus.OK;
-
         Map<GraphPropertyEnum, Object> props = new EnumMap<>(GraphPropertyEnum.class);
         props.put(GraphPropertyEnum.UUID, uuid);
         props.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
         props.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
-
-        List<GraphVertex> byCriteria = janusGraphDao
-            .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, props).either(l -> l, r -> null);
+        List<GraphVertex> byCriteria = janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, props).either(l -> l, r -> null);
         if (byCriteria != null && !byCriteria.isEmpty()) {
             if (byCriteria.size() > 1) {
                 ConsoleWriter.dataLine("Warning ! More that 1 certified service with uuid", uuid);
                 // TBD
             } else {
                 GraphVertex metadataV = byCriteria.get(0);
-
                 printComponentInfo(metadataV.getMetadataProperties());
                 ConsoleWriter.dataLine("\nGenerate CSAR (yes/no)?");
                 String input = scanner.nextLine();
                 if (input.equalsIgnoreCase("yes")) {
-
                     status = handleService(metadataV, uuid);
                 }
             }
@@ -112,22 +106,18 @@ public class CsarGenerator extends CommonInternalTool {
         JanusGraphOperationStatus status = JanusGraphOperationStatus.OK;
         org.openecomp.sdc.be.model.Component component = toscaOperationFacade.getToscaFullElement(metadataV.getUniqueId()).either(l -> l, r -> null);
         if (component != null) {
-
             Supplier<byte[]> supplier = () -> generateToscaPayload(component);
             generateArtifact(component, ArtifactTypeEnum.TOSCA_TEMPLATE, supplier);
-
             supplier = () -> generateCsarPayload(component);
             generateArtifact(component, ArtifactTypeEnum.TOSCA_CSAR, supplier);
-
-            GraphVertex toscaArtifactV = janusGraphDao
-                .getChildVertex(metadataV, EdgeLabelEnum.TOSCA_ARTIFACTS, JsonParseFlagEnum.ParseJson).either(l -> l, r -> null);
+            GraphVertex toscaArtifactV = janusGraphDao.getChildVertex(metadataV, EdgeLabelEnum.TOSCA_ARTIFACTS, JsonParseFlagEnum.ParseJson)
+                .either(l -> l, r -> null);
             if (toscaArtifactV != null) {
                 Map<String, ArtifactDataDefinition> copy = component.getToscaArtifacts().entrySet().stream()
                     .collect(Collectors.toMap(Map.Entry::getKey, e -> new ArtifactDataDefinition(e.getValue())));
                 toscaArtifactV.setJson(copy);
                 janusGraphDao.updateVertex(toscaArtifactV);
             }
-
         } else {
             ConsoleWriter.dataLine("Failed to fetch certified service with UUID ", uuid);
         }
@@ -141,7 +131,6 @@ public class CsarGenerator extends CommonInternalTool {
             .filter(p -> p.getArtifactType().equals(artifactType.getType())).findAny();
         if (op.isPresent()) {
             csarArtifact = op.get();
-
             status = savePayload(component, csarArtifact, supplier);
         }
         return status;
@@ -158,7 +147,6 @@ public class CsarGenerator extends CommonInternalTool {
     private JanusGraphOperationStatus savePayload(org.openecomp.sdc.be.model.Component component, ArtifactDefinition csarArtifact,
                                                   Supplier<byte[]> supplier) {
         byte[] payload = supplier.get();
-
         if (payload == null) {
             ConsoleWriter.dataLine("create artifact failed ", csarArtifact.getArtifactLabel());
             return JanusGraphOperationStatus.GENERAL_ERROR;
@@ -166,19 +154,15 @@ public class CsarGenerator extends CommonInternalTool {
         ConsoleWriter.dataLine("create artifact  success ", csarArtifact.getArtifactLabel());
         csarArtifact.setPayload(payload);
         byte[] decodedPayload = csarArtifact.getPayloadData();
-
         String uniqueId = UniqueIdBuilder.buildPropertyUniqueId(component.getUniqueId(), csarArtifact.getArtifactLabel());
         csarArtifact.setUniqueId(uniqueId);
         csarArtifact.setEsId(csarArtifact.getUniqueId());
-
         ConsoleWriter.dataLine("create artifact unique id ", uniqueId);
-
         csarArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
         DAOArtifactData artifactData = new DAOArtifactData(csarArtifact.getEsId(), decodedPayload);
         artifactCassandraDao.saveArtifact(artifactData);
         ConsoleWriter.dataLine("Artifact generated and saved into Cassandra ", csarArtifact.getArtifactLabel());
         report(component, csarArtifact);
-
         return JanusGraphOperationStatus.OK;
     }
 
@@ -195,7 +179,6 @@ public class CsarGenerator extends CommonInternalTool {
         dataToPrint.put("artifact id", csarArtifact.getUniqueId());
         dataToPrint.put("csar es id", csarArtifact.getEsId());
         dataToPrint.put("artifact checksum", csarArtifact.getArtifactChecksum());
-
         try {
             getReportWriter().report(dataToPrint);
         } catch (IOException e) {
index 62dd489..b286ffd 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 package org.openecomp.sdc.asdctool.impl.internal.tool;
 
 import fj.data.Either;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Scanner;
 import org.apache.tinkerpop.gremlin.structure.Direction;
 import org.apache.tinkerpop.gremlin.structure.Edge;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
@@ -38,30 +42,23 @@ import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Scanner;
-
 @Component("deleteComponentHandler")
-public class DeleteComponentHandler extends CommonInternalTool{
+public class DeleteComponentHandler extends CommonInternalTool {
+
+    private static Logger log = Logger.getLogger(DeleteComponentHandler.class.getName());
     private JanusGraphDao janusGraphDao;
     private NodeTypeOperation nodeTypeOperation;
     private TopologyTemplateOperation topologyTemplateOperation;
 
     @Autowired
-    public DeleteComponentHandler(JanusGraphDao janusGraphDao,
-        NodeTypeOperation nodeTypeOperation,
-        TopologyTemplateOperation topologyTemplateOperation) {
+    public DeleteComponentHandler(JanusGraphDao janusGraphDao, NodeTypeOperation nodeTypeOperation,
+                                  TopologyTemplateOperation topologyTemplateOperation) {
         super("delete");
         this.janusGraphDao = janusGraphDao;
         this.nodeTypeOperation = nodeTypeOperation;
         this.topologyTemplateOperation = topologyTemplateOperation;
     }
 
-    private static Logger log = Logger.getLogger(DeleteComponentHandler.class.getName());
-
-
     public void deleteComponent(String id, Scanner scanner) {
         JanusGraphOperationStatus status = JanusGraphOperationStatus.OK;
         GraphVertex metadataVertex = janusGraphDao.getVertexById(id).either(l -> l, r -> null);
@@ -81,7 +78,6 @@ public class DeleteComponentHandler extends CommonInternalTool{
         Map<GraphPropertyEnum, Object> metadataProperties = metadataVertex.getMetadataProperties();
         JanusGraphOperationStatus status = JanusGraphOperationStatus.OK;
         printComponentInfo(metadataProperties);
-
         Iterator<Edge> edges = metadataVertex.getVertex().edges(Direction.OUT, EdgeLabelEnum.VERSION.name());
         if (edges != null && edges.hasNext()) {
             ConsoleWriter.dataLine("\ncomponent is not latest version and cannot be deleted");
@@ -105,22 +101,18 @@ public class DeleteComponentHandler extends CommonInternalTool{
         Iterator<Edge> edges = metadataVertex.getVertex().edges(Direction.IN, EdgeLabelEnum.VERSION.name());
         if (edges != null && edges.hasNext()) {
             JanusGraphOperationStatus status = updatePreviousVersion(metadataVertex, edges);
-            if ( status != JanusGraphOperationStatus.OK ){
+            if (status != JanusGraphOperationStatus.OK) {
                 return status;
             }
         }
-        toscaElementOperation.deleteToscaElement(metadataVertex)
-             .left()
-             .map(l -> {
-                 ConsoleWriter.dataLine("\nDeleted");
-                 report(metadataVertex);
-                 return JanusGraphOperationStatus.OK;
-             })
-             .right()
-             .map(r-> {
-                 ConsoleWriter.dataLine("\nFailed to delete. see log file");
-                 return r;
-             });
+        toscaElementOperation.deleteToscaElement(metadataVertex).left().map(l -> {
+            ConsoleWriter.dataLine("\nDeleted");
+            report(metadataVertex);
+            return JanusGraphOperationStatus.OK;
+        }).right().map(r -> {
+            ConsoleWriter.dataLine("\nFailed to delete. see log file");
+            return r;
+        });
         return JanusGraphOperationStatus.OK;
     }
 
@@ -135,10 +127,9 @@ public class DeleteComponentHandler extends CommonInternalTool{
         }
         // update highest property for previous version
         JanusGraphOperationStatus status = updateStateOfPreviuosVersion(prevVersionVertex);
-        if ( JanusGraphOperationStatus.OK != status ){
+        if (JanusGraphOperationStatus.OK != status) {
             return status;
         }
-        
         // connect to catalog or archive
         return connectToCatalogAndArchive(metadataVertex, prevVersionVertex);
     }
@@ -149,13 +140,11 @@ public class DeleteComponentHandler extends CommonInternalTool{
         GraphVertex prevVertex = prevGraphVertex.left().value();
         prevVertex.addMetadataProperty(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
         janusGraphDao.updateVertex(prevVertex);
-  
         Iterator<Edge> edgesIter = prevVersionVertex.edges(Direction.IN, EdgeLabelEnum.LAST_STATE.name());
-        if ( edgesIter.hasNext() ) {
+        if (edgesIter.hasNext()) {
             Edge lastStateEdge = edgesIter.next();
             Vertex lastModifier = lastStateEdge.outVertex();
-            JanusGraphOperationStatus
-                replaceRes = janusGraphDao
+            JanusGraphOperationStatus replaceRes = janusGraphDao
                 .replaceEdgeLabel(lastModifier, prevVersionVertex, lastStateEdge, EdgeLabelEnum.LAST_STATE, EdgeLabelEnum.STATE);
             if (replaceRes != JanusGraphOperationStatus.OK) {
                 log.info("Failed to replace label from {} to {}. status = {}", EdgeLabelEnum.LAST_STATE, EdgeLabelEnum.STATE, replaceRes);
@@ -166,35 +155,35 @@ public class DeleteComponentHandler extends CommonInternalTool{
         return JanusGraphOperationStatus.OK;
     }
 
-   
     private JanusGraphOperationStatus connectToCatalogAndArchive(GraphVertex metadataVertex, JanusGraphVertex prevVersionVertex) {
-        
-        JanusGraphOperationStatus
-            status = connectByLabel(metadataVertex, prevVersionVertex, EdgeLabelEnum.CATALOG_ELEMENT, VertexTypeEnum.CATALOG_ROOT);
-        if ( status == JanusGraphOperationStatus.OK ){
+        JanusGraphOperationStatus status = connectByLabel(metadataVertex, prevVersionVertex, EdgeLabelEnum.CATALOG_ELEMENT,
+            VertexTypeEnum.CATALOG_ROOT);
+        if (status == JanusGraphOperationStatus.OK) {
             status = connectByLabel(metadataVertex, prevVersionVertex, EdgeLabelEnum.ARCHIVE_ELEMENT, VertexTypeEnum.ARCHIVE_ROOT);
         }
         return status;
     }
 
-    private JanusGraphOperationStatus connectByLabel(GraphVertex metadataVertex, JanusGraphVertex prevVersionVertex, EdgeLabelEnum edgeLabel, VertexTypeEnum vertexlabel) {
+    private JanusGraphOperationStatus connectByLabel(GraphVertex metadataVertex, JanusGraphVertex prevVersionVertex, EdgeLabelEnum edgeLabel,
+                                                     VertexTypeEnum vertexlabel) {
         Iterator<Edge> edgesToCatalog = metadataVertex.getVertex().edges(Direction.IN, edgeLabel.name());
-        if ( edgesToCatalog != null && edgesToCatalog.hasNext() ){
+        if (edgesToCatalog != null && edgesToCatalog.hasNext()) {
             //exist edge move to prev version
             Either<GraphVertex, JanusGraphOperationStatus> catalog = janusGraphDao.getVertexByLabel(vertexlabel);
             if (catalog.isRight()) {
                 log.debug("Failed to fetch {} vertex, error {}", vertexlabel, catalog.right().value());
                 return catalog.right().value();
             }
-            GraphVertex catalogV = catalog.left().value();      
+            GraphVertex catalogV = catalog.left().value();
             Edge edge = edgesToCatalog.next();
-            return janusGraphDao.createEdge(catalogV.getVertex(), prevVersionVertex, edgeLabel, edge );
+            return janusGraphDao.createEdge(catalogV.getVertex(), prevVersionVertex, edgeLabel, edge);
         }
         return JanusGraphOperationStatus.OK;
     }
 
     private boolean isReferenceExist(GraphVertex metadataVertex) {
-        return existEdgeByLabel(metadataVertex, EdgeLabelEnum.INSTANCE_OF) || existEdgeByLabel(metadataVertex, EdgeLabelEnum.PROXY_OF) || existEdgeByLabel(metadataVertex, EdgeLabelEnum.ALLOTTED_OF);
+        return existEdgeByLabel(metadataVertex, EdgeLabelEnum.INSTANCE_OF) || existEdgeByLabel(metadataVertex, EdgeLabelEnum.PROXY_OF)
+            || existEdgeByLabel(metadataVertex, EdgeLabelEnum.ALLOTTED_OF);
     }
 
     private boolean existEdgeByLabel(GraphVertex metadataVertex, EdgeLabelEnum label) {
@@ -210,7 +199,7 @@ public class DeleteComponentHandler extends CommonInternalTool{
             return topologyTemplateOperation;
         }
     }
-   
+
     private void report(GraphVertex metadataVertex) {
         try {
             getReportWriter().report(metadataVertex.getMetadataProperties());
@@ -218,8 +207,4 @@ public class DeleteComponentHandler extends CommonInternalTool{
             ConsoleWriter.dataLine("\nFailed to created report file.");
         }
     }
-
-
-
 }
index 92af43e..7c98948 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator;
 
+import java.util.List;
 import org.openecomp.sdc.asdctool.impl.validator.executor.IArtifactValidatorExecutor;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import java.util.List;
-
 public class ArtifactToolBL {
 
     private static final Logger log = Logger.getLogger(ArtifactToolBL.class);
-
     private final List<IArtifactValidatorExecutor> validators;
 
     @Autowired
index 8f9202f..ae58fad 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,9 +17,9 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator;
 
+import java.util.List;
 import org.openecomp.sdc.asdctool.impl.validator.executor.ValidatorExecutor;
 import org.openecomp.sdc.asdctool.impl.validator.report.Report;
 import org.openecomp.sdc.asdctool.impl.validator.report.ReportFile;
@@ -27,13 +27,10 @@ import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
-import java.util.List;
-
 @Component
 public class ValidationToolBL {
 
     private static final Logger log = Logger.getLogger(ValidationToolBL.class);
-
     private final List<ValidatorExecutor> validators;
 
     @Autowired
@@ -43,17 +40,15 @@ public class ValidationToolBL {
 
     public boolean validateAll(Report report, ReportFile.TXTFile textFile) {
         boolean allValid = true;
-        for (ValidatorExecutor validatorExec: validators) {
-            log.debug("ValidatorExecuter "+validatorExec.getName()+" started");
+        for (ValidatorExecutor validatorExec : validators) {
+            log.debug("ValidatorExecuter " + validatorExec.getName() + " started");
             if (!validatorExec.executeValidations(report, textFile)) {
                 allValid = false;
-                log.debug("ValidatorExecuter "+validatorExec.getName()+" finished with warnings");
-            }
-            else {
-                log.debug("ValidatorExecuter "+validatorExec.getName()+" finished successfully");
+                log.debug("ValidatorExecuter " + validatorExec.getName() + " finished with warnings");
+            } else {
+                log.debug("ValidatorExecuter " + validatorExec.getName() + " finished successfully");
             }
         }
         return allValid;
     }
-
 }
index 6178fe3..0a80632 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.config;
 
 import java.util.function.Supplier;
 
 public final class ValidationConfigManager {
 
+    public static final String DEFAULT_CSV_PATH = "summary.csv";
     private static final String REPORT_OUTPUT_FILE_NAME = "/reportOutput.txt";
     private static final String CSV_FILE_PREFIX = "/csvSummary_";
     private static final String CSV_EXT = ".csv";
-    public static final String DEFAULT_CSV_PATH = "summary.csv";
 
     private ValidationConfigManager() {
     }
index 5fbed89..05cdcd2 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,9 +17,9 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.config;
 
+import java.util.List;
 import org.openecomp.sdc.asdctool.impl.VrfObjectFixHandler;
 import org.openecomp.sdc.asdctool.impl.validator.ArtifactToolBL;
 import org.openecomp.sdc.asdctool.impl.validator.ValidationToolBL;
@@ -67,8 +67,6 @@ import org.springframework.context.annotation.Import;
 import org.springframework.context.annotation.Primary;
 import org.springframework.core.io.FileSystemResource;
 
-import java.util.List;
-
 /**
  * Created by chaya on 7/3/2017.
  */
@@ -83,19 +81,18 @@ public class ValidationToolConfiguration {
 
     @Bean
     public NodeToscaArtifactsValidatorExecutor NodeToscaArtifactsValidatorValidator(JanusGraphDao janusGraphDao,
-        ToscaOperationFacade toscaOperationFacade) {
+                                                                                    ToscaOperationFacade toscaOperationFacade) {
         return new NodeToscaArtifactsValidatorExecutor(janusGraphDao, toscaOperationFacade);
     }
 
     @Bean
     public ServiceToscaArtifactsValidatorExecutor ServiceToscaArtifactsValidator(JanusGraphDao janusGraphDao,
-        ToscaOperationFacade toscaOperationFacade) {
+                                                                                 ToscaOperationFacade toscaOperationFacade) {
         return new ServiceToscaArtifactsValidatorExecutor(janusGraphDao, toscaOperationFacade);
     }
 
     @Bean
-    public VFToscaArtifactValidatorExecutor VFToscaArtifactValidator(JanusGraphDao janusGraphDao,
-        ToscaOperationFacade toscaOperationFacade) {
+    public VFToscaArtifactValidatorExecutor VFToscaArtifactValidator(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade) {
         return new VFToscaArtifactValidatorExecutor(janusGraphDao, toscaOperationFacade);
     }
 
@@ -136,7 +133,7 @@ public class ValidationToolConfiguration {
 
     @Bean
     public ArtifactValidationUtils artifactValidationUtils(ArtifactCassandraDao artifactCassandraDao,
-        TopologyTemplateOperation topologyTemplateOperation) {
+                                                           TopologyTemplateOperation topologyTemplateOperation) {
         return new ArtifactValidationUtils(artifactCassandraDao, topologyTemplateOperation);
     }
 
@@ -156,8 +153,7 @@ public class ValidationToolConfiguration {
     }
 
     @Bean(name = "migration-janusgraph-client", initMethod = "createGraph")
-    public JanusGraphClient janusGraphMigrationClient(@Qualifier("dao-janusgraph-strategy")
-                                                     JanusGraphClientStrategy janusGraphClientStrategy) {
+    public JanusGraphClient janusGraphMigrationClient(@Qualifier("dao-janusgraph-strategy") JanusGraphClientStrategy janusGraphClientStrategy) {
         return new JanusGraphClient(janusGraphClientStrategy);
     }
 
@@ -181,7 +177,6 @@ public class ValidationToolConfiguration {
         return new NodeTemplateOperation();
     }
 
-
     @Bean(name = "mig-derived-resolver")
     public DerivedNodeTypeResolver migrationDerivedNodeTypeResolver() {
         return new ByToscaNameDerivedNodeTypeResolver();
@@ -212,24 +207,22 @@ public class ValidationToolConfiguration {
 
     @Bean(name = "janusgraph-client")
     @Primary
-    public JanusGraphClient janusGraphClient(@Qualifier("dao-client-strategy")
-        JanusGraphClientStrategy janusGraphClientStrategy) {
+    public JanusGraphClient janusGraphClient(@Qualifier("dao-client-strategy") JanusGraphClientStrategy janusGraphClientStrategy) {
         return new JanusGraphClient(janusGraphClientStrategy);
     }
 
-    @Bean(name ="dao-client-strategy")
+    @Bean(name = "dao-client-strategy")
     public JanusGraphClientStrategy janusGraphClientStrategy() {
         return new DAOJanusGraphStrategy();
     }
 
     @Bean
-    public VrfObjectFixHandler vrfObjectFixHandler(@Qualifier("janusgraph-dao")
-        JanusGraphDao janusGraphDao){
+    public VrfObjectFixHandler vrfObjectFixHandler(@Qualifier("janusgraph-dao") JanusGraphDao janusGraphDao) {
         return new VrfObjectFixHandler(janusGraphDao);
     }
 
     @Bean(name = "healingPipelineDao")
-    public HealingPipelineDao healingPipelineDao(){
+    public HealingPipelineDao healingPipelineDao() {
         return new HealingPipelineDao();
     }
 
index 29f808f..367c2a9 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.executor;
 
 import static java.nio.charset.StandardCharsets.UTF_8;
@@ -52,16 +51,11 @@ import org.openecomp.sdc.common.log.wrappers.Logger;
 public abstract class ArtifactValidatorExecutor {
 
     private static final Logger log = Logger.getLogger(ArtifactValidatorExecutor.class);
-
     private final JanusGraphDao janusGraphDao;
     private final ToscaOperationFacade toscaOperationFacade;
     private final String name;
 
-    public ArtifactValidatorExecutor(
-        JanusGraphDao janusGraphDao,
-        ToscaOperationFacade toscaOperationFacade,
-        String name
-    ) {
+    public ArtifactValidatorExecutor(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade, String name) {
         this.janusGraphDao = janusGraphDao;
         this.toscaOperationFacade = toscaOperationFacade;
         this.name = name;
@@ -71,11 +65,9 @@ public abstract class ArtifactValidatorExecutor {
         return name;
     }
 
-    public Map<String, List<Component>> getVerticesToValidate(VertexTypeEnum type,
-        Map<GraphPropertyEnum, Object> hasProps) {
+    public Map<String, List<Component>> getVerticesToValidate(VertexTypeEnum type, Map<GraphPropertyEnum, Object> hasProps) {
         Map<String, List<Component>> result = new HashMap<>();
-        Either<List<GraphVertex>, JanusGraphOperationStatus> resultsEither = janusGraphDao
-            .getByCriteria(type, hasProps);
+        Either<List<GraphVertex>, JanusGraphOperationStatus> resultsEither = janusGraphDao.getByCriteria(type, hasProps);
         if (resultsEither.isRight()) {
             log.error("getVerticesToValidate failed " + resultsEither.right().value());
             return result;
@@ -89,22 +81,15 @@ public abstract class ArtifactValidatorExecutor {
                 result.put(ivariantUuid, compList);
             }
             List<Component> compList = result.get(ivariantUuid);
-
             ComponentParametersView filter = new ComponentParametersView(true);
             filter.setIgnoreArtifacts(false);
-
-            Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade
-                .getToscaElement(vertex.getUniqueId(), filter);
+            Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade.getToscaElement(vertex.getUniqueId(), filter);
             if (toscaElement.isRight()) {
-                log.error(
-                    "getVerticesToValidate: failed to find element" + vertex.getUniqueId() + " staus is" + toscaElement
-                        .right().value());
+                log.error("getVerticesToValidate: failed to find element" + vertex.getUniqueId() + " staus is" + toscaElement.right().value());
             } else {
                 compList.add(toscaElement.left().value());
             }
-
         });
-
         return result;
     }
 
@@ -119,21 +104,17 @@ public abstract class ArtifactValidatorExecutor {
                 Set<String> artifactEsId = new HashSet<>();
                 for (Component component : compList) {
                     Map<String, ArtifactDefinition> toscaArtifacts = component.getToscaArtifacts();
-                    Optional<ArtifactDefinition> op = toscaArtifacts.values().
-                        stream().filter(a -> artifactEsId.contains(a.getEsId())).findAny();
+                    Optional<ArtifactDefinition> op = toscaArtifacts.values().stream().filter(a -> artifactEsId.contains(a.getEsId())).findAny();
                     if (op.isPresent()) {
                         result = false;
                         writeModuleResultToFile(writer, compList);
                         writer.flush();
                         break;
                     } else {
-                        artifactEsId.addAll(toscaArtifacts.values().stream().map(ArtifactDefinition::getEsId)
-                            .collect(Collectors.toList()));
+                        artifactEsId.addAll(toscaArtifacts.values().stream().map(ArtifactDefinition::getEsId).collect(Collectors.toList()));
                     }
                 }
-
             }
-
         } catch (Exception e) {
             log.error("Failed to fetch vf resources ", e);
             return false;
@@ -147,14 +128,13 @@ public abstract class ArtifactValidatorExecutor {
         try {
             // "service name, service id, state, version
             for (Component component : components) {
-                String sb = component.getName() + "," + component.getUniqueId() + "," + component.getInvariantUUID()
-                    + "," + component.getLifecycleState() + "," + component.getVersion()
-                    + "\n";
+                String sb =
+                    component.getName() + "," + component.getUniqueId() + "," + component.getInvariantUUID() + "," + component.getLifecycleState()
+                        + "," + component.getVersion() + "\n";
                 writer.write(sb);
             }
         } catch (IOException e) {
             log.error("Failed to write module result to file ", e);
         }
     }
-
 }
index fb905fe..9a8c952 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.executor;
 
 // TODO: Merge this interface with ArtifactValidatorExecutor
index 791bafb..2b22963 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.executor;
 
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
 import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
@@ -28,15 +30,9 @@ import org.openecomp.sdc.be.model.Component;
 import org.openecomp.sdc.be.model.LifecycleStateEnum;
 import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
 
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+public class NodeToscaArtifactsValidatorExecutor extends ArtifactValidatorExecutor implements IArtifactValidatorExecutor {
 
-public class NodeToscaArtifactsValidatorExecutor
-    extends ArtifactValidatorExecutor implements IArtifactValidatorExecutor {
-
-    public NodeToscaArtifactsValidatorExecutor(JanusGraphDao janusGraphDao,
-        ToscaOperationFacade toscaOperationFacade) {
+    public NodeToscaArtifactsValidatorExecutor(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade) {
         super(janusGraphDao, toscaOperationFacade, "RESOURCE_TOSCA_ARTIFACTS");
     }
 
@@ -45,7 +41,6 @@ public class NodeToscaArtifactsValidatorExecutor
         Map<GraphPropertyEnum, Object> hasProps = new HashMap<>();
         hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
         hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
-
         Map<String, List<Component>> vertices = getVerticesToValidate(VertexTypeEnum.NODE_TYPE, hasProps);
         return validate(vertices, outputFilePath);
     }
index c09bda5..64401f7 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.executor;
 
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
 import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
@@ -29,17 +31,11 @@ import org.openecomp.sdc.be.model.LifecycleStateEnum;
 import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
 @org.springframework.stereotype.Component
-public class ServiceToscaArtifactsValidatorExecutor
-    extends ArtifactValidatorExecutor implements IArtifactValidatorExecutor {
+public class ServiceToscaArtifactsValidatorExecutor extends ArtifactValidatorExecutor implements IArtifactValidatorExecutor {
 
     @Autowired
-    public ServiceToscaArtifactsValidatorExecutor(JanusGraphDao janusGraphDao,
-        ToscaOperationFacade toscaOperationFacade) {
+    public ServiceToscaArtifactsValidatorExecutor(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade) {
         super(janusGraphDao, toscaOperationFacade, "SERVICE_TOSCA_ARTIFACTS");
     }
 
@@ -48,7 +44,6 @@ public class ServiceToscaArtifactsValidatorExecutor
         Map<GraphPropertyEnum, Object> hasProps = new HashMap<>();
         hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
         hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
-
         Map<String, List<Component>> vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps);
         return validate(vertices, outputFilePath);
     }
index 36feb3b..ab568c3 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.executor;
 
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import lombok.Getter;
 import org.openecomp.sdc.asdctool.impl.validator.report.Report;
 import org.openecomp.sdc.asdctool.impl.validator.report.ReportFile.TXTFile;
@@ -35,48 +40,31 @@ import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import java.util.ArrayList;
-import java.util.EnumMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
 public class TopologyTemplateValidatorExecutor implements ValidatorExecutor {
 
     private static final Logger log = Logger.getLogger(TopologyTemplateValidatorExecutor.class);
-
     private final JanusGraphDao janusGraphDao;
     private final ComponentTypeEnum componentType;
     private final List<? extends TopologyTemplateValidationTask> tasks;
-
     @Getter
     private final String name;
 
+    private TopologyTemplateValidatorExecutor(JanusGraphDao janusGraphDao, String name, ComponentTypeEnum componentType,
+                                              List<? extends TopologyTemplateValidationTask> tasks) {
+        this.janusGraphDao = janusGraphDao;
+        this.name = name;
+        this.componentType = componentType;
+        this.tasks = tasks;
+    }
+
     @Autowired(required = false)
     public static ValidatorExecutor serviceValidatorExecutor(JanusGraphDao janusGraphDao) {
-        return new TopologyTemplateValidatorExecutor(
-                janusGraphDao, "SERVICE_VALIDATOR", ComponentTypeEnum.SERVICE, new ArrayList<>()
-        );
+        return new TopologyTemplateValidatorExecutor(janusGraphDao, "SERVICE_VALIDATOR", ComponentTypeEnum.SERVICE, new ArrayList<>());
     }
 
     @Autowired(required = false)
     public static ValidatorExecutor vfValidatorExecutor(List<VfValidationTask> tasks, JanusGraphDao janusGraphDao) {
-        return new TopologyTemplateValidatorExecutor(
-                janusGraphDao, "BASIC_VF_VALIDATOR", ComponentTypeEnum.RESOURCE, tasks
-        );
-    }
-
-    private TopologyTemplateValidatorExecutor(
-            JanusGraphDao janusGraphDao,
-            String name,
-            ComponentTypeEnum componentType,
-            List<? extends TopologyTemplateValidationTask> tasks
-    ) {
-        this.janusGraphDao = janusGraphDao;
-        this.name = name;
-        this.componentType = componentType;
-        this.tasks = tasks;
+        return new TopologyTemplateValidatorExecutor(janusGraphDao, "BASIC_VF_VALIDATOR", ComponentTypeEnum.RESOURCE, tasks);
     }
 
     @Override
@@ -88,7 +76,6 @@ public class TopologyTemplateValidatorExecutor implements ValidatorExecutor {
         boolean successAllVertices = true;
         int vertexNum = 0;
         int verticesSize = vertices.size();
-
         for (GraphVertex vertex : vertices) {
             vertexNum++;
             boolean successAllTasks = true;
@@ -113,18 +100,13 @@ public class TopologyTemplateValidatorExecutor implements ValidatorExecutor {
     }
 
     private List<GraphVertex> getVerticesToValidate() {
-        return janusGraphDao
-                .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, buildProps())
-                .either(
-                        vs -> {
-                            log.info("getVerticesToValidate: {} vertices to scan", vs.size());
-                            return vs;
-                        },
-                        sos -> {
-                            log.error("getVerticesToValidate failed {}", sos);
-                            return new ArrayList<>();
-                        }
-                );
+        return janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, buildProps()).either(vs -> {
+            log.info("getVerticesToValidate: {} vertices to scan", vs.size());
+            return vs;
+        }, sos -> {
+            log.error("getVerticesToValidate failed {}", sos);
+            return new ArrayList<>();
+        });
     }
 
     private Map<GraphPropertyEnum, Object> buildProps() {
index 237507a..8de7efc 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.executor;
 
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
 import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
@@ -29,12 +31,7 @@ import org.openecomp.sdc.be.model.Component;
 import org.openecomp.sdc.be.model.LifecycleStateEnum;
 import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
 
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class VFToscaArtifactValidatorExecutor
-    extends ArtifactValidatorExecutor implements IArtifactValidatorExecutor {
+public class VFToscaArtifactValidatorExecutor extends ArtifactValidatorExecutor implements IArtifactValidatorExecutor {
 
     public VFToscaArtifactValidatorExecutor(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade) {
         super(janusGraphDao, toscaOperationFacade, "VF_TOSCA_ARTIFACTS");
@@ -46,9 +43,7 @@ public class VFToscaArtifactValidatorExecutor
         hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
         hasProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.VF);
         hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
-
         Map<String, List<Component>> vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps);
         return validate(vertices, outputFilePath);
-
     }
 }
index b01af0b..884cc77 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.executor;
 
 import org.openecomp.sdc.asdctool.impl.validator.report.Report;
@@ -26,5 +25,6 @@ import org.openecomp.sdc.asdctool.impl.validator.report.ReportFile.TXTFile;
 public interface ValidatorExecutor {
 
     boolean executeValidations(Report report, TXTFile textFile);
+
     String getName();
 }
index 6ce0791..444e65e 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.report;
 
 /**
@@ -25,7 +24,12 @@ package org.openecomp.sdc.asdctool.impl.validator.report;
  * a {@link org.openecomp.sdc.asdctool.impl.validator.report.ReportFileWriter }.
  */
 public interface FileType {
-    interface TXT extends FileType { }
 
-    interface CSV extends FileType { }
+    interface TXT extends FileType {
+
+    }
+
+    interface CSV extends FileType {
+
+    }
 }
index cf6fb0e..a4f1977 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.report;
 
 import java.util.HashMap;
@@ -35,49 +34,29 @@ public final class Report {
     private final Map<String, Set<String>> failedVerticesPerTask = new HashMap<>();
     private final Map<String, Map<String, VertexResult>> resultsPerVertex = new HashMap<>();
 
-    public static Report make() {
-        return new Report();
+    private Report() {
     }
 
-    private Report() {
+    public static Report make() {
+        return new Report();
     }
 
     public void addFailure(String taskName, String vertexId) {
-        Set<String> failedVertices =
-            get(failedVerticesPerTask, HashSet::new).apply(taskName);
-
-        put(failedVerticesPerTask).apply(taskName,
-            add(failedVertices).apply(vertexId));
+        Set<String> failedVertices = get(failedVerticesPerTask, HashSet::new).apply(taskName);
+        put(failedVerticesPerTask).apply(taskName, add(failedVertices).apply(vertexId));
     }
 
     public void addSuccess(String vertexId, String taskName, VertexResult result) {
-        Map<String, VertexResult> vertexTasksResults =
-            get(resultsPerVertex, HashMap::new).apply(vertexId);
-
-        put(resultsPerVertex).apply(vertexId,
-            put(vertexTasksResults).apply(taskName, result));
+        Map<String, VertexResult> vertexTasksResults = get(resultsPerVertex, HashMap::new).apply(vertexId);
+        put(resultsPerVertex).apply(vertexId, put(vertexTasksResults).apply(taskName, result));
     }
 
     public void forEachFailure(FailureConsumer c) {
         failedVerticesPerTask.forEach(c::traverse);
     }
 
-    @FunctionalInterface
-    public interface FailureConsumer {
-
-        void traverse(String taskName, Set<String> failedVertices);
-    }
-
     public void forEachSuccess(SuccessConsumer p) {
-        resultsPerVertex.forEach((vertex, tasksResults) ->
-            tasksResults.forEach((task, result) ->
-                p.traverse(vertex, task, result)));
-    }
-
-    @FunctionalInterface
-    public interface SuccessConsumer {
-
-        void traverse(String vertex, String task, VertexResult result);
+        resultsPerVertex.forEach((vertex, tasksResults) -> tasksResults.forEach((task, result) -> p.traverse(vertex, task, result)));
     }
 
     <K, V> Function<K, V> get(Map<K, V> kvs, Supplier<V> fallback) {
@@ -97,4 +76,16 @@ public final class Report {
             return kvs;
         };
     }
+
+    @FunctionalInterface
+    public interface FailureConsumer {
+
+        void traverse(String taskName, Set<String> failedVertices);
+    }
+
+    @FunctionalInterface
+    public interface SuccessConsumer {
+
+        void traverse(String vertex, String task, VertexResult result);
+    }
 }
index e706d8a..9b4bb6e 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.report;
 
 import java.util.Set;
@@ -52,39 +51,24 @@ public class ReportFile {
 
         public void reportStartTaskRun(GraphVertex vertex, String taskName) {
             writer.writeln("");
-            writer.writeln("-----------------------Vertex: " + vertex.getUniqueId() +
-                ", Task " + taskName + " Started-----------------------");
+            writer.writeln("-----------------------Vertex: " + vertex.getUniqueId() + ", Task " + taskName + " Started-----------------------");
         }
 
         public void reportStartValidatorRun(String validatorName, int componentsNum) {
             writer.writeln("");
-            writer.writeln("------ValidatorExecuter " +
-                validatorName + " Validation Started, on " +
-                componentsNum + " components---------");
+            writer.writeln("------ValidatorExecuter " + validatorName + " Validation Started, on " + componentsNum + " components---------");
         }
 
-        public void printValidationTaskStatus(
-            GraphVertex vertexScanned,
-            String taskName,
-            boolean success
-        ) {
+        public void printValidationTaskStatus(GraphVertex vertexScanned, String taskName, boolean success) {
             String successStatus = success ? "success" : "failed";
             writer.writeln("");
-            writer.writeln("-----------------------Vertex: " +
-                vertexScanned.getUniqueId() + ", Task " +
-                taskName + " " + successStatus +
-                "-----------------------"
-            );
+            writer.writeln("-----------------------Vertex: " + vertexScanned.getUniqueId() + ", Task " + taskName + " " + successStatus
+                + "-----------------------");
         }
 
-        public void reportValidatorTypeSummary(
-            String validatorName,
-            Set<String> failedTasksNames,
-            Set<String> successTasksNames
-        ) {
+        public void reportValidatorTypeSummary(String validatorName, Set<String> failedTasksNames, Set<String> successTasksNames) {
             StrBuilder sb = new StrBuilder();
-            sb.appendln("-----------------------ValidatorExecuter " + validatorName
-                + " Validation Summary-----------------------");
+            sb.appendln("-----------------------ValidatorExecuter " + validatorName + " Validation Summary-----------------------");
             sb.appendln("Failed tasks: " + failedTasksNames);
             sb.appendln("Success tasks: " + successTasksNames);
             writer.writeln("");
@@ -94,14 +78,8 @@ public class ReportFile {
         public void reportEndOfToolRun(Report report) {
             StrBuilder sb = new StrBuilder();
             sb.appendln("-----------------------------------Validator Tool Summary-----------------------------------");
-            report.forEachFailure((taskName, failedVertices) ->
-                sb.append("Task: ")
-                    .append(taskName)
-                    .appendNewLine()
-                    .append("FailedVertices: ")
-                    .append(String.valueOf(failedVertices))
-                    .appendNewLine());
-
+            report.forEachFailure((taskName, failedVertices) -> sb.append("Task: ").append(taskName).appendNewLine().append("FailedVertices: ")
+                .append(String.valueOf(failedVertices)).appendNewLine());
             writer.writeln("");
             writer.write(sb.toString());
         }
index e1a0090..a25999d 100644 (file)
@@ -17,8 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
-
 package org.openecomp.sdc.asdctool.impl.validator.report;
 
 import static java.nio.file.StandardOpenOption.APPEND;
@@ -32,27 +30,18 @@ import java.util.function.Consumer;
 
 /**
  * Describes a writer for report's data
- * @param <A> phantom type which is only used for type-safety to prevent mixing writers for TXT Report files
- *           and CSV Report files
+ *
+ * @param <A> phantom type which is only used for type-safety to prevent mixing writers for TXT Report files and CSV Report files
  */
 @SuppressWarnings("unused")
 public abstract class ReportFileWriter<A extends FileType> {
 
-    abstract public void write(String s);
-
-    public void writeln(String s) {
-        write(s + "\n");
-    }
-
     /**
-     *
      * @param filePath The resulting file path
-     * @param onError error handling callback
-     * @param <A> phantom type which is only used for type-safety
+     * @param onError  error handling callback
+     * @param <A>      phantom type which is only used for type-safety
      */
-    public static <A extends FileType> ReportFileWriter<A> makeNioWriter(
-        Path filePath, Consumer<IOException> onError
-    ) {
+    public static <A extends FileType> ReportFileWriter<A> makeNioWriter(Path filePath, Consumer<IOException> onError) {
         return new ReportFileWriter<A>() {
             @Override
             public void write(String line) {
@@ -65,5 +54,10 @@ public abstract class ReportFileWriter<A extends FileType> {
             }
         };
     }
-}
 
+    abstract public void write(String s);
+
+    public void writeln(String s) {
+        write(s + "\n");
+    }
+}
index 8f764f6..19704d4 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.tasks;
 
 /**
  * Created by chaya on 7/5/2017.
  */
 public abstract class ServiceValidationTask implements TopologyTemplateValidationTask {
+
     protected String name = "";
     protected String taskStatus = "NOT_STARTED";
 
@@ -41,5 +41,4 @@ public abstract class ServiceValidationTask implements TopologyTemplateValidatio
     public void setTaskResultStatus(String status) {
         this.taskStatus = status;
     }
-
 }
index 99470c6..7a39300 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.tasks;
 
 import org.openecomp.sdc.asdctool.impl.validator.report.Report;
@@ -26,8 +25,12 @@ import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
 
 public interface TopologyTemplateValidationTask {
+
     VertexResult validate(Report report, GraphVertex vertex, ReportFile.TXTFile reportFile);
+
     String getTaskName();
+
     String getTaskResultStatus();
+
     void setTaskResultStatus(String status);
 }
index 76e5952..5d1ea40 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.tasks;
 
 public abstract class VfValidationTask implements TopologyTemplateValidationTask {
+
     protected String taskStatus = "NOT_STARTED";
     protected String name = "";
 
@@ -38,5 +38,4 @@ public abstract class VfValidationTask implements TopologyTemplateValidationTask
     public void setTaskResultStatus(String status) {
         this.taskStatus = status;
     }
-
 }
index bf3d267..17da786 100644 (file)
@@ -18,7 +18,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
 
 import fj.data.Either;
@@ -45,30 +44,22 @@ import org.springframework.beans.factory.annotation.Autowired;
 public class ArtifactValidationUtils {
 
     private static final Logger logger = Logger.getLogger(ArtifactValidationUtils.class);
-
     private final ArtifactCassandraDao artifactCassandraDao;
-
     private final TopologyTemplateOperation topologyTemplateOperation;
 
     @Autowired
-    public ArtifactValidationUtils(ArtifactCassandraDao artifactCassandraDao,
-        TopologyTemplateOperation topologyTemplateOperation) {
+    public ArtifactValidationUtils(ArtifactCassandraDao artifactCassandraDao, TopologyTemplateOperation topologyTemplateOperation) {
         this.artifactCassandraDao = artifactCassandraDao;
         this.topologyTemplateOperation = topologyTemplateOperation;
     }
 
-    public ArtifactsVertexResult validateArtifactsAreInCassandra(
-        Report report,
-        GraphVertex vertex,
-        String taskName,
-        List<ArtifactDataDefinition> artifacts,
-        ReportFile.TXTFile reportFile
-    ) {
+    public ArtifactsVertexResult validateArtifactsAreInCassandra(Report report, GraphVertex vertex, String taskName,
+                                                                 List<ArtifactDataDefinition> artifacts, ReportFile.TXTFile reportFile) {
         ArtifactsVertexResult result = new ArtifactsVertexResult(true);
         for (ArtifactDataDefinition artifact : artifacts) {
             boolean isArtifactExist = isArtifactInCassandra(artifact.getEsId());
-            String status = isArtifactExist ? "Artifact " + artifact.getEsId() + " is in Cassandra" :
-                "Artifact " + artifact.getEsId() + " doesn't exist in Cassandra";
+            String status = isArtifactExist ? "Artifact " + artifact.getEsId() + " is in Cassandra"
+                : "Artifact " + artifact.getEsId() + " doesn't exist in Cassandra";
             reportFile.writeReportLineToFile(status);
             if (!isArtifactExist) {
                 report.addFailure(taskName, vertex.getUniqueId());
@@ -80,8 +71,7 @@ public class ArtifactValidationUtils {
     }
 
     public boolean isArtifactInCassandra(String uniqueId) {
-        Either<Long, CassandraOperationStatus> countOfArtifactsEither =
-            artifactCassandraDao.getCountOfArtifactById(uniqueId);
+        Either<Long, CassandraOperationStatus> countOfArtifactsEither = artifactCassandraDao.getCountOfArtifactById(uniqueId);
         if (countOfArtifactsEither.isRight()) {
             logger.debug("Failed to retrieve artifact with id: {} from Cassandra", uniqueId);
             return false;
@@ -100,19 +90,14 @@ public class ArtifactValidationUtils {
         return artifacts;
     }
 
-    public ArtifactsVertexResult validateTopologyTemplateArtifacts(
-        Report report,
-        GraphVertex vertex,
-        String taskName,
-        ReportFile.TXTFile reportFile
-    ) {
+    public ArtifactsVertexResult validateTopologyTemplateArtifacts(Report report, GraphVertex vertex, String taskName,
+                                                                   ReportFile.TXTFile reportFile) {
         ArtifactsVertexResult result = new ArtifactsVertexResult();
         ComponentParametersView paramView = new ComponentParametersView();
         paramView.disableAll();
         paramView.setIgnoreArtifacts(false);
         paramView.setIgnoreComponentInstances(false);
-        Either<ToscaElement, StorageOperationStatus> toscaElementEither = topologyTemplateOperation
-            .getToscaElement(vertex.getUniqueId(), paramView);
+        Either<ToscaElement, StorageOperationStatus> toscaElementEither = topologyTemplateOperation.getToscaElement(vertex.getUniqueId(), paramView);
         if (toscaElementEither.isRight()) {
             result.setStatus(false);
             return result;
@@ -123,23 +108,17 @@ public class ArtifactValidationUtils {
         Map<String, ArtifactDataDefinition> apiArtifacts = element.getServiceApiArtifacts();
         Map<String, MapArtifactDataDefinition> instanceArtifacts = element.getInstanceArtifacts();
         Map<String, MapArtifactDataDefinition> instanceDeploymentArtifacts = element.getInstDeploymentArtifacts();
-
         List<ArtifactDataDefinition> allArtifacts = new ArrayList<>();
-
         allArtifacts.addAll(addRelevantArtifacts(deploymentArtifacts));
         allArtifacts.addAll(addRelevantArtifacts(artifacts));
         allArtifacts.addAll(addRelevantArtifacts(apiArtifacts));
-
         if (instanceArtifacts != null) {
-            instanceArtifacts.forEach((key, artifactMap) ->
-                allArtifacts.addAll(addRelevantArtifacts(artifactMap.getMapToscaDataDefinition())));
+            instanceArtifacts.forEach((key, artifactMap) -> allArtifacts.addAll(addRelevantArtifacts(artifactMap.getMapToscaDataDefinition())));
         }
-
         if (instanceDeploymentArtifacts != null) {
-            instanceDeploymentArtifacts.forEach((key, artifactMap) ->
-                allArtifacts.addAll(addRelevantArtifacts(artifactMap.getMapToscaDataDefinition())));
+            instanceDeploymentArtifacts
+                .forEach((key, artifactMap) -> allArtifacts.addAll(addRelevantArtifacts(artifactMap.getMapToscaDataDefinition())));
         }
-
         return validateArtifactsAreInCassandra(report, vertex, taskName, allArtifacts, reportFile);
     }
 }
index b9c1116..c1f75c9 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
 
-import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
-
 import java.util.HashSet;
 import java.util.Set;
+import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
 
 /**
  * Created by chaya on 7/25/2017.
  */
-public class ArtifactsVertexResult extends VertexResult{
+public class ArtifactsVertexResult extends VertexResult {
+
     Set<String> notFoundArtifacts = new HashSet<>();
 
     public ArtifactsVertexResult() {
-
     }
 
     public ArtifactsVertexResult(boolean status) {
index a5d5185..d1e39ea 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
 
 import org.openecomp.sdc.asdctool.impl.validator.report.Report;
@@ -26,7 +25,6 @@ import org.openecomp.sdc.asdctool.impl.validator.tasks.ServiceValidationTask;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
 import org.springframework.beans.factory.annotation.Autowired;
 
-
 public class ServiceArtifactValidationTask extends ServiceValidationTask {
 
     private ArtifactValidationUtils artifactValidationUtils;
index 26495fa..ee06c73 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
 
 import org.openecomp.sdc.asdctool.impl.validator.report.Report;
@@ -27,7 +26,6 @@ import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
 import org.springframework.beans.factory.annotation.Autowired;
 
-
 /**
  * Created by chaya on 7/4/2017.
  */
index 62404e1..aff2d14 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.tasks.module.json;
 
 import fj.data.Either;
@@ -60,53 +59,42 @@ public class ModuleJsonTask extends ServiceValidationTask {
         if (!isAfterSubmitForTesting(vertex)) {
             return new VertexResult(true);
         }
-
         ComponentParametersView paramView = new ComponentParametersView();
         paramView.disableAll();
         paramView.setIgnoreArtifacts(false);
         paramView.setIgnoreGroups(false);
         paramView.setIgnoreComponentInstances(false);
-        Either<ToscaElement, StorageOperationStatus> toscaElementEither = topologyTemplateOperation
-                .getToscaElement(vertex.getUniqueId(), paramView);
+        Either<ToscaElement, StorageOperationStatus> toscaElementEither = topologyTemplateOperation.getToscaElement(vertex.getUniqueId(), paramView);
         if (toscaElementEither.isRight()) {
             return new VertexResult(false);
         }
         TopologyTemplate element = (TopologyTemplate) toscaElementEither.left().value();
         Map<String, MapGroupsDataDefinition> instGroups = element.getInstGroups();
         Map<String, MapArtifactDataDefinition> instDeploymentArtifacts = element.getInstDeploymentArtifacts();
-
-        for (Map.Entry<String, MapGroupsDataDefinition> pair : Optional.ofNullable(instGroups)
-                .orElse(Collections.emptyMap()).entrySet()) {
+        for (Map.Entry<String, MapGroupsDataDefinition> pair : Optional.ofNullable(instGroups).orElse(Collections.emptyMap()).entrySet()) {
             MapGroupsDataDefinition groups = pair.getValue();
             if (groups != null && !groups.getMapToscaDataDefinition().isEmpty()) {
-                return new VertexResult(
-                    findCoordinateModuleJson(report, pair, instDeploymentArtifacts, vertex, reportFile));
+                return new VertexResult(findCoordinateModuleJson(report, pair, instDeploymentArtifacts, vertex, reportFile));
             }
         }
         return new VertexResult(true);
     }
 
-    private boolean findCoordinateModuleJson(
-        Report report,
-        Map.Entry<String, MapGroupsDataDefinition> pair,
-        Map<String, MapArtifactDataDefinition> instDeploymentArtifacts,
-        GraphVertex vertex,
-        ReportFile.TXTFile reportFile
-    ) {
+    private boolean findCoordinateModuleJson(Report report, Map.Entry<String, MapGroupsDataDefinition> pair,
+                                             Map<String, MapArtifactDataDefinition> instDeploymentArtifacts, GraphVertex vertex,
+                                             ReportFile.TXTFile reportFile) {
         String groupKey = pair.getKey();
         String[] split = groupKey.split("\\.");
         String instanceName = split[split.length - 1];
         MapArtifactDataDefinition deploymentsArtifacts = instDeploymentArtifacts.get(groupKey);
         if (deploymentsArtifacts != null && !deploymentsArtifacts.getMapToscaDataDefinition().isEmpty()) {
-            List<ArtifactDataDefinition> moduleJsonArtifacts = deploymentsArtifacts.getMapToscaDataDefinition().values()
-                .stream().filter(artifact -> {
-                    String artifactName = artifact.getArtifactName();
-                    return artifactName.startsWith(instanceName) && artifactName.endsWith("modules.json");
-                }).collect(Collectors.toList());
+            List<ArtifactDataDefinition> moduleJsonArtifacts = deploymentsArtifacts.getMapToscaDataDefinition().values().stream().filter(artifact -> {
+                String artifactName = artifact.getArtifactName();
+                return artifactName.startsWith(instanceName) && artifactName.endsWith("modules.json");
+            }).collect(Collectors.toList());
             if (moduleJsonArtifacts.size() > 0) {
                 String status =
-                    "Instance " + instanceName + " has a corresponding modules.json file: " + moduleJsonArtifacts.get(0)
-                        .getArtifactName();
+                    "Instance " + instanceName + " has a corresponding modules.json file: " + moduleJsonArtifacts.get(0).getArtifactName();
                 reportFile.writeReportLineToFile(status);
                 return true;
             }
index d043653..148a0b7 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.utils;
 
 /**
@@ -28,7 +27,6 @@ public class VertexResult {
     boolean status;
 
     public VertexResult() {
-
     }
 
     public VertexResult(boolean status) {
@@ -46,5 +44,4 @@ public class VertexResult {
     public String getResult() {
         return String.valueOf(status);
     }
-
 }
index 611a2f1..9029615 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.impl.validator.utils;
 
 import java.util.List;
 import java.util.Map;
 
-
-
 public class VfModuleArtifactPayloadEx {
-       
-       private String vfModuleModelName, vfModuleModelInvariantUUID, vfModuleModelVersion, vfModuleModelUUID, vfModuleModelCustomizationUUID, vfModuleModelDescription;
-       private Boolean isBase;
-       private List<String> artifacts;
-       private Map< String, Object> properties;
-
-
-
-       public String getVfModuleModelName() {
-               return vfModuleModelName;
-       }
 
-       public void setVfModuleModelName(String vfModuleModelName) {
-               this.vfModuleModelName = vfModuleModelName;
-       }
+    private String vfModuleModelName, vfModuleModelInvariantUUID, vfModuleModelVersion, vfModuleModelUUID, vfModuleModelCustomizationUUID, vfModuleModelDescription;
+    private Boolean isBase;
+    private List<String> artifacts;
+    private Map<String, Object> properties;
 
-       public String getVfModuleModelInvariantUUID() {
-               return vfModuleModelInvariantUUID;
-       }
+    public String getVfModuleModelName() {
+        return vfModuleModelName;
+    }
 
-       public void setVfModuleModelInvariantUUID(String vfModuleModelInvariantUUID) {
-               this.vfModuleModelInvariantUUID = vfModuleModelInvariantUUID;
-       }
+    public void setVfModuleModelName(String vfModuleModelName) {
+        this.vfModuleModelName = vfModuleModelName;
+    }
 
-       public String getVfModuleModelVersion() {
-               return vfModuleModelVersion;
-       }
+    public String getVfModuleModelInvariantUUID() {
+        return vfModuleModelInvariantUUID;
+    }
 
-       public void setVfModuleModelVersion(String vfModuleModelVersion) {
-               this.vfModuleModelVersion = vfModuleModelVersion;
-       }
+    public void setVfModuleModelInvariantUUID(String vfModuleModelInvariantUUID) {
+        this.vfModuleModelInvariantUUID = vfModuleModelInvariantUUID;
+    }
 
-       public String getVfModuleModelUUID() {
-               return vfModuleModelUUID;
-       }
+    public String getVfModuleModelVersion() {
+        return vfModuleModelVersion;
+    }
 
-       public void setVfModuleModelUUID(String vfModuleModelUUID) {
-               this.vfModuleModelUUID = vfModuleModelUUID;
-       }
+    public void setVfModuleModelVersion(String vfModuleModelVersion) {
+        this.vfModuleModelVersion = vfModuleModelVersion;
+    }
 
-       public String getVfModuleModelCustomizationUUID() {
-               return vfModuleModelCustomizationUUID;
-       }
+    public String getVfModuleModelUUID() {
+        return vfModuleModelUUID;
+    }
 
-       public void setVfModuleModelCustomizationUUID(String vfModuleModelCustomizationUUID) {
-               this.vfModuleModelCustomizationUUID = vfModuleModelCustomizationUUID;
-       }
+    public void setVfModuleModelUUID(String vfModuleModelUUID) {
+        this.vfModuleModelUUID = vfModuleModelUUID;
+    }
 
-       public String getVfModuleModelDescription() {
-               return vfModuleModelDescription;
-       }
+    public String getVfModuleModelCustomizationUUID() {
+        return vfModuleModelCustomizationUUID;
+    }
 
-       public void setVfModuleModelDescription(String vfModuleModelDescription) {
-               this.vfModuleModelDescription = vfModuleModelDescription;
-       }
+    public void setVfModuleModelCustomizationUUID(String vfModuleModelCustomizationUUID) {
+        this.vfModuleModelCustomizationUUID = vfModuleModelCustomizationUUID;
+    }
 
-       public Boolean getIsBase() {
-               return isBase;
-       }
+    public String getVfModuleModelDescription() {
+        return vfModuleModelDescription;
+    }
 
-       public void setIsBase(Boolean isBase) {
-               this.isBase = isBase;
-       }
+    public void setVfModuleModelDescription(String vfModuleModelDescription) {
+        this.vfModuleModelDescription = vfModuleModelDescription;
+    }
 
-       public List<String> getArtifacts() {
-               return artifacts;
-       }
+    public Boolean getIsBase() {
+        return isBase;
+    }
 
-       public void setArtifacts(List<String> artifacts) {
-               this.artifacts = artifacts;
-       }
+    public void setIsBase(Boolean isBase) {
+        this.isBase = isBase;
+    }
 
-       public Map<String, Object> getProperties() {
-               return properties;
-       }
+    public List<String> getArtifacts() {
+        return artifacts;
+    }
 
-       public void setProperties(Map<String, Object> properties) {
-               this.properties = properties;
-       }
+    public void setArtifacts(List<String> artifacts) {
+        this.artifacts = artifacts;
+    }
 
-       
-       
+    public Map<String, Object> getProperties() {
+        return properties;
+    }
 
+    public void setProperties(Map<String, Object> properties) {
+        this.properties = properties;
+    }
 }
index ff5fb48..616717b 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
 import org.openecomp.sdc.asdctool.configuration.ArtifactUUIDFixConfiguration;
@@ -37,7 +36,8 @@ public class ArtifactUUIDFixMenu {
         }
         String fixServices = args[1];
         String runMode = args[2];
-        log.info("Start fixing artifact UUID after 1707 migration with arguments run with configuration [{}] , for [{}] services", runMode, fixServices);
+        log.info("Start fixing artifact UUID after 1707 migration with arguments run with configuration [{}] , for [{}] services", runMode,
+            fixServices);
         String appConfigDir = args[0];
         ConfigurationUploader.uploadConfigurationFiles(appConfigDir);
         AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ArtifactUUIDFixConfiguration.class);
@@ -45,12 +45,10 @@ public class ArtifactUUIDFixMenu {
         boolean isSuccessful = artifactUuidFix.doFix(fixServices, runMode);
         if (isSuccessful) {
             log.info("Fixing artifacts UUID for 1707  was finished successfully");
-    
-        } else{
+        } else {
             log.info("Fixing artifacts UUID for 1707  has failed");
             System.exit(2);
         }
         System.exit(0);
     }
-
 }
index 0978f4e..5bf5ed8 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
 import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;
@@ -29,14 +28,11 @@ import org.springframework.context.annotation.AnnotationConfigApplicationContext
 public class ArtifactValidatorTool {
 
     public static void main(String[] args) {
-
         String outputPath = args[0];
         String txtReportFilePath = ValidationConfigManager.txtReportFilePath(outputPath);
-
         String appConfigDir = args[1];
         AnnotationConfigApplicationContext context = initContext(appConfigDir);
         ArtifactToolBL validationToolBL = context.getBean(ArtifactToolBL.class);
-
         System.out.println("Start ArtifactValidation Tool");
         Boolean result = validationToolBL.validateAll(txtReportFilePath);
         if (result) {
@@ -50,9 +46,7 @@ public class ArtifactValidatorTool {
 
     private static AnnotationConfigApplicationContext initContext(String appConfigDir) {
         ConfigurationUploader.uploadConfigurationFiles(appConfigDir);
-        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
-            ValidationToolConfiguration.class);
+        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ValidationToolConfiguration.class);
         return context;
     }
-
 }
index 882a4e1..af33045 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
+import java.util.Scanner;
 import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;
 import org.openecomp.sdc.asdctool.configuration.CsarGeneratorConfiguration;
 import org.openecomp.sdc.asdctool.impl.internal.tool.CsarGenerator;
 import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 
-import java.util.Scanner;
-
 public class CsarGeneratorTool extends SdcInternalTool {
 
     public static void main(String[] args) {
@@ -36,14 +34,11 @@ public class CsarGeneratorTool extends SdcInternalTool {
             System.exit(1);
         }
         String appConfigDir = args[0];
-
         disableConsole();
-
         ConfigurationUploader.uploadConfigurationFiles(appConfigDir);
         AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(CsarGeneratorConfiguration.class);
         CsarGenerator csarGenerator = context.getBean(CsarGenerator.class);
         ConsoleWriter.dataLine("STARTED... ");
-
         String input = "";
         Scanner scanner = new Scanner(System.in);
         do {
index 4b3496f..4290c98 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -19,7 +19,6 @@
  * Modifications copyright (c) 2018 Nokia
  * ================================================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
 import org.openecomp.sdc.asdctool.impl.JanusGraphInitializer;
@@ -33,72 +32,65 @@ import org.openecomp.sdc.common.log.wrappers.Logger;
 
 public class DataSchemaMenu {
 
-       private static Logger log = Logger.getLogger(DataSchemaMenu.class.getName());
-
-       public static void main(String[] args) {
-
-               String operation = args[0];
-
-               String appConfigDir = args[1];
-
-               if (args == null || args.length < 2) {
-                       usageAndExit();
-               }
-
-               ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
-               ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
-
-               try {
-
-        SdcSchemaBuilder sdcSchemaBuilder = new SdcSchemaBuilder(new SdcSchemaUtils(),
-            ConfigurationManager.getConfigurationManager().getConfiguration()::getCassandraConfig);
+    private static Logger log = Logger.getLogger(DataSchemaMenu.class.getName());
 
-                       switch (operation.toLowerCase()) {
-                       case "create-cassandra-structures":
-                               log.debug("Start create cassandra keyspace, tables and indexes");
-                if (sdcSchemaBuilder.createSchema()) {
-                                       log.debug("create cassandra keyspace, tables and indexes successfull");
-                                       System.exit(0);
-                               } else {
-                                       log.debug("create cassandra keyspace, tables and indexes failed");
-                                       System.exit(2);
-                               }
-            case "create-janusgraph-structures":
-                log.debug("Start create janusgraph keyspace");
-                String janusGraphCfg = 2 == args.length ? configurationManager.getConfiguration().getJanusGraphCfgFile() : args[2];
-                if (JanusGraphInitializer.createGraph(janusGraphCfg)) {
-                    log.debug("create janusgraph keyspace successfull");
-                                       System.exit(0);
-                               } else {
-                    log.debug("create janusgraph keyspace failed");
-                                       System.exit(2);
-                               }
-                       case "clean-cassndra":
-                               log.debug("Start clean keyspace, tables");
-                if (sdcSchemaBuilder.deleteSchema()) {
-                                       log.debug(" successfull");
-                                       System.exit(0);
-                               } else {
-                                       log.debug(" failed");
-                                       System.exit(2);
-                               }
-                       default:
-                               usageAndExit();
-                       }
-               } catch (Throwable t) {
-                       t.printStackTrace();
-                       log.debug("create cassandra keyspace, tables and indexes failed");
-                       System.exit(3);
-               }
-       }
+    public static void main(String[] args) {
+        String operation = args[0];
+        String appConfigDir = args[1];
+        if (args == null || args.length < 2) {
+            usageAndExit();
+        }
+        ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
+        ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
+        try {
+            SdcSchemaBuilder sdcSchemaBuilder = new SdcSchemaBuilder(new SdcSchemaUtils(),
+                ConfigurationManager.getConfigurationManager().getConfiguration()::getCassandraConfig);
+            switch (operation.toLowerCase()) {
+                case "create-cassandra-structures":
+                    log.debug("Start create cassandra keyspace, tables and indexes");
+                    if (sdcSchemaBuilder.createSchema()) {
+                        log.debug("create cassandra keyspace, tables and indexes successfull");
+                        System.exit(0);
+                    } else {
+                        log.debug("create cassandra keyspace, tables and indexes failed");
+                        System.exit(2);
+                    }
+                case "create-janusgraph-structures":
+                    log.debug("Start create janusgraph keyspace");
+                    String janusGraphCfg = 2 == args.length ? configurationManager.getConfiguration().getJanusGraphCfgFile() : args[2];
+                    if (JanusGraphInitializer.createGraph(janusGraphCfg)) {
+                        log.debug("create janusgraph keyspace successfull");
+                        System.exit(0);
+                    } else {
+                        log.debug("create janusgraph keyspace failed");
+                        System.exit(2);
+                    }
+                case "clean-cassndra":
+                    log.debug("Start clean keyspace, tables");
+                    if (sdcSchemaBuilder.deleteSchema()) {
+                        log.debug(" successfull");
+                        System.exit(0);
+                    } else {
+                        log.debug(" failed");
+                        System.exit(2);
+                    }
+                default:
+                    usageAndExit();
+            }
+        } catch (Throwable t) {
+            t.printStackTrace();
+            log.debug("create cassandra keyspace, tables and indexes failed");
+            System.exit(3);
+        }
+    }
 
-       private static void usageAndExit() {
-               DataSchemeUsage();
-               System.exit(1);
-       }
+    private static void usageAndExit() {
+        DataSchemeUsage();
+        System.exit(1);
+    }
 
-       private static void DataSchemeUsage() {
-               System.out.println("Usage: create-cassandra-structures <configuration dir> ");
+    private static void DataSchemeUsage() {
+        System.out.println("Usage: create-cassandra-structures <configuration dir> ");
         System.out.println("Usage: create-janusgraph-structures <configuration dir> ");
-       }
+    }
 }
index 77b21fd..10d9171 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
 import java.util.Scanner;
@@ -35,14 +34,11 @@ public class DeleteComponentTool extends SdcInternalTool {
             System.exit(1);
         }
         String appConfigDir = args[0];
-
         disableConsole();
         ConsoleWriter.dataLine("STARTED... ");
-
         ConfigurationUploader.uploadConfigurationFiles(appConfigDir);
         AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(InternalToolConfiguration.class);
         DeleteComponentHandler deleteComponentHandler = context.getBean(DeleteComponentHandler.class);
-
         String input = "";
         Scanner scanner = new Scanner(System.in);
         do {
@@ -61,6 +57,4 @@ public class DeleteComponentTool extends SdcInternalTool {
         ConsoleWriter.dataLine("DeleteComponentTool exit...");
         System.exit(0);
     }
-
-
 }
index 98aea26..3efc0ef 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * Modifications copyright (c) 2019 Nokia
  * ================================================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
-import org.openecomp.sdc.asdctool.impl.GraphJsonValidator;
-import org.openecomp.sdc.asdctool.impl.GraphMLConverter;
-import org.openecomp.sdc.asdctool.impl.GraphMLDataAnalyzer;
-
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.stream.Collectors;
+import org.openecomp.sdc.asdctool.impl.GraphJsonValidator;
+import org.openecomp.sdc.asdctool.impl.GraphMLConverter;
+import org.openecomp.sdc.asdctool.impl.GraphMLDataAnalyzer;
 
 public class ExportImportMenu {
 
-       enum ExportImportEnum {
-               DATA_REPORT("Usage: get-data-report-from-graph-ml <full path of .graphml file>", "get-data-report-from-graph-ml"){
-                       @Override
-                       void handle(String[] args) {
-                               if (verifyParamsLength(args, 2)) {
-                                       usage();
-                                       System.exit(1);
-                               }
-                               String[] dataArgs = new String[] { args[1] };
-                               if (new GraphMLDataAnalyzer().analyzeGraphMLData(dataArgs) == null) {
-                                       System.exit(2);
-                               }
-                       }
-               },
-               EXPORT("Usage: export <janusgraph.properties> <output directory>", "export"){
-                       @Override
-                       void handle(String[] args) {
-                               if (verifyParamsLength(args, 3)) {
-                                       usage();
-                                       System.exit(1);
-                               }
-
-                               if (!GRAPH_ML_CONVERTER.exportGraph(args)) {
-                                       System.exit(2);
-                               }
-                       }
-               },EXPORT_AS_GRAPH("Usage: export-as-graph-ml <janusgraph.properties> <output directory>", "export-as-graph-ml"){
-                       @Override
-                       void handle(String[] args) {
-                               if (verifyParamsLength(args, 3)) {
-                                       usage();
-                                       System.exit(1);
-                               }
-                               if (GRAPH_ML_CONVERTER.exportGraphMl(args) == null) {
-                                       System.exit(2);
-                               }
-                       }
-               },EXPORT_USERS("Usage: exportusers <janusgraph.properties> <output directory>", "exportusers"){
-                       @Override
-                       void handle(String[] args) {
-                               if (verifyParamsLength(args, 3)) {
-                                       usage();
-                                       System.exit(1);
-                               }
-                               if (!GRAPH_ML_CONVERTER.exportUsers(args)) {
-                                       System.exit(2);
-                               }
-                       }
-               },EXPORT_WITH_REPORT("Usage: export-as-graph-ml-with-data-report <janusgraph.properties> <output directory>", "export-as-graph-ml-with-data-report"){
-                       @Override
-                       void handle(String[] args) {
-                               if (verifyParamsLength(args, 3)) {
-                                       usage();
-                                       System.exit(1);
-                               }
-                               if (GRAPH_ML_CONVERTER.exportGraphMl(args) == null) {
-                                       System.exit(2);
-                               }
-                               String[] dataArgs = new String[] {GRAPH_ML_CONVERTER.exportGraphMl(args)};
-                               if (new GraphMLDataAnalyzer().analyzeGraphMLData(dataArgs) == null) {
-                                       System.exit(2);
-                               }
-                       }
-               },FIND_PROBLEM("Usage: findproblem <janusgraph.properties> <graph file location>", "findproblem"){
-                       @Override
-                       void handle(String[] args) {
-                               if (verifyParamsLength(args, 3)) {
-                                       usage();
-                                       System.exit(1);
-                               }
-                               if (!GRAPH_ML_CONVERTER.findErrorInJsonGraph(args)) {
-                                       System.exit(2);
-                               }
-                       }
-               },IMPORT("Usage: import <janusgraph.properties> <graph file location>", "import"){
-                       @Override
-                       void handle(String[] args) {
-                               if (verifyParamsLength(args, 3)) {
-                                       usage();
-                                       System.exit(1);
-                               }
-                               if (!GRAPH_ML_CONVERTER.importGraph(args)) {
-                                       System.exit(2);
-                               }
-                       }
-               },VALIDATE_JSON("Usage: validate-json <export graph path>", "validate-json"){
-                       @Override
-                       void handle(String[] args) throws IOException {
-                               if (verifyParamsLength(args, 2)) {
-                                       usage();
-                                       System.exit(1);
-                               }
-                               String jsonFilePath = args[1];
-                               GraphJsonValidator graphJsonValidator = new GraphJsonValidator();
-                               if (graphJsonValidator.verifyJanusGraphJson(jsonFilePath)) {
-                                       System.exit(2);
-                               }
-                       }
-               },NONE{
-                       @Override
-                       void handle(String[] args) {
-                               usage();
-                               System.exit(1);
-                       }
-
-                       void usage(){
-                               Arrays.stream(ExportImportEnum.values()).filter(type -> type != NONE).forEach(ExportImportEnum::usage);
-                       }
-               };
-
-               private static final GraphMLConverter GRAPH_ML_CONVERTER = new GraphMLConverter();
-               private String usage;
-               private String keyword;
-
-               ExportImportEnum(String usage, String keyword) {
-                       this.usage = usage;
-                       this.keyword = keyword;
-               }
-
-               ExportImportEnum() {}
-
-               void usage(){
-                       System.out.println(usage);
-               }
-
-               static ExportImportEnum getByKeyword(String keyword) {
-                       List<ExportImportEnum> collected = Arrays.stream(ExportImportEnum.values())
-                               .filter(type -> type != NONE)
-                               .filter(type -> type.keyword.equals(keyword))
-                               .collect(Collectors.toList());
-                       return collected.isEmpty() ? NONE : collected.get(0);
-               }
-
-               abstract void handle(String[] args) throws IOException;
-
-               private static boolean verifyParamsLength(String[] args, int i) {
-                       if (args == null) {
-                               return i > 0;
-                       }
-                       return args.length < i;
-               }
-       }
-
-       public static void main(String[] args) throws Exception {
-               ExportImportEnum type;
-               if (args == null || args.length < 1) {
-                       type = ExportImportEnum.NONE;
-               }else{
-                       type = ExportImportEnum.getByKeyword(getOperation(args).toLowerCase());
-               }
-               type.handle(args);
-       }
-
-       private static String getOperation(String[] args) {
-               String operation = null;
-               if (args != null) {
-                       operation = args[0];
-               }
-               return operation;
-       }
-
+    public static void main(String[] args) throws Exception {
+        ExportImportEnum type;
+        if (args == null || args.length < 1) {
+            type = ExportImportEnum.NONE;
+        } else {
+            type = ExportImportEnum.getByKeyword(getOperation(args).toLowerCase());
+        }
+        type.handle(args);
+    }
+
+    private static String getOperation(String[] args) {
+        String operation = null;
+        if (args != null) {
+            operation = args[0];
+        }
+        return operation;
+    }
+
+    enum ExportImportEnum {
+        DATA_REPORT("Usage: get-data-report-from-graph-ml <full path of .graphml file>", "get-data-report-from-graph-ml") {
+            @Override
+            void handle(String[] args) {
+                if (verifyParamsLength(args, 2)) {
+                    usage();
+                    System.exit(1);
+                }
+                String[] dataArgs = new String[]{args[1]};
+                if (new GraphMLDataAnalyzer().analyzeGraphMLData(dataArgs) == null) {
+                    System.exit(2);
+                }
+            }
+        }, EXPORT("Usage: export <janusgraph.properties> <output directory>", "export") {
+            @Override
+            void handle(String[] args) {
+                if (verifyParamsLength(args, 3)) {
+                    usage();
+                    System.exit(1);
+                }
+                if (!GRAPH_ML_CONVERTER.exportGraph(args)) {
+                    System.exit(2);
+                }
+            }
+        }, EXPORT_AS_GRAPH("Usage: export-as-graph-ml <janusgraph.properties> <output directory>", "export-as-graph-ml") {
+            @Override
+            void handle(String[] args) {
+                if (verifyParamsLength(args, 3)) {
+                    usage();
+                    System.exit(1);
+                }
+                if (GRAPH_ML_CONVERTER.exportGraphMl(args) == null) {
+                    System.exit(2);
+                }
+            }
+        }, EXPORT_USERS("Usage: exportusers <janusgraph.properties> <output directory>", "exportusers") {
+            @Override
+            void handle(String[] args) {
+                if (verifyParamsLength(args, 3)) {
+                    usage();
+                    System.exit(1);
+                }
+                if (!GRAPH_ML_CONVERTER.exportUsers(args)) {
+                    System.exit(2);
+                }
+            }
+        }, EXPORT_WITH_REPORT("Usage: export-as-graph-ml-with-data-report <janusgraph.properties> <output directory>",
+            "export-as-graph-ml-with-data-report") {
+            @Override
+            void handle(String[] args) {
+                if (verifyParamsLength(args, 3)) {
+                    usage();
+                    System.exit(1);
+                }
+                if (GRAPH_ML_CONVERTER.exportGraphMl(args) == null) {
+                    System.exit(2);
+                }
+                String[] dataArgs = new String[]{GRAPH_ML_CONVERTER.exportGraphMl(args)};
+                if (new GraphMLDataAnalyzer().analyzeGraphMLData(dataArgs) == null) {
+                    System.exit(2);
+                }
+            }
+        }, FIND_PROBLEM("Usage: findproblem <janusgraph.properties> <graph file location>", "findproblem") {
+            @Override
+            void handle(String[] args) {
+                if (verifyParamsLength(args, 3)) {
+                    usage();
+                    System.exit(1);
+                }
+                if (!GRAPH_ML_CONVERTER.findErrorInJsonGraph(args)) {
+                    System.exit(2);
+                }
+            }
+        }, IMPORT("Usage: import <janusgraph.properties> <graph file location>", "import") {
+            @Override
+            void handle(String[] args) {
+                if (verifyParamsLength(args, 3)) {
+                    usage();
+                    System.exit(1);
+                }
+                if (!GRAPH_ML_CONVERTER.importGraph(args)) {
+                    System.exit(2);
+                }
+            }
+        }, VALIDATE_JSON("Usage: validate-json <export graph path>", "validate-json") {
+            @Override
+            void handle(String[] args) throws IOException {
+                if (verifyParamsLength(args, 2)) {
+                    usage();
+                    System.exit(1);
+                }
+                String jsonFilePath = args[1];
+                GraphJsonValidator graphJsonValidator = new GraphJsonValidator();
+                if (graphJsonValidator.verifyJanusGraphJson(jsonFilePath)) {
+                    System.exit(2);
+                }
+            }
+        }, NONE {
+            @Override
+            void handle(String[] args) {
+                usage();
+                System.exit(1);
+            }
+
+            void usage() {
+                Arrays.stream(ExportImportEnum.values()).filter(type -> type != NONE).forEach(ExportImportEnum::usage);
+            }
+        };
+        private static final GraphMLConverter GRAPH_ML_CONVERTER = new GraphMLConverter();
+        private String usage;
+        private String keyword;
+
+        ExportImportEnum(String usage, String keyword) {
+            this.usage = usage;
+            this.keyword = keyword;
+        }
+
+        ExportImportEnum() {
+        }
+
+        static ExportImportEnum getByKeyword(String keyword) {
+            List<ExportImportEnum> collected = Arrays.stream(ExportImportEnum.values()).filter(type -> type != NONE)
+                .filter(type -> type.keyword.equals(keyword)).collect(Collectors.toList());
+            return collected.isEmpty() ? NONE : collected.get(0);
+        }
+
+        private static boolean verifyParamsLength(String[] args, int i) {
+            if (args == null) {
+                return i > 0;
+            }
+            return args.length < i;
+        }
+
+        void usage() {
+            System.out.println(usage);
+        }
+
+        abstract void handle(String[] args) throws IOException;
+    }
 }
index cad059c..54addb3 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
 import org.openecomp.sdc.asdctool.impl.ProductLogic;
index 2fb99e2..cca64b9 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
 import ch.qos.logback.core.Appender;
@@ -25,6 +24,7 @@ import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.slf4j.LoggerFactory;
 
 public abstract class SdcInternalTool {
+
     protected static void disableConsole() {
         org.slf4j.Logger rootLogger = LoggerFactory.getILoggerFactory().getLogger(Logger.ROOT_LOGGER_NAME);
         Appender appender = ((ch.qos.logback.classic.Logger) rootLogger).getAppender("STDOUT");
index e2a0834..661398b 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.stream.Stream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.lang3.ArrayUtils;
 import org.openecomp.sdc.asdctool.configuration.SdcSchemaFileImportConfiguration;
@@ -38,54 +53,25 @@ import org.springframework.context.annotation.AnnotationConfigApplicationContext
 import org.yaml.snakeyaml.DumperOptions;
 import org.yaml.snakeyaml.Yaml;
 
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.FileSystems;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Date;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.stream.Stream;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
-
-
 public class SdcSchemaFileImport {
 
     private static final Logger LOGGER = LoggerFactory.getLogger(SdcSchemaFileImport.class);
-
     private static final String SEPARATOR = FileSystems.getDefault().getSeparator();
-
     private static final String TOSCA_VERSION = "tosca_simple_yaml_1_1";
-
-    private static String importToscaPath;
-
     private static final byte[] buffer = new byte[1024];
-
     private static final String YAML_EXTENSION = ".yml";
-
     private static final String DEPLOYMENT_TYPE_ONAP = "onap";
-
+    private static String importToscaPath;
     private static String LICENSE_TXT;
-
     private static ZipOutputStream zos;
 
     public static void main(String[] args) throws Exception {
-
         //Generation flow start - generating SDC from normatives
         System.out.println("Starting SdcSchemaFileImport procedure...");
         final String FILE_NAME = "SDC.zip";
-
         if (args == null || !(args.length == 4 || args.length == 5)) {
             usageAndExit();
         }
-
         importToscaPath = args[0];
         String sdcReleaseNum = args[1];
         String conformanceLevel = args[2];
@@ -94,11 +80,8 @@ public class SdcSchemaFileImport {
         if (args.length == 5) {
             deploymentType = args[4];
         }
-
         ByteArrayOutputStream baos = new ByteArrayOutputStream();
-
         zos = new ZipOutputStream(baos);
-
         //Initialize the license text
         try {
             LICENSE_TXT = new String(Files.readAllBytes(Paths.get(appConfigDir + SEPARATOR + "license.txt")));
@@ -106,111 +89,78 @@ public class SdcSchemaFileImport {
             System.err.println("Couldn't read license.txt in location :" + appConfigDir + ", error: " + e);
             System.exit(1);
         }
-
         //Loop over schema file list and create each yaml file from /import/tosca folder
         SchemaZipFileEnum[] schemaFileList = SchemaZipFileEnum.values();
         for (SchemaZipFileEnum schemaZipFileEnum : schemaFileList) {
             String folderName = schemaZipFileEnum.getSourceFolderName();
             String fileName = schemaZipFileEnum.getSourceFileName();
-
             if ((folderName != null) && (fileName != null)) {
                 File folder = new File(importToscaPath, folderName);
                 File path = new File(folder, fileName + YAML_EXTENSION);
-
                 try (InputStream input = new FileInputStream(path)) {
                     // Convert the content of file to yaml
                     Yaml yamlFileSource = new Yaml();
                     Object content = yamlFileSource.load(input);
-
                     createAndSaveSchemaFileYaml(schemaZipFileEnum, content);
                 } catch (Exception e) {
-                    System.err.println(
-                        "Error in file creation : "
-                            + schemaZipFileEnum.getFileName()
-                            + ", "
-                            + e.getMessage());
+                    System.err.println("Error in file creation : " + schemaZipFileEnum.getFileName() + ", " + e.getMessage());
                     System.exit(1);
                 }
             }
         }
-
         createAndSaveNodeSchemaFile(deploymentType);
-
         try {
             //close the ZipOutputStream
             zos.close();
             System.out.println("File SDC.zip creation successful");
-
         } catch (Exception ex) {
             System.err.println("Failed to pack SDC.zip file, error: " + ex);
             System.exit(1);
         }
-
         //Generation flow end - generating SDC from normatives
-
         AnnotationConfigApplicationContext context = initContext(appConfigDir);
-        SdcSchemaFilesCassandraDao schemaFilesCassandraDao = (SdcSchemaFilesCassandraDao) context
-            .getBean("sdc-schema-files-cassandra-dao");
-
+        SdcSchemaFilesCassandraDao schemaFilesCassandraDao = (SdcSchemaFilesCassandraDao) context.getBean("sdc-schema-files-cassandra-dao");
         byte[] fileBytes = baos.toByteArray();
-
         Date date = new Date();
         String md5Hex = DigestUtils.md5Hex(fileBytes);
-
-        SdcSchemaFilesData schemeFileData = new SdcSchemaFilesData(sdcReleaseNum, date, conformanceLevel, FILE_NAME,
-            fileBytes, md5Hex);
+        SdcSchemaFilesData schemeFileData = new SdcSchemaFilesData(sdcReleaseNum, date, conformanceLevel, FILE_NAME, fileBytes, md5Hex);
         CassandraOperationStatus saveSchemaFile = schemaFilesCassandraDao.saveSchemaFile(schemeFileData);
-
         if (!saveSchemaFile.equals(CassandraOperationStatus.OK)) {
             System.err.println("SdcSchemaFileImport failed cassandra error" + saveSchemaFile);
             System.exit(1);
         }
-
         System.out.println("SdcSchemaFileImport successfully completed");
-
         System.exit(0);
     }
 
     public static void createAndSaveSchemaFileYaml(SchemaZipFileEnum schemaZipFileEnum, Object content) {
-        createAndSaveSchemaFileYaml(schemaZipFileEnum.getFileName(), schemaZipFileEnum.getImportFileList(),
-            schemaZipFileEnum.getCollectionTitle(), content);
+        createAndSaveSchemaFileYaml(schemaZipFileEnum.getFileName(), schemaZipFileEnum.getImportFileList(), schemaZipFileEnum.getCollectionTitle(),
+            content);
     }
 
-    public static void createAndSaveSchemaFileYaml(String fileName, String[] importFileList, String collectionTitle,
-        Object content) {
-
+    public static void createAndSaveSchemaFileYaml(String fileName, String[] importFileList, String collectionTitle, Object content) {
         //Initialize the snake yaml dumper option
         DumperOptions options = new DumperOptions();
         options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
-
         //Create the new yaml
         Yaml yaml = new Yaml(options);
         yaml.setName(fileName);
-
         //Initialize the yaml contents
         final Map<String, Object> data = new LinkedHashMap<>();
         data.put(ToscaTagNamesEnum.TOSCA_VERSION.getElementName(), TOSCA_VERSION);
-
         if (importFileList.length > 0) {
             data.put("imports", importFileList);
         }
-
         data.put(collectionTitle, content);
-
         //Save the new yaml to file
         try {
-
             FileWriter writer;
             File file = File.createTempFile(fileName, YAML_EXTENSION);
             writer = new FileWriter(file);
-
             //Add the license as comment in top of file
             writer.write(LICENSE_TXT);
-
             yaml.dump(data, writer);
-
             writer.close();
-
             // begin writing a new ZIP entry, positions the stream to the start of the entry data
             ZipEntry entry = new ZipEntry(yaml.getName() + YAML_EXTENSION);
             zos.putNextEntry(entry);
@@ -223,8 +173,6 @@ public class SdcSchemaFileImport {
             file.delete();
             stream.close();
             zos.closeEntry();
-
-
         } catch (IOException e) {
             System.out.println("Error in file creation : " + fileName + ", " + e.getMessage());
             System.exit(1);
@@ -238,46 +186,37 @@ public class SdcSchemaFileImport {
      * @throws IOException thrown in case of issues in reading files.
      */
     public static void createAndSaveNodeSchemaFile(String deploymentType) throws IOException {
-
         //Initialize the snake yaml dumper option
         DumperOptions options = new DumperOptions();
         options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
-
         Map<String, Object> nodeTypeList = new LinkedHashMap<>();
-
-        String[] importFileList = new String[]{"data.yml", "artifacts.yml", "capabilities.yml", "interfaces.yml",
-            "relationships.yml"};
-
+        String[] importFileList = new String[]{"data.yml", "artifacts.yml", "capabilities.yml", "interfaces.yml", "relationships.yml"};
         //Create node.yaml - collect all types from normative-types and heat-types directories
         String[] nodeTypesMainFolders = new String[]{"normative-types", "heat-types"};
-
         if (DEPLOYMENT_TYPE_ONAP.equals(deploymentType)) {
             String[] onapNodeTypesMainFolders = new String[]{"nfv-types"};
             nodeTypesMainFolders = ArrayUtils.addAll(nodeTypesMainFolders, onapNodeTypesMainFolders);
         }
-
         final String nodeTypesToscaEntry = "node_types";
         for (String nodeTypesMainFolder : nodeTypesMainFolders) {
             try (Stream<Path> paths = Files.walk(Paths.get(importToscaPath + SEPARATOR + nodeTypesMainFolder))) {
-                paths.filter(path -> path.getFileName().toString().toLowerCase().endsWith(YAML_EXTENSION))
-                    .forEach(yamlFile -> {
-                        try {
-                            final String path = yamlFile.toAbsolutePath().toString();
-                            System.out.println("Processing node type file " + path + "...");
-                            final FileInputStream inputStream = new FileInputStream(path);
-                            final Map<String, Object> load = new Yaml().loadAs(inputStream, Map.class);
-                            final Map<String, Object> nodeType = (Map<String, Object>) load.get(nodeTypesToscaEntry);
-                            if (nodeType == null) {
-                                LOGGER.error("Expecting '{}' entry in TOSCA yaml file '{}'", nodeTypesToscaEntry, path);
-                                System.exit(1);
-                            }
-                            nodeTypeList.putAll(nodeType);
-                        } catch (final Exception e) {
-                            LOGGER.error("An error has occurred while processing YAML '{}'",
-                                yamlFile.toAbsolutePath(), e);
+                paths.filter(path -> path.getFileName().toString().toLowerCase().endsWith(YAML_EXTENSION)).forEach(yamlFile -> {
+                    try {
+                        final String path = yamlFile.toAbsolutePath().toString();
+                        System.out.println("Processing node type file " + path + "...");
+                        final FileInputStream inputStream = new FileInputStream(path);
+                        final Map<String, Object> load = new Yaml().loadAs(inputStream, Map.class);
+                        final Map<String, Object> nodeType = (Map<String, Object>) load.get(nodeTypesToscaEntry);
+                        if (nodeType == null) {
+                            LOGGER.error("Expecting '{}' entry in TOSCA yaml file '{}'", nodeTypesToscaEntry, path);
                             System.exit(1);
                         }
-                    });
+                        nodeTypeList.putAll(nodeType);
+                    } catch (final Exception e) {
+                        LOGGER.error("An error has occurred while processing YAML '{}'", yamlFile.toAbsolutePath(), e);
+                        System.exit(1);
+                    }
+                });
             }
         }
         createAndSaveSchemaFileYaml("nodes", importFileList, nodeTypesToscaEntry, nodeTypeList);
@@ -289,13 +228,12 @@ public class SdcSchemaFileImport {
     }
 
     private static void SdcSchemaFileImportUsage() {
-        System.err.println(
-            "Usage: <file dir/filename> <SDC release number> <Schema conformance level> <configuration dir> <deployment type optional>");
+        System.err
+            .println("Usage: <file dir/filename> <SDC release number> <Schema conformance level> <configuration dir> <deployment type optional>");
     }
 
     private static AnnotationConfigApplicationContext initContext(String appConfigDir) {
-        ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(),
-            appConfigDir);
+        ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
         new ConfigurationManager(configurationSource);
         return new AnnotationConfigApplicationContext(SdcSchemaFileImportConfiguration.class);
     }
index 6ab9f9c..6b4b86d 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
-import org.openecomp.sdc.asdctool.impl.UpdatePropertyOnVertex;
-import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
-import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
-import org.openecomp.sdc.common.log.wrappers.Logger;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import org.openecomp.sdc.asdctool.impl.UpdatePropertyOnVertex;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.common.log.wrappers.Logger;
 
 public class UpdateIsVnfMenu {
 
-       private static Logger log = Logger.getLogger(UpdateIsVnfMenu.class.getName());
-
-       private static void usageAndExit() {
-               updateIsVnfTrueUsage();
-               System.exit(1);
-       }
-
-       private static void updateIsVnfTrueUsage() {
-               System.out.println(
-                               "Usage: updateIsVnfTrue <janusgraph.properties> <systemServiceName1,systemServiceName2,...,systemServiceNameN>");
-       }
-
-       public static void main(String[] args) {
-
-               if (args == null || args.length < 1) {
-                       usageAndExit();
-               }
-
-               UpdatePropertyOnVertex updatePropertyOnVertex = new UpdatePropertyOnVertex();
-               String operation = args[0];
-
-               switch (operation.toLowerCase()) {
-
-               case "updateisvnftrue":
-                       boolean isValid = verifyParamsLength(args, 3);
-                       if (false == isValid) {
-                               updateIsVnfTrueUsage();
-                               System.exit(1);
-                       }
-
-                       Map<String, Object> keyValueToSet = new HashMap<>();
-                       keyValueToSet.put(GraphPropertiesDictionary.IS_VNF.getProperty(), true);
-
-                       List<Map<String, Object>> orCriteria = buildCriteriaFromSystemServiceNames(args[2]);
-                       Integer updatePropertyOnServiceAtLeastCertified = updatePropertyOnVertex
-                                       .updatePropertyOnServiceAtLeastCertified(args[1], keyValueToSet, orCriteria);
-
-                       if (updatePropertyOnServiceAtLeastCertified == null) {
-                               System.exit(2);
-                       } else if (updatePropertyOnServiceAtLeastCertified >= 0) {
-                               log.debug("Number of updated services is {}",updatePropertyOnServiceAtLeastCertified);
-                               System.exit(0);
-                       }
-
-                       break;
-               default:
-                       usageAndExit();
-               }
-
-       }
-
-       private static List<Map<String, Object>> buildCriteriaFromSystemServiceNames(String systemList) {
-
-               List<Map<String, Object>> systemNames = new ArrayList<>();
-
-               String[] split = systemList.split(",");
-               if (split != null) {
-                       for (String systemName : split) {
-                               systemName = systemName.trim();
-
-                               Map<String, Object> map = new HashMap();
-                               map.put(GraphPropertiesDictionary.SYSTEM_NAME.getProperty(), systemName);
-                               map.put(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.Service.getName());
-
-                               systemNames.add(map);
-                       }
-               }
-
-               return systemNames;
-       }
-
-       private static boolean verifyParamsLength(String[] args, int i) {
-               if (args == null) {
-                       if (i > 0) {
-                               return false;
-                       }
-                       return true;
-               }
-
-               if (args.length >= i) {
-                       return true;
-               }
-               return false;
-       }
-
+    // CLI tool entry point: marks certified services as VNFs by setting IS_VNF=true on their vertices.
+    private static final Logger log = Logger.getLogger(UpdateIsVnfMenu.class.getName());
+
+    private static void usageAndExit() {
+        updateIsVnfTrueUsage();
+        System.exit(1);
+    }
+
+    private static void updateIsVnfTrueUsage() {
+        System.out.println("Usage: updateIsVnfTrue <janusgraph.properties> <systemServiceName1,systemServiceName2,...,systemServiceNameN>");
+    }
+
+    public static void main(String[] args) {
+        if (args == null || args.length < 1) {
+            usageAndExit();
+        }
+        UpdatePropertyOnVertex updatePropertyOnVertex = new UpdatePropertyOnVertex();
+        String operation = args[0];
+        switch (operation.toLowerCase()) {
+            case "updateisvnftrue":
+                // Expects: updateisvnftrue <janusgraph.properties> <comma-separated system service names>
+                if (!verifyParamsLength(args, 3)) {
+                    updateIsVnfTrueUsage();
+                    System.exit(1);
+                }
+                Map<String, Object> keyValueToSet = new HashMap<>();
+                keyValueToSet.put(GraphPropertiesDictionary.IS_VNF.getProperty(), true);
+                List<Map<String, Object>> orCriteria = buildCriteriaFromSystemServiceNames(args[2]);
+                Integer updatePropertyOnServiceAtLeastCertified = updatePropertyOnVertex
+                    .updatePropertyOnServiceAtLeastCertified(args[1], keyValueToSet, orCriteria);
+                // null -> update failed (exit 2); >= 0 -> number of services updated (exit 0).
+                if (updatePropertyOnServiceAtLeastCertified == null) {
+                    System.exit(2);
+                } else if (updatePropertyOnServiceAtLeastCertified >= 0) {
+                    log.debug("Number of updated services is {}", updatePropertyOnServiceAtLeastCertified);
+                    System.exit(0);
+                }
+                break;
+            default:
+                usageAndExit();
+        }
+    }
+
+    /**
+     * Builds one OR-criterion per system service name: match on SYSTEM_NAME and the Service node label.
+     */
+    private static List<Map<String, Object>> buildCriteriaFromSystemServiceNames(String systemList) {
+        List<Map<String, Object>> systemNames = new ArrayList<>();
+        // String#split never returns null, so the parts can be iterated directly.
+        for (String systemName : systemList.split(",")) {
+            Map<String, Object> map = new HashMap<>();
+            map.put(GraphPropertiesDictionary.SYSTEM_NAME.getProperty(), systemName.trim());
+            map.put(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.Service.getName());
+            systemNames.add(map);
+        }
+        return systemNames;
+    }
+
+    /**
+     * Returns true when at least {@code i} arguments were supplied; a null array counts as zero arguments.
+     */
+    private static boolean verifyParamsLength(String[] args, int i) {
+        if (args == null) {
+            return i <= 0;
+        }
+        return args.length >= i;
+    }
 }
index e68c9dd..dfeba63 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
 import java.nio.file.Path;
@@ -42,18 +41,14 @@ public class ValidationTool {
     private static final Logger log = Logger.getLogger(ValidationTool.class.getName());
 
     public static void main(String[] args) {
-
         String outputPath = args[0];
         String txtReportFilePath = ValidationConfigManager.txtReportFilePath(outputPath);
         String csvReportFilePath = ValidationConfigManager.csvReportFilePath(outputPath, System::currentTimeMillis);
-
         CSVFile csvFile = ReportFile.makeCsvFile(makeNioWriter(Paths.get(csvReportFilePath)));
         TXTFile textFile = ReportFile.makeTxtFile(makeNioWriter(Paths.get(txtReportFilePath)));
-
         String appConfigDir = args[1];
         AnnotationConfigApplicationContext context = initContext(appConfigDir);
         ValidationToolBL validationToolBL = context.getBean(ValidationToolBL.class);
-
         log.info("Start Validation Tool");
         Report report = Report.make();
         boolean result = validationToolBL.validateAll(report, textFile);
@@ -69,9 +64,7 @@ public class ValidationTool {
     }
 
     private static <A extends FileType> ReportFileWriter<A> makeNioWriter(Path path) {
-        return ReportFileWriter.makeNioWriter(path, ex ->
-            log.info("write to file failed - {}", ex.getClass().getSimpleName(), ex)
-        );
+        return ReportFileWriter.makeNioWriter(path, ex -> log.info("write to file failed - {}", ex.getClass().getSimpleName(), ex));
     }
 
     private static AnnotationConfigApplicationContext initContext(String appConfigDir) {
index f561f86..b47e3ec 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,9 +17,9 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.main;
 
+import java.util.Arrays;
 import org.openecomp.sdc.asdctool.configuration.VrfObjectFixConfiguration;
 import org.openecomp.sdc.asdctool.impl.VrfObjectFixHandler;
 import org.openecomp.sdc.be.config.ConfigurationManager;
@@ -29,13 +29,12 @@ import org.openecomp.sdc.common.impl.FSConfigurationSource;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 
-import java.util.Arrays;
-
 public class VrfObjectFixMenu {
 
     private static final Logger log = Logger.getLogger(VrfObjectFixMenu.class);
 
-    private VrfObjectFixMenu(){}
+    private VrfObjectFixMenu() {
+    }
 
     public static void main(String[] args) {
         if (isNotValidArguments(args)) {
@@ -47,7 +46,7 @@ public class VrfObjectFixMenu {
         VrfObjectFixHandler vrfObjectFixHandler = getVrfObjectFixHandler();
         if (vrfObjectFixHandler.handle(args[1], args.length == 3 ? args[2] : null)) {
             log.info("#main - The {} operation of the corrupted VRFObject Node Types has been finished successfully", args[1]);
-        } else{
+        } else {
             log.info("#main - The {} operation of the corrupted VRFObject Node Types has been failed", args[1]);
             System.exit(2);
         }
@@ -63,10 +62,8 @@ public class VrfObjectFixMenu {
         return args == null || args.length < 2;
     }
 
-
     private static void initConfig(String configDir) {
         ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), configDir);
         new ConfigurationManager(configurationSource);
     }
-
 }
index 65f8c7b..3cadce3 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.config;
 
+import java.util.ArrayList;
+import java.util.List;
 import org.openecomp.sdc.asdctool.migration.core.SdcMigrationTool;
 import org.openecomp.sdc.asdctool.migration.core.task.Migration;
 import org.openecomp.sdc.asdctool.migration.core.task.PostMigration;
@@ -49,20 +50,15 @@ import org.springframework.context.annotation.ComponentScan;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
 
-import java.util.ArrayList;
-import java.util.List;
-
 @Configuration
 @Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
 @ComponentScan({"org.openecomp.sdc.asdctool.migration.tasks",//migration tasks
-        "org.openecomp.sdc.asdctool.migration.config.mocks",
-        "org.openecomp.sdc.be.filters" })
+    "org.openecomp.sdc.asdctool.migration.config.mocks", "org.openecomp.sdc.be.filters"})
 public class MigrationSpringConfig {
 
-    @Autowired(required=false)
+    @Autowired(required = false)
     private List<Migration> migrations = new ArrayList<>();
-    
-    @Autowired(required=false)
+    @Autowired(required = false)
     private List<PostMigration> postMigrations = new ArrayList<>();
 
     @Bean(name = "sdc-migration-tool")
@@ -86,23 +82,21 @@ public class MigrationSpringConfig {
     }
 
     @Bean(name = "componentsCleanBusinessLogic")
-    public ComponentsCleanBusinessLogic componentsCleanBusinessLogic(
-        IElementOperation elementDao,
-        IGroupOperation groupOperation,
-        IGroupInstanceOperation groupInstanceOperation,
-        IGroupTypeOperation groupTypeOperation,
-        InterfaceOperation interfaceOperation,
-        InterfaceLifecycleOperation interfaceLifecycleTypeOperation,
-        ResourceBusinessLogic resourceBusinessLogic,
-        ServiceBusinessLogic serviceBusinessLogic,
-        ArtifactsOperations artifactToscaOperation) {
-        return  new ComponentsCleanBusinessLogic(elementDao, groupOperation,
-        groupInstanceOperation, groupTypeOperation, interfaceOperation, interfaceLifecycleTypeOperation, resourceBusinessLogic,
-        serviceBusinessLogic, artifactToscaOperation);
+    public ComponentsCleanBusinessLogic componentsCleanBusinessLogic(IElementOperation elementDao, IGroupOperation groupOperation,
+                                                                     IGroupInstanceOperation groupInstanceOperation,
+                                                                     IGroupTypeOperation groupTypeOperation, InterfaceOperation interfaceOperation,
+                                                                     InterfaceLifecycleOperation interfaceLifecycleTypeOperation,
+                                                                     ResourceBusinessLogic resourceBusinessLogic,
+                                                                     ServiceBusinessLogic serviceBusinessLogic,
+                                                                     ArtifactsOperations artifactToscaOperation) {
+        return new ComponentsCleanBusinessLogic(elementDao, groupOperation, groupInstanceOperation, groupTypeOperation, interfaceOperation,
+            interfaceLifecycleTypeOperation, resourceBusinessLogic, serviceBusinessLogic, artifactToscaOperation);
     }
-    
+
     @Bean(name = "dmaapClientFactory")
-    public DmaapClientFactory getDmaapClientFactory() {return new DmaapClientFactory();}
+    public DmaapClientFactory getDmaapClientFactory() {
+        return new DmaapClientFactory();
+    }
 
     @Bean(name = "healthCheckBusinessLogic")
     public HealthCheckBusinessLogic getHealthCheckBusinessLogic() {
index e8c6a95..2c5cc63 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.config.mocks;
 
 import com.att.nsa.apiClient.credentials.ApiCredential;
 import fj.data.Either;
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
 import org.openecomp.sdc.be.components.distribution.engine.CambriaErrorResponse;
 import org.openecomp.sdc.be.components.distribution.engine.ICambriaHandler;
 import org.openecomp.sdc.be.components.distribution.engine.INotificationData;
 import org.openecomp.sdc.be.components.distribution.engine.SubscriberTypeEnum;
 import org.springframework.stereotype.Component;
 
-import java.util.Collection;
-import java.util.List;
-import java.util.Set;
-
 @Component("cambriaHandler")
 public class CambriaHandlerMock implements ICambriaHandler {
 
@@ -41,32 +39,38 @@ public class CambriaHandlerMock implements ICambriaHandler {
     }
 
     @Override
-    public CambriaErrorResponse createTopic(Collection<String> hostSet, String apiKey, String secretKey, String topicName, int partitionCount, int replicationCount) {
+    public CambriaErrorResponse createTopic(Collection<String> hostSet, String apiKey, String secretKey, String topicName, int partitionCount,
+                                            int replicationCount) {
         return null;
     }
 
     @Override
-    public CambriaErrorResponse unRegisterFromTopic(Collection<String> hostSet, String managerApiKey, String managerSecretKey, String subscriberApiKey, SubscriberTypeEnum subscriberTypeEnum, String topicName) {
+    public CambriaErrorResponse unRegisterFromTopic(Collection<String> hostSet, String managerApiKey, String managerSecretKey,
+                                                    String subscriberApiKey, SubscriberTypeEnum subscriberTypeEnum, String topicName) {
         return null;
     }
 
     @Override
-    public CambriaErrorResponse registerToTopic(Collection<String> hostSet, String managerApiKey, String managerSecretKey, String subscriberApiKey, SubscriberTypeEnum subscriberTypeEnum, String topicName) {
+    public CambriaErrorResponse registerToTopic(Collection<String> hostSet, String managerApiKey, String managerSecretKey, String subscriberApiKey,
+                                                SubscriberTypeEnum subscriberTypeEnum, String topicName) {
         return null;
     }
 
     @Override
-    public com.att.nsa.cambria.client.CambriaConsumer createConsumer(Collection<String> hostSet, String topicName, String apiKey, String secretKey, String consumerId, String consumerGroup, int timeoutMS) throws Exception {
+    public com.att.nsa.cambria.client.CambriaConsumer createConsumer(Collection<String> hostSet, String topicName, String apiKey, String secretKey,
+                                                                     String consumerId, String consumerGroup, int timeoutMS) throws Exception {
         return null;
     }
 
     @Override
-    public CambriaErrorResponse sendNotification(String topicName, String uebPublicKey, String uebSecretKey, List<String> uebServers, INotificationData data) {
+    public CambriaErrorResponse sendNotification(String topicName, String uebPublicKey, String uebSecretKey, List<String> uebServers,
+                                                 INotificationData data) {
         return null;
     }
 
     @Override
-    public CambriaErrorResponse sendNotificationAndClose(String topicName, String uebPublicKey, String uebSecretKey, List<String> uebServers, INotificationData data, long waitBeforeCloseTimeout) {
+    public CambriaErrorResponse sendNotificationAndClose(String topicName, String uebPublicKey, String uebSecretKey, List<String> uebServers,
+                                                         INotificationData data, long waitBeforeCloseTimeout) {
         return null;
     }
 
index 0be2552..b7273d7 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.config.mocks;
 
 import org.openecomp.sdc.be.components.distribution.engine.DistributionEngineClusterHealth;
index 7b90e75..9d47a3b 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.config.mocks;
 
 import org.openecomp.sdc.be.components.distribution.engine.IDistributionEngine;
index a6334ef..04e0ef8 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.config.mocks;
 
 import org.openecomp.sdc.be.components.distribution.engine.DmaapHealth;
@@ -25,6 +24,7 @@ import org.springframework.stereotype.Component;
 
 @Component("dmaapHealth")
 public class DmaapHealthCheckMock extends DmaapHealth {
+
     @Override
     public DmaapHealth init() {
         return null;
index 46470cc..0c5c356 100644 (file)
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.config.mocks;
 
-import org.openecomp.sdc.be.components.health.PortalHealthCheckBuilder;
-import org.springframework.stereotype.Component;
-
 import javax.annotation.PostConstruct;
 import javax.annotation.PreDestroy;
+import org.openecomp.sdc.be.components.health.PortalHealthCheckBuilder;
+import org.springframework.stereotype.Component;
 
 @Component("portalHealthCheckBusinessLogic")
 public class PortalHealthCheckBuilderMock extends PortalHealthCheckBuilder {
 
-
     @Override
     @PostConstruct
     public PortalHealthCheckBuilder init() {
@@ -39,6 +36,5 @@ public class PortalHealthCheckBuilderMock extends PortalHealthCheckBuilder {
     @Override
     @PreDestroy
     protected void destroy() {
-
     }
 }
index a18ea69..5cfdc93 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.core;
 
 import java.math.BigInteger;
 import lombok.Getter;
 
 @Getter
-public class DBVersion implements Comparable<DBVersion>{
+public class DBVersion implements Comparable<DBVersion> {
 
+    /**
+     * The current db version. should be tested against real db to verify it is compatible to the db version
+     */
+    public static final DBVersion DEFAULT_VERSION = new DBVersion(1710, 0);
     private static final String VERSION_PARTS_SEPARATOR = "\\.";
     private static final int MAJOR_PART_IDX = 0;
     private static final int MINOR_PART_IDX = 1;
     private final BigInteger major;
     private final BigInteger minor;
 
-    /**
-     * The current db version. should be tested against real db to verify it is compatible to the db version
-     */
-    public static final DBVersion DEFAULT_VERSION = new DBVersion(1710, 0);
-
     private DBVersion(BigInteger major, BigInteger minor) {
         this.major = major;
         this.minor = minor;
@@ -56,9 +54,7 @@ public class DBVersion implements Comparable<DBVersion>{
         if (split.length != 2) {
             throw new MigrationException("version must be of pattern: <major>.<minor>");
         }
-        return new DBVersion(getVersionPart(split[MAJOR_PART_IDX]),
-                             getVersionPart(split[MINOR_PART_IDX]));
-
+        return new DBVersion(getVersionPart(split[MAJOR_PART_IDX]), getVersionPart(split[MINOR_PART_IDX]));
     }
 
     private static BigInteger getVersionPart(String versionPart) {
@@ -76,11 +72,13 @@ public class DBVersion implements Comparable<DBVersion>{
 
     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
         DBVersion dbVersion = (DBVersion) o;
-
         return major.equals(dbVersion.major) && minor.equals(dbVersion.minor);
     }
 
index 5e9d6e4..464dc70 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.core;
 
 public class MigrationException extends RuntimeException {
@@ -29,5 +28,4 @@ public class MigrationException extends RuntimeException {
     public MigrationException(String message, RuntimeException e) {
         super(message, e);
     }
-
 }
index bdae790..cb2f862 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,9 +17,9 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.core;
 
+import java.util.List;
 import org.openecomp.sdc.asdctool.migration.core.execution.MigrationExecutionResult;
 import org.openecomp.sdc.asdctool.migration.core.execution.MigrationExecutorImpl;
 import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
@@ -29,14 +29,10 @@ import org.openecomp.sdc.asdctool.migration.resolver.MigrationResolver;
 import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
-import java.util.List;
-
 public class SdcMigrationTool {
 
     private static final Logger LOGGER = Logger.getLogger(SdcMigrationTool.class);
-
     private MigrationResolver migrationsResolver;
-
     private SdcRepoService sdcRepoService;
 
     public SdcMigrationTool(MigrationResolver migrationsResolver, SdcRepoService sdcRepoService) {
@@ -56,13 +52,16 @@ public class SdcMigrationTool {
             try {
                 MigrationExecutionResult executionResult = new MigrationExecutorImpl().execute(migration);
                 if (migrationHasFailed(executionResult)) {
-                    LOGGER.error("migration {} with version {} has failed. error msg: {}", migration.getClass().getName(), migration.getVersion().toString(), executionResult.getMsg());
+                    LOGGER.error("migration {} with version {} has failed. error msg: {}", migration.getClass().getName(),
+                        migration.getVersion().toString(), executionResult.getMsg());
                     return false;
                 }
-                if(migration.getAspectMigration() == AspectMigrationEnum.MIGRATION)
-                       sdcRepoService.createMigrationTask(executionResult.toMigrationTaskEntry());
+                if (migration.getAspectMigration() == AspectMigrationEnum.MIGRATION) {
+                    sdcRepoService.createMigrationTask(executionResult.toMigrationTaskEntry());
+                }
             } catch (RuntimeException e) {
-                LOGGER.error("migration {} with version {} has failed. error msg: {}", migration.getClass().getName(), migration.getVersion().toString(), e);
+                LOGGER.error("migration {} with version {} has failed. error msg: {}", migration.getClass().getName(),
+                    migration.getVersion().toString(), e);
                 return false;
             }
         }
@@ -79,5 +78,4 @@ public class SdcMigrationTool {
             sdcRepoService.clearTasksForCurrentMajor();
         }
     }
-
 }
index e3df42e..0d0dbf7 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.core.execution;
 
+import java.util.Date;
 import org.openecomp.sdc.asdctool.migration.core.DBVersion;
 import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
 import org.openecomp.sdc.be.resources.data.MigrationTaskEntry;
 
-import java.util.Date;
-
 public class MigrationExecutionResult {
 
     private MigrationResult.MigrationStatus migrationStatus;
@@ -48,7 +46,6 @@ public class MigrationExecutionResult {
         return migrationTaskEntry;
     }
 
-
     public MigrationResult.MigrationStatus getMigrationStatus() {
         return migrationStatus;
     }
index 4d1e74b..ef57389 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.core.execution;
 
 import org.openecomp.sdc.asdctool.migration.core.MigrationException;
@@ -31,5 +30,4 @@ public interface MigrationExecutor {
      * @throws MigrationException in case there was an unexpected exception during migration
      */
     MigrationExecutionResult execute(IMigrationStage migration) throws MigrationException;
-
 }
index b77a35c..fe891f4 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.core.execution;
 
 import org.openecomp.sdc.asdctool.migration.core.MigrationException;
@@ -26,7 +25,6 @@ import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.springframework.util.StopWatch;
 
-
 public class MigrationExecutorImpl implements MigrationExecutor {
 
     private static final Logger LOGGER = Logger.getLogger(MigrationExecutorImpl.class);
@@ -34,7 +32,8 @@ public class MigrationExecutorImpl implements MigrationExecutor {
     @Override
     public MigrationExecutionResult execute(IMigrationStage migration) throws MigrationException {
         try {
-            LOGGER.info("starting migration {}. description: {}. version {}", migration.getClass().getName(), migration.description(),  migration.getVersion().toString());
+            LOGGER.info("starting migration {}. description: {}. version {}", migration.getClass().getName(), migration.description(),
+                migration.getVersion().toString());
             StopWatch stopWatch = new StopWatch();
             stopWatch.start();
             MigrationResult migrationResult = migration.migrate();
@@ -44,12 +43,13 @@ public class MigrationExecutorImpl implements MigrationExecutor {
         } catch (RuntimeException e) {
             LOGGER.error("migration {} has failed!", migration.description(), e);
             throw new MigrationException("migration %s failed!!!", e);
-
         }
     }
 
     private MigrationExecutionResult logAndCreateExecutionResult(IMigrationStage migration, MigrationResult migrationResult, double executionTime) {
-        LOGGER.info("finished migration {}. with version {}. migration status: {}, migration message: {}, execution time: {}", migration.getClass().getName(),  migration.getVersion().toString(), migrationResult.getMigrationStatus().name(), migrationResult.getMsg(), executionTime);
+        LOGGER.info("finished migration {}. with version {}. migration status: {}, migration message: {}, execution time: {}",
+            migration.getClass().getName(), migration.getVersion().toString(), migrationResult.getMigrationStatus().name(), migrationResult.getMsg(),
+            executionTime);
         return createMigrationTask(migration, migrationResult, executionTime);
     }
 
@@ -63,5 +63,4 @@ public class MigrationExecutorImpl implements MigrationExecutor {
         migrationExecutionResult.setDescription(migration.description());
         return migrationExecutionResult;
     }
-
 }
index c17c8da..a981df1 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.core.task;
 
 import org.openecomp.sdc.asdctool.migration.core.DBVersion;
 
 public interface IMigrationStage {
-       
-       String description();
+
+    String description();
 
     DBVersion getVersion();
-    
+
     MigrationResult migrate();
-    
+
     AspectMigrationEnum getAspectMigration();
-    
-       public enum AspectMigrationEnum {
-               BEFORE_MIGRATION,
-               MIGRATION,
-               AFTER_MIGRATION;
-       }
+
+    public enum AspectMigrationEnum {BEFORE_MIGRATION, MIGRATION, AFTER_MIGRATION;}
 }
index df4b077..61bc9bd 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.core.task;
 
-public interface Migration extends IMigrationStage{
-       @Override
-       default
-    AspectMigrationEnum getAspectMigration(){
-       return AspectMigrationEnum.MIGRATION;
-    }
+public interface Migration extends IMigrationStage {
 
+    @Override
+    default AspectMigrationEnum getAspectMigration() {
+        return AspectMigrationEnum.MIGRATION;
+    }
 }
index daa6ea7..791a488 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.core.task;
 
 public class MigrationResult {
@@ -25,22 +24,6 @@ public class MigrationResult {
     private String msg;
     private MigrationStatus migrationStatus;
 
-    public String getMsg() {
-        return msg;
-    }
-
-    public void setMsg(String msg) {
-        this.msg = msg;
-    }
-
-    public MigrationStatus getMigrationStatus() {
-        return migrationStatus;
-    }
-
-    public void setMigrationStatus(MigrationStatus migrationStatus) {
-        this.migrationStatus = migrationStatus;
-    }
-
     public static MigrationResult success() {
         MigrationResult success = new MigrationResult();
         success.setMigrationStatus(MigrationResult.MigrationStatus.COMPLETED);
@@ -54,11 +37,21 @@ public class MigrationResult {
         return error;
     }
 
-    public enum MigrationStatus {
-        COMPLETED,
-        COMPLETED_WITH_ERRORS,
-        FAILED
+    public String getMsg() {
+        return msg;
+    }
+
+    public void setMsg(String msg) {
+        this.msg = msg;
     }
 
+    public MigrationStatus getMigrationStatus() {
+        return migrationStatus;
+    }
+
+    public void setMigrationStatus(MigrationStatus migrationStatus) {
+        this.migrationStatus = migrationStatus;
+    }
 
+    public enum MigrationStatus {COMPLETED, COMPLETED_WITH_ERRORS, FAILED}
 }
index 5ed2e56..c6d0990 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.core.task;
 
 import org.openecomp.sdc.asdctool.migration.core.DBVersion;
 
 public interface PostMigration extends IMigrationStage {
-  
-       @Override
-       default
-       public DBVersion getVersion() {
-               return DBVersion.DEFAULT_VERSION;
-       }
-       
-       @Override
-       default
-    AspectMigrationEnum getAspectMigration(){
-       return AspectMigrationEnum.AFTER_MIGRATION;
+
+    @Override
+    default public DBVersion getVersion() {
+        return DBVersion.DEFAULT_VERSION;
     }
 
+    @Override
+    default AspectMigrationEnum getAspectMigration() {
+        return AspectMigrationEnum.AFTER_MIGRATION;
+    }
 }
index 74c7405..53f0d4b 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.dao;
 
 import com.datastax.driver.core.ResultSet;
@@ -36,5 +35,4 @@ public interface MigrationTasksAccessor {
 
     @Query("DELETE FROM sdcrepository.migrationTasks WHERE major_version = :majorVersion")
     void deleteTasksForMajorVersion(@Param("majorVersion") Long majorVersion);
-
 }
index aabd4d8..186fb1f 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.dao;
 
 import com.datastax.driver.core.ResultSet;
@@ -26,6 +25,11 @@ import com.datastax.driver.core.Session;
 import com.datastax.driver.mapping.Mapper;
 import com.datastax.driver.mapping.MappingManager;
 import fj.data.Either;
+import java.math.BigInteger;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
+import javax.annotation.PostConstruct;
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.openecomp.sdc.asdctool.migration.core.DBVersion;
 import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
@@ -37,12 +41,6 @@ import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 
-import javax.annotation.PostConstruct;
-import java.math.BigInteger;
-import java.util.Collections;
-import java.util.List;
-import java.util.stream.Collectors;
-
 @Service
 public class MigrationTasksDao extends CassandraDao {
 
@@ -51,7 +49,7 @@ public class MigrationTasksDao extends CassandraDao {
     private Mapper<MigrationTaskEntry> migrationTaskMapper;
 
     @Autowired
-    public MigrationTasksDao(CassandraClient cassandraClient){
+    public MigrationTasksDao(CassandraClient cassandraClient) {
         super(cassandraClient);
     }
 
@@ -64,12 +62,11 @@ public class MigrationTasksDao extends CassandraDao {
                 session = result.left().value().left;
                 manager = result.left().value().right;
                 migrationTasksAccessor = manager.createAccessor(MigrationTasksAccessor.class);
-                migrationTaskMapper =  manager.mapper(MigrationTaskEntry.class);
+                migrationTaskMapper = manager.mapper(MigrationTaskEntry.class);
                 logger.info("** migrationTasksAccessor created");
             } else {
                 logger.info("** migrationTasksAccessor failed");
-                throw new RuntimeException("Artifact keyspace [" + keyspace + "] failed to connect with error : "
-                        + result.right().value());
+                throw new RuntimeException("Artifact keyspace [" + keyspace + "] failed to connect with error : " + result.right().value());
             }
         } else {
             logger.info("** Cassandra client isn't connected");
@@ -83,7 +80,7 @@ public class MigrationTasksDao extends CassandraDao {
             Row minorVersionRow = latestMinorVersion.one();
             return minorVersionRow == null ? DBVersion.DEFAULT_VERSION.getMinor() : BigInteger.valueOf(minorVersionRow.getLong(0));
         } catch (RuntimeException e) {
-            logger.error("failed to get latest minor version for major version {}", majorVersion,  e);
+            logger.error("failed to get latest minor version for major version {}", majorVersion, e);
             throw e;
         }
     }
@@ -93,13 +90,13 @@ public class MigrationTasksDao extends CassandraDao {
             ResultSet latestMajorVersion = migrationTasksAccessor.getLatestMajorVersion();
             List<Row> all = latestMajorVersion.all();
             Long majorVersionRow = null;
-            if (all.size() != 0){
+            if (all.size() != 0) {
                 List<Long> majorVersions = all.stream().map(p -> p.getLong(0)).collect(Collectors.toList());
                 majorVersionRow = Collections.max(majorVersions);
             }
             return majorVersionRow == null ? DBVersion.DEFAULT_VERSION.getMajor() : BigInteger.valueOf(majorVersionRow);
         } catch (RuntimeException e) {
-            logger.error("failed to get latest major version ",  e);
+            logger.error("failed to get latest major version ", e);
             throw e;
         }
     }
@@ -108,7 +105,7 @@ public class MigrationTasksDao extends CassandraDao {
         try {
             migrationTasksAccessor.deleteTasksForMajorVersion(majorVersion.longValue());
         } catch (RuntimeException e) {
-            logger.error("failed to delete tasks for major version {}", majorVersion,  e);
+            logger.error("failed to delete tasks for major version {}", majorVersion, e);
             throw e;
         }
     }
@@ -116,6 +113,4 @@ public class MigrationTasksDao extends CassandraDao {
     public void createMigrationTask(MigrationTaskEntry migrationTask) {
         migrationTaskMapper.save(migrationTask);
     }
-
-
 }
index a75e11e..d8180bb 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.main;
 
 import org.apache.commons.cli.Option;
@@ -40,6 +39,10 @@ public class MigrationMenu extends SpringCLITool {
         migrationMenu.doMigrate(enforceAll, cliToolData.getSpringApplicationContext());
     }
 
+    private static Option buildEnforceAllOption() {
+        return Option.builder("e").longOpt("enforceAll").desc("enforce running all migration steps for current version").build();
+    }
+
     private void doMigrate(boolean enforceAll, AbstractApplicationContext context) {
         SdcMigrationTool migrationTool = context.getBean(SdcMigrationTool.class);
         boolean migrate = migrationTool.migrate(enforceAll);
@@ -65,13 +68,6 @@ public class MigrationMenu extends SpringCLITool {
         return "sdc-migration";
     }
 
-    private static Option buildEnforceAllOption() {
-        return Option.builder("e")
-                .longOpt("enforceAll")
-                .desc("enforce running all migration steps for current version")
-                .build();
-    }
-
     @Override
     protected Class<?> getSpringConfigurationClass() {
         return MigrationSpringConfig.class;
index 1ec2adb..cec7131 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.resolver;
 
-
-import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
-
 import java.util.List;
+import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
 
 public interface MigrationResolver {
 
     /**
-     *
      * @return a list of {@code T}
      */
     List<IMigrationStage> resolveMigrations();
-
 }
index 03de809..0fae84d 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.resolver;
 
-
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+import java.util.stream.Collectors;
 import org.openecomp.sdc.asdctool.migration.core.DBVersion;
 import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
 import org.openecomp.sdc.asdctool.migration.core.task.Migration;
 import org.openecomp.sdc.asdctool.migration.core.task.PostMigration;
 import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
 
-import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.List;
-import java.util.stream.Collectors;
-
 public class SpringBeansMigrationResolver implements MigrationResolver {
 
     private List<Migration> migrations = new ArrayList<>();
     private List<PostMigration> postMigrations = new ArrayList<>();
-    
     private SdcRepoService sdcRepoService;
 
     public SpringBeansMigrationResolver(List<Migration> migrations, List<PostMigration> postMigrations, SdcRepoService sdcRepoService) {
@@ -57,7 +53,7 @@ public class SpringBeansMigrationResolver implements MigrationResolver {
     void setMigrations(List<Migration> migrations) {
         this.migrations = migrations;
     }
-    
+
     //package private for testing
     void setPostMigrations(List<PostMigration> postMigrations) {
         this.postMigrations = postMigrations;
@@ -65,9 +61,7 @@ public class SpringBeansMigrationResolver implements MigrationResolver {
 
     private List<IMigrationStage> resolveNonExecutedMigrations() {
         DBVersion latestDBVersion = sdcRepoService.getLatestDBVersion();
-        return migrations.stream()
-                .filter(mig -> isMigrationVersionGreaterThanLatestVersion(latestDBVersion, mig))
-                .collect(Collectors.toList());
+        return migrations.stream().filter(mig -> isMigrationVersionGreaterThanLatestVersion(latestDBVersion, mig)).collect(Collectors.toList());
     }
 
     private boolean isMigrationVersionGreaterThanLatestVersion(DBVersion latestDBVersion, Migration mig) {
index 9141295..ba81b08 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.service;
 
+import java.math.BigInteger;
 import org.openecomp.sdc.asdctool.migration.core.DBVersion;
 import org.openecomp.sdc.asdctool.migration.dao.MigrationTasksDao;
 import org.openecomp.sdc.be.resources.data.MigrationTaskEntry;
 
-import java.math.BigInteger;
-
 public class SdcRepoService {
 
     private MigrationTasksDao migrationTasksDao;
@@ -48,7 +46,4 @@ public class SdcRepoService {
     public void createMigrationTask(MigrationTaskEntry migrationTaskEntry) {
         migrationTasksDao.createMigrationTask(migrationTaskEntry);
     }
-
-
-
 }
index f7474ee..6448249 100644 (file)
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.tasks.handlers;
 
 public interface OutputHandler {
@@ -27,5 +26,4 @@ public interface OutputHandler {
     void addRecord(Object... record);
 
     boolean writeOutputAndCloseFile();
-
 }
index 758589c..145e58b 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.migration.tasks.handlers;
 
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.text.SimpleDateFormat;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.poi.hssf.usermodel.HSSFWorkbook;
 import org.apache.poi.ss.usermodel.Cell;
@@ -28,70 +30,66 @@ import org.apache.poi.ss.usermodel.Sheet;
 import org.apache.poi.ss.usermodel.Workbook;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.text.SimpleDateFormat;
-
 public class XlsOutputHandler implements OutputHandler {
 
-       private static final Logger log = Logger.getLogger(XlsOutputHandler.class);
-       private Workbook workbook;
-       private Sheet activeSheet;
-       private int rowCount = 0;
-       private String sheetName;
+    private static final Logger log = Logger.getLogger(XlsOutputHandler.class);
+    private Workbook workbook;
+    private Sheet activeSheet;
+    private int rowCount = 0;
+    private String sheetName;
     private String outputPath;
-       
-       public XlsOutputHandler(String outputPath, String sheetName, Object... title){
-               this.outputPath = outputPath;
-           this.sheetName = sheetName;
-               initiate(sheetName, title);
-       }
-       
-       @Override
-       public void initiate(String sheetName, Object... title) {
-               log.info("#initiate - Starting to initiate XlsOutputHandler. ");
-               workbook = new HSSFWorkbook();
-               activeSheet = workbook.createSheet(sheetName);
-               addRecord(title);
-               log.info("#initiate - XlsOutputHandler has been initiated. ");
-       }
 
-       @Override
-       public void addRecord(Object... record) {
-               log.info("#addRecord - Going to add record {} to output. ", record);
+    public XlsOutputHandler(String outputPath, String sheetName, Object... title) {
+        this.outputPath = outputPath;
+        this.sheetName = sheetName;
+        initiate(sheetName, title);
+    }
+
+    @Override
+    public void initiate(String sheetName, Object... title) {
+        log.info("#initiate - Starting to initiate XlsOutputHandler. ");
+        workbook = new HSSFWorkbook();
+        activeSheet = workbook.createSheet(sheetName);
+        addRecord(title);
+        log.info("#initiate - XlsOutputHandler has been initiated. ");
+    }
+
+    @Override
+    public void addRecord(Object... record) {
+        log.info("#addRecord - Going to add record {} to output. ", record);
         Row currentRow = activeSheet.createRow(rowCount++);
-               log.info("#addRecord - A new row has been created");
+        log.info("#addRecord - A new row has been created");
         int columnCount = 0;
         Cell cell;
-        for(Object cellValue : record){
+        for (Object cellValue : record) {
             cell = currentRow.createCell(columnCount++);
             if (cellValue != null) {
                 cell.setCellValue(cellValue.toString());
             }
         }
-       }
+    }
 
-       @Override
-       public boolean writeOutputAndCloseFile() {
-               if (rowCount <= 1) {
-                       return false;
-               }
+    @Override
+    public boolean writeOutputAndCloseFile() {
+        if (rowCount <= 1) {
+            return false;
+        }
         try {
-                       FileOutputStream file = getXlsFile();
-                       workbook.write(file);
-                       file.close();
-                       return true;
-               } catch (Exception e) {
-                       log.debug("#writeOutputAndCloseFile - Failed to write an output file. The {} exception occurred. ", e.getMessage());
-                       return false;
-               }
-       }
+            FileOutputStream file = getXlsFile();
+            workbook.write(file);
+            file.close();
+            return true;
+        } catch (Exception e) {
+            log.debug("#writeOutputAndCloseFile - Failed to write an output file. The {} exception occurred. ", e.getMessage());
+            return false;
+        }
+    }
 
-       public String getOutputPath() {
-               return outputPath;
-       }
+    public String getOutputPath() {
+        return outputPath;
+    }
 
-       FileOutputStream getXlsFile() throws FileNotFoundException {
+    FileOutputStream getXlsFile() throws FileNotFoundException {
         String fileName = buildFileName();
         log.info("#getXlsFile - Going to write the output file {}. ", fileName);
         return new FileOutputStream(fileName);
@@ -99,14 +97,10 @@ public class XlsOutputHandler implements OutputHandler {
 
     private String buildFileName() {
         StringBuilder fileName = new StringBuilder();
-        if(StringUtils.isNotEmpty(outputPath)){
+        if (StringUtils.isNotEmpty(outputPath)) {
             fileName.append(outputPath);
         }
-        return fileName.append(sheetName)
-                .append("_")
-                .append(new SimpleDateFormat("yyyyMMdd_HHmmss").format(System.currentTimeMillis()))
-                .append(".xls")
-                               .toString();
+        return fileName.append(sheetName).append("_").append(new SimpleDateFormat("yyyyMMdd_HHmmss").format(System.currentTimeMillis()))
+            .append(".xls").toString();
     }
-
 }
index e5ea76a..6dd3760 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.servlets;
 
-import org.openecomp.sdc.common.log.wrappers.Logger;
-
 import javax.ws.rs.GET;
 import javax.ws.rs.Path;
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
+import org.openecomp.sdc.common.log.wrappers.Logger;
 
 @Path("/entrypoint")
 public class EntryPoint {
 
-       private static Logger log = Logger.getLogger(EntryPoint.class.getName());
-
-       @GET
-       @Path("test")
-       @Produces(MediaType.TEXT_PLAIN)
-       public String test() {
+    private static Logger log = Logger.getLogger(EntryPoint.class.getName());
 
-               log.info("In test");
-               return "Test" + System.currentTimeMillis();
-       }
+    @GET
+    @Path("test")
+    @Produces(MediaType.TEXT_PLAIN)
+    public String test() {
+        log.info("In test");
+        return "Test" + System.currentTimeMillis();
+    }
 }
index 4e9428d..a88728b 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.servlets;
 
-import org.apache.commons.configuration.BaseConfiguration;
-import org.apache.commons.configuration.Configuration;
-import org.apache.tinkerpop.gremlin.structure.io.graphml.GraphMLWriter;
-import org.glassfish.jersey.media.multipart.FormDataParam;
-import org.janusgraph.core.JanusGraph;
-import org.openecomp.sdc.asdctool.Utils;
-import org.openecomp.sdc.common.log.wrappers.Logger;
-
-import javax.ws.rs.Consumes;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
 import java.io.BufferedOutputStream;
 import java.io.BufferedReader;
 import java.io.ByteArrayOutputStream;
@@ -44,125 +29,113 @@ import java.io.OutputStream;
 import java.util.Map.Entry;
 import java.util.Optional;
 import java.util.Properties;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import org.apache.commons.configuration.BaseConfiguration;
+import org.apache.commons.configuration.Configuration;
+import org.apache.tinkerpop.gremlin.structure.io.graphml.GraphMLWriter;
+import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.janusgraph.core.JanusGraph;
+import org.openecomp.sdc.asdctool.Utils;
+import org.openecomp.sdc.common.log.wrappers.Logger;
 
 @Path("/janusgraph")
 public class ExportImportJanusGraphServlet {
 
-       private static Logger log = Logger.getLogger(ExportImportJanusGraphServlet.class.getName());
-
-       @GET
-       @Path("export")
-       @Consumes(MediaType.MULTIPART_FORM_DATA)
-       @Produces(MediaType.APPLICATION_OCTET_STREAM)
-       public Response export(@FormDataParam("janusGraphProperties") File janusGraphPropertiesFile,
-                       @FormDataParam("metadata") String exportGraphMetadata) {
-
-               printJanusGraphConfigFile(janusGraphPropertiesFile);
-               printMetadata(exportGraphMetadata);
-
-               Properties janusGraphProperties = convertFileToProperties(janusGraphPropertiesFile);
-
-               if (janusGraphProperties == null) {
-                       Response response = Utils.buildOkResponse(400, "cannot parse janusgraph properties file", null);
-                       return response;
-               }
-
-               Configuration conf = new BaseConfiguration();
-               for (Entry<Object, Object> entry : janusGraphProperties.entrySet()) {
-                       String key = entry.getKey().toString();
-                       Object value = entry.getValue();
-                       conf.setProperty(key, value);
-               }
-
-               conf.setProperty("storage.machine-id-appendix", System.currentTimeMillis() % 1000);
-
-               Optional<JanusGraph> openGraph = Utils.openGraph(conf);
-               if (openGraph.isPresent()) {
-                       try {
-                               return Utils.buildOkResponse(200, "ok man", null);
-                       } finally {
-                               openGraph.get().close();
-                       }
-               } else {
-                       return Utils.buildOkResponse(500, "failed to open graph", null);
-               }
-       }
-
-       private Properties convertFileToProperties(File janusGraphPropertiesFile) {
-
-               Properties properties = new Properties();
-
-               try (FileReader fileReader = new FileReader(janusGraphPropertiesFile)){
-                       properties.load(fileReader);
-               } catch (Exception e) {
-                       log.error("Failed to convert file to properties", e);
-                       return null;
-               }
-
-               return properties;
-       }
-
-       private void printJanusGraphConfigFile(File janusGraphPropertiesFile) {
-
-               if (log.isDebugEnabled()) {
-                       StringBuilder builder = new StringBuilder();
-                       try (BufferedReader br = new BufferedReader(new FileReader(janusGraphPropertiesFile))) {
-                               String line;
-                               while ((line = br.readLine()) != null) {
-                                       builder.append(line + Utils.NEW_LINE);
-                               }
-
-                               log.debug(builder.toString());
-
-                       } catch (IOException e) {
-                               log.error("Cannot print janusgraph properties file", e);
-                       }
-               }
-       }
-
-       private void printMetadata(String exportGraphMetadata) {
-
-               log.debug(exportGraphMetadata);
-
-       }
-
-       public String exportGraph(JanusGraph graph, String outputDirectory) {
-
-               String result = null;
-
-               // GraphMLWriter graphMLWriter = new GraphMLWriter(graph);
-               GraphMLWriter graphMLWriter = GraphMLWriter.build().create();
-
-               String outputFile = outputDirectory + File.separator + "exportGraph." + System.currentTimeMillis() + ".ml";
-
-               OutputStream out = null;
-               try {
-                       out = new BufferedOutputStream(new ByteArrayOutputStream());
-
-                       // graphMLWriter.outputGraph(out);
-
-                       graphMLWriter.writeGraph(out, graph);
-
-                       // graph.commit();
-                       graph.tx().commit();
-
-                       result = outputFile;
-
-               } catch (Exception e) {
-                       e.printStackTrace();
-                       // graph.rollback();
-                       graph.tx().rollback();
-               } finally {
-                       try {
-                               if (out != null) {
-                                       out.close();
-                               }
-                       } catch (IOException e) {
-                               e.printStackTrace();
-                       }
-               }
-               return result;
-
-       }
-
+    private static Logger log = Logger.getLogger(ExportImportJanusGraphServlet.class.getName());
+
+    @GET
+    @Path("export")
+    @Consumes(MediaType.MULTIPART_FORM_DATA)
+    @Produces(MediaType.APPLICATION_OCTET_STREAM)
+    public Response export(@FormDataParam("janusGraphProperties") File janusGraphPropertiesFile,
+                           @FormDataParam("metadata") String exportGraphMetadata) {
+        printJanusGraphConfigFile(janusGraphPropertiesFile);
+        printMetadata(exportGraphMetadata);
+        Properties janusGraphProperties = convertFileToProperties(janusGraphPropertiesFile);
+        if (janusGraphProperties == null) {
+            Response response = Utils.buildOkResponse(400, "cannot parse janusgraph properties file", null);
+            return response;
+        }
+        Configuration conf = new BaseConfiguration();
+        for (Entry<Object, Object> entry : janusGraphProperties.entrySet()) {
+            String key = entry.getKey().toString();
+            Object value = entry.getValue();
+            conf.setProperty(key, value);
+        }
+        conf.setProperty("storage.machine-id-appendix", System.currentTimeMillis() % 1000);
+        Optional<JanusGraph> openGraph = Utils.openGraph(conf);
+        if (openGraph.isPresent()) {
+            try {
+                return Utils.buildOkResponse(200, "ok man", null);
+            } finally {
+                openGraph.get().close();
+            }
+        } else {
+            return Utils.buildOkResponse(500, "failed to open graph", null);
+        }
+    }
+
+    private Properties convertFileToProperties(File janusGraphPropertiesFile) {
+        Properties properties = new Properties();
+        try (FileReader fileReader = new FileReader(janusGraphPropertiesFile)) {
+            properties.load(fileReader);
+        } catch (Exception e) {
+            log.error("Failed to convert file to properties", e);
+            return null;
+        }
+        return properties;
+    }
+
+    private void printJanusGraphConfigFile(File janusGraphPropertiesFile) {
+        if (log.isDebugEnabled()) {
+            StringBuilder builder = new StringBuilder();
+            try (BufferedReader br = new BufferedReader(new FileReader(janusGraphPropertiesFile))) {
+                String line;
+                while ((line = br.readLine()) != null) {
+                    builder.append(line + Utils.NEW_LINE);
+                }
+                log.debug(builder.toString());
+            } catch (IOException e) {
+                log.error("Cannot print janusgraph properties file", e);
+            }
+        }
+    }
+
+    private void printMetadata(String exportGraphMetadata) {
+        log.debug(exportGraphMetadata);
+    }
+
+    public String exportGraph(JanusGraph graph, String outputDirectory) {
+        String result = null;
+        // GraphMLWriter graphMLWriter = new GraphMLWriter(graph);
+        GraphMLWriter graphMLWriter = GraphMLWriter.build().create();
+        String outputFile = outputDirectory + File.separator + "exportGraph." + System.currentTimeMillis() + ".ml";
+        OutputStream out = null;
+        try {
+            out = new BufferedOutputStream(new ByteArrayOutputStream());
+            // graphMLWriter.outputGraph(out);
+            graphMLWriter.writeGraph(out, graph);
+            // graph.commit();
+            graph.tx().commit();
+            result = outputFile;
+        } catch (Exception e) {
+            e.printStackTrace();
+            // graph.rollback();
+            graph.tx().rollback();
+        } finally {
+            try {
+                if (out != null) {
+                    out.close();
+                }
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+        }
+        return result;
+    }
 }
index 98e458e..c490013 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.simulator.tenant;
 
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Consumer;
 import org.openecomp.sdc.be.config.ConfigurationManager;
 import org.openecomp.sdc.common.api.ConfigurationSource;
 import org.openecomp.sdc.common.impl.ExternalConfiguration;
@@ -27,63 +29,51 @@ import org.openecomp.sdc.common.impl.FSConfigurationSource;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.HashMap;
-import java.util.Map;
-import java.util.function.Consumer;
-
 /**
  * Main class of utility imports CSV file into the specified table
  * The old stuff of the table is removed.
- * 
+ *
  * Accepts 3 mandatory arguments:
  *                     1. Path to configuration folder
  *                     2. Name of the table
  *                     3. Path to the CSV file
- *   
+ *
  *  Example of usage:
  *             \src\main\resources\config\ operationalenvironment "C:\Users\dr2032\Documents\env.csv"
- *  
- *  See relevant import handler for example of csv file line. 
- *  
+ *
+ * See relevant import handler for example of csv file line.
+ *
  *  The list of supported tables:
  *             1. operationalenvironment
- *  
- *  
- * @author dr2032
  *
+ *
+ * @author dr2032
  */
 public class ImportCassandraTableTool {
-       private static final Logger LOGGER = LoggerFactory.getLogger(ImportCassandraTableTool.class);
-       
-       private static Map<String, Consumer<String>> mapHandlers = new HashMap<>();
-       
-       static {
-               mapHandlers.put(OperationalEvnironmentImportHandler.getTableName().toLowerCase(), OperationalEvnironmentImportHandler::execute);
-       }
-       
-       public static void main(String[] args) {
-               if(args.length == 3) {
-                       String appConfigDir = args[0];
-                       String tableName = args[1];
-                       String fileName = args[2];
-                       
-                       ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
-                       new ConfigurationManager(configurationSource);
-               
-                       Consumer<String> executor = mapHandlers.get(tableName.toLowerCase());
-                       if (executor != null) {
-                               executor.accept(fileName);
-                       } 
-                       else {
-                               LOGGER.warn("Import to table [{}] is not supported yet!", tableName);
-                       }
-               }
-               else {
-                       LOGGER.warn("Invalid number of arguments. The 1st shoduld be path to config dir, the 2nd - table name and the 3rd - path to CSV file.");
-               }
-               
-               
-               System.exit(0);
-       }
-       
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(ImportCassandraTableTool.class);
+    private static Map<String, Consumer<String>> mapHandlers = new HashMap<>();
+
+    static {
+        mapHandlers.put(OperationalEvnironmentImportHandler.getTableName().toLowerCase(), OperationalEvnironmentImportHandler::execute);
+    }
+
+    public static void main(String[] args) {
+        if (args.length == 3) {
+            String appConfigDir = args[0];
+            String tableName = args[1];
+            String fileName = args[2];
+            ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
+            new ConfigurationManager(configurationSource);
+            Consumer<String> executor = mapHandlers.get(tableName.toLowerCase());
+            if (executor != null) {
+                executor.accept(fileName);
+            } else {
+                LOGGER.warn("Import to table [{}] is not supported yet!", tableName);
+            }
+        } else {
+            LOGGER.warn("Invalid number of arguments. The 1st should be path to config dir, the 2nd - table name and the 3rd - path to CSV file.");
+        }
+        System.exit(0);
+    }
 }
index 2a88fa6..e479c30 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.simulator.tenant;
 
 import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
@@ -27,13 +26,14 @@ import org.springframework.context.annotation.Configuration;
 
 @Configuration
 public class ImportTableConfig {
-       @Bean(name = "cassandra-client")
-       public CassandraClient cassandraClient() {
-               return new CassandraClient();
-       }
-       
-       @Bean(name = "operational-environment-dao")
-       public OperationalEnvironmentDao operationalEnvironmentDao(CassandraClient cassandraClient) {
-               return new OperationalEnvironmentDao(cassandraClient);
-       }
+
+    @Bean(name = "cassandra-client")
+    public CassandraClient cassandraClient() {
+        return new CassandraClient();
+    }
+
+    @Bean(name = "operational-environment-dao")
+    public OperationalEnvironmentDao operationalEnvironmentDao(CassandraClient cassandraClient) {
+        return new OperationalEnvironmentDao(cassandraClient);
+    }
 }
index 26ce67f..57506ab 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,7 +17,6 @@
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.simulator.tenant;
 
 import com.opencsv.bean.CsvBindByPosition;
@@ -25,39 +24,30 @@ import org.openecomp.sdc.be.datatypes.enums.EnvironmentStatusEnum;
 
 /**
  * Represents line in CSV file should be imported into "operationalenvironment" table.
- * @author dr2032
  *
+ * @author dr2032
  */
 public class OperationalEnvironment {
-       @CsvBindByPosition(position = 0)
-    private String environmentId;
-
-       @CsvBindByPosition(position = 1)
-       private String dmaapUebAddress;
 
-       @CsvBindByPosition(position = 2)
-       private String ecompWorkloadContext;
-       
-       @CsvBindByPosition(position = 3)
-       private Boolean isProduction;
-       
-       @CsvBindByPosition(position = 4)
-       private String lastModified;
-
-       @CsvBindByPosition(position = 5)
+    @CsvBindByPosition(position = 0)
+    private String environmentId;
+    @CsvBindByPosition(position = 1)
+    private String dmaapUebAddress;
+    @CsvBindByPosition(position = 2)
+    private String ecompWorkloadContext;
+    @CsvBindByPosition(position = 3)
+    private Boolean isProduction;
+    @CsvBindByPosition(position = 4)
+    private String lastModified;
+    @CsvBindByPosition(position = 5)
     private String status;
-       
-       @CsvBindByPosition(position = 6)
-       private String tenant;
-
-       @CsvBindByPosition(position = 7)
+    @CsvBindByPosition(position = 6)
+    private String tenant;
+    @CsvBindByPosition(position = 7)
     private String uebApikey;
-
     @CsvBindByPosition(position = 8)
     private String uebSecretKey;
 
-
-
     public String getLastModified() {
         return lastModified;
     }
@@ -66,7 +56,6 @@ public class OperationalEnvironment {
         this.lastModified = lastModified;
     }
 
-   
     public String getEnvironmentId() {
         return environmentId;
     }
@@ -134,5 +123,4 @@ public class OperationalEnvironment {
     public void setUebSecretKey(String uebSecretKey) {
         this.uebSecretKey = uebSecretKey;
     }
-
 }
index 0de1011..fb3af7f 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.simulator.tenant;
 
 import com.opencsv.bean.CsvToBeanBuilder;
-import org.openecomp.sdc.be.dao.cassandra.OperationalEnvironmentDao;
-import org.openecomp.sdc.be.dao.cassandra.schema.Table;
-import org.openecomp.sdc.be.resources.data.OperationalEnvironmentEntry;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.context.annotation.AnnotationConfigApplicationContext;
-
 import java.io.FileNotFoundException;
 import java.io.FileReader;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.List;
 import java.util.stream.Collectors;
+import org.openecomp.sdc.be.dao.cassandra.OperationalEnvironmentDao;
+import org.openecomp.sdc.be.dao.cassandra.schema.Table;
+import org.openecomp.sdc.be.resources.data.OperationalEnvironmentEntry;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 
 /**
- * Imports CSV file into 
+ * Imports CSV file into the specified table.
  * Example of line in the file
  *             00002,135.42.43.45:5757,Context,FALSE,2017-10-11 12:02:01,INITIAL,personal tenant,abcd123456789,bbbbbbbbbbb
  *             Date format is fixed: yyyy-MM-dd HH:mm:ss
  * @author dr2032
- *
  */
 public class OperationalEvnironmentImportHandler {
-       private static final Logger LOGGER = LoggerFactory.getLogger(OperationalEvnironmentImportHandler.class);
-       private static final String TABLE_NAME = Table.SDC_OPERATIONAL_ENVIRONMENT.getTableDescription().getTableName();
-       
-       private OperationalEvnironmentImportHandler() {
-               
-       }
-       
-       public static void execute(String fileName) {
-               try {
-                       List<OperationalEnvironment> beans = new CsvToBeanBuilder<OperationalEnvironment>(new FileReader(fileName))
-                                      .withType(OperationalEnvironment.class).build().parse();
-                       
-                       List<OperationalEnvironmentEntry> entries = map(beans);
-                       modifyDb(entries);
-                       LOGGER.info("File {} has been successfully imported  into the [{}] table.", fileName, TABLE_NAME);
-               } catch (IllegalStateException | FileNotFoundException e) {
-                       String errorMessage = String.format("Failed to import file: %s into the [%s] table ", fileName, TABLE_NAME);
-                       LOGGER.error(errorMessage, e);
-               }
-       }
-       
-       private static List<OperationalEnvironmentEntry> map(List<OperationalEnvironment> beans) {
-               return beans.stream()
-                               .map(OperationalEvnironmentImportHandler::map)
-                               .collect(Collectors.toList());
-               
-       }
-       
-       private static OperationalEnvironmentEntry map(OperationalEnvironment perationalEnvironment) {
-               OperationalEnvironmentEntry entry = new OperationalEnvironmentEntry();
-               
-               entry.setEnvironmentId(perationalEnvironment.getEnvironmentId());
-               entry.addDmaapUebAddress(perationalEnvironment.getDmaapUebAddress());
-               entry.setEcompWorkloadContext(perationalEnvironment.getEcompWorkloadContext());
-               entry.setIsProduction(perationalEnvironment.getIsProduction());
-               
-               SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-               try {
-                       entry.setLastModified(formatter.parse(perationalEnvironment.getLastModified()));
-               } catch (ParseException e) {
-                       LOGGER.error("Faild to pase Date, expected format is [yyyy-MM-dd HH:mm:ss].", e);
-                       throw new RuntimeException(e);
-               }
-               
-               entry.setStatus(perationalEnvironment.getStatus());
-               entry.setTenant(perationalEnvironment.getTenant());
-               entry.setUebApikey(perationalEnvironment.getUebApikey());
-               entry.setUebSecretKey(perationalEnvironment.getUebSecretKey());
-               
-               return entry;
-               
-       }
-       
-       private static OperationalEnvironmentDao createDaoObj() {
-               AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ImportTableConfig.class);
-               return (OperationalEnvironmentDao) context.getBean("operational-environment-dao");
-       }
-       
-       private static void modifyDb(List<OperationalEnvironmentEntry> environments) {
-               OperationalEnvironmentDao daoObj = createDaoObj();
-               
-               daoObj.deleteAll();
-               
-               environments.forEach(daoObj::save);
-       }
 
-       public static String getTableName() {
-               return TABLE_NAME;
-       }
+    private static final Logger LOGGER = LoggerFactory.getLogger(OperationalEvnironmentImportHandler.class);
+    private static final String TABLE_NAME = Table.SDC_OPERATIONAL_ENVIRONMENT.getTableDescription().getTableName();
+
+    private OperationalEvnironmentImportHandler() {
+    }
+
+    public static void execute(String fileName) {
+        try {
+            List<OperationalEnvironment> beans = new CsvToBeanBuilder<OperationalEnvironment>(new FileReader(fileName))
+                .withType(OperationalEnvironment.class).build().parse();
+            List<OperationalEnvironmentEntry> entries = map(beans);
+            modifyDb(entries);
+            LOGGER.info("File {} has been successfully imported into the [{}] table.", fileName, TABLE_NAME);
+        } catch (IllegalStateException | FileNotFoundException e) {
+            String errorMessage = String.format("Failed to import file: %s into the [%s] table ", fileName, TABLE_NAME);
+            LOGGER.error(errorMessage, e);
+        }
+    }
+
+    private static List<OperationalEnvironmentEntry> map(List<OperationalEnvironment> beans) {
+        return beans.stream().map(OperationalEvnironmentImportHandler::map).collect(Collectors.toList());
+    }
+
+    private static OperationalEnvironmentEntry map(OperationalEnvironment operationalEnvironment) {
+        OperationalEnvironmentEntry entry = new OperationalEnvironmentEntry();
+        entry.setEnvironmentId(operationalEnvironment.getEnvironmentId());
+        entry.addDmaapUebAddress(operationalEnvironment.getDmaapUebAddress());
+        entry.setEcompWorkloadContext(operationalEnvironment.getEcompWorkloadContext());
+        entry.setIsProduction(operationalEnvironment.getIsProduction());
+        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+        try {
+            entry.setLastModified(formatter.parse(operationalEnvironment.getLastModified()));
+        } catch (ParseException e) {
+            LOGGER.error("Failed to parse Date, expected format is [yyyy-MM-dd HH:mm:ss].", e);
+            throw new RuntimeException(e);
+        }
+        entry.setStatus(operationalEnvironment.getStatus());
+        entry.setTenant(operationalEnvironment.getTenant());
+        entry.setUebApikey(operationalEnvironment.getUebApikey());
+        entry.setUebSecretKey(operationalEnvironment.getUebSecretKey());
+        return entry;
+    }
+
+    private static OperationalEnvironmentDao createDaoObj() {
+        AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ImportTableConfig.class);
+        return (OperationalEnvironmentDao) context.getBean("operational-environment-dao");
+    }
 
+    private static void modifyDb(List<OperationalEnvironmentEntry> environments) {
+        OperationalEnvironmentDao daoObj = createDaoObj();
+        daoObj.deleteAll();
+        environments.forEach(daoObj::save);
+    }
 
+    public static String getTableName() {
+        return TABLE_NAME;
+    }
 }
index ee995dc..b768590 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.utils;
 
 public class ConsoleWriter {
+
     private static String tabbedData(String data, int min) {
         // System.out.println(); //for debug
-
         int tabcount = 0;
         int len = 8 * min;
-
         while (data.length() < len) {
             tabcount++;
             len = len - 8;
         }
-
         // System.out.println("debug: tabcount=" + tabcount);
+
         // System.out.print("debug adding tabs... ");
         for (int x = 0; x < tabcount; x++) {
             // System.out.print("tab ");
             data = data + "\t";
         }
         // System.out.println(); //for debug
-
         return data;
     }
 
index 94b1ece..0c78e17 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * limitations under the License.
  * ============LICENSE_END=========================================================
  */
-
 package org.openecomp.sdc.asdctool.utils;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 import com.google.gson.JsonObject;
 import com.google.gson.JsonParser;
-import org.openecomp.sdc.be.dao.jsongraph.utils.JsonParserUtils;
-
 import java.io.FileWriter;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import org.openecomp.sdc.be.dao.jsongraph.utils.JsonParserUtils;
 
 public class ReportWriter {
+
     FileWriter file;
-    public ReportWriter(String reportName) {
 
+    public ReportWriter(String reportName) {
         StringBuilder sb = new StringBuilder();
         Path path = Paths.get("/var/tmp/");
-        if ( path.toFile().exists() ) {
+        if (path.toFile().exists()) {
             sb.append("/var/tmp/");
         }
         sb.append("report_").append(reportName).append("_").append(System.currentTimeMillis()).append(".json");
@@ -53,10 +52,8 @@ public class ReportWriter {
         if (file != null) {
             JsonParser parser = new JsonParser();
             JsonObject json = parser.parse(JsonParserUtils.toJson(objectToWrite)).getAsJsonObject();
-
             Gson gson = new GsonBuilder().setPrettyPrinting().create();
             String prettyJson = gson.toJson(json);
-            
             file.write(prettyJson);
             file.flush();
         }