Move CSV generation folder to target (Gerrit change 27/125527/8)
author: vasraz <vasyl.razinkov@est.tech>
Mon, 15 Nov 2021 15:45:28 +0000 (15:45 +0000)
committer: Vasyl Razinkov <vasyl.razinkov@est.tech>
Mon, 15 Nov 2021 15:46:00 +0000 (15:46 +0000)
Signed-off-by: Vasyl Razinkov <vasyl.razinkov@est.tech>
Change-Id: I379f20499eeb81f2dc82a943eaef4133ec6f6408
Issue-ID: SDC-3771

asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutor.java
asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executor/ArtifactValidatorExecutorContract.java
asdctool/src/test/java/org/openecomp/sdc/asdctool/impl/validator/executor/IArtifactValidatorExecutorContract.java
asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/task/MigrationTasksTest.java

index d6aad24..1765bd5 100644 (file)
  */
 package org.openecomp.sdc.asdctool.impl;
 
-import static java.nio.charset.StandardCharsets.UTF_8;
-
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 import com.google.gson.JsonArray;
 import fj.data.Either;
 import java.io.BufferedWriter;
+import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.EnumMap;
@@ -44,9 +44,9 @@ import org.openecomp.sdc.asdctool.impl.validator.utils.VfModuleArtifactPayloadEx
 import org.openecomp.sdc.be.components.distribution.engine.VfModuleArtifactPayload;
 import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
 import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
 import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
 import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
@@ -85,9 +85,10 @@ import org.openecomp.sdc.be.tosca.ToscaRepresentation;
 import org.openecomp.sdc.common.api.ArtifactGroupTypeEnum;
 import org.openecomp.sdc.common.api.ArtifactTypeEnum;
 import org.openecomp.sdc.common.api.Constants;
-import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.openecomp.sdc.common.util.GeneralUtility;
 import org.openecomp.sdc.exception.ResponseFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 
 @org.springframework.stereotype.Component("artifactUuidFix")
@@ -95,7 +96,7 @@ public class ArtifactUuidFix {
 
     private static final String MIGRATION1707_ARTIFACT_UUID_FIX = "Migration1707ArtifactUuidFix  fix group:  group name {} correct artifactId {} artifactUUID {} ";
     private static final String FAILED_TO_FETCH_VF_RESOURCES = "Failed to fetch vf resources ";
-    private static Logger log = Logger.getLogger(ArtifactUuidFix.class.getName());
+    private static final Logger log = LoggerFactory.getLogger(ArtifactUuidFix.class);
     private JanusGraphDao janusGraphDao;
     private ToscaOperationFacade toscaOperationFacade;
     private ToscaExportHandler toscaExportUtils;
@@ -150,8 +151,8 @@ public class ArtifactUuidFix {
 
     private boolean fetchFaultVf(List<Resource> vfLst, long time) {
         log.info("Find fault VF ");
-        String fileName = "fault_" + time + ".csv";
-        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+        try (Writer writer = new BufferedWriter(
+            new OutputStreamWriter(new FileOutputStream(File.createTempFile("fault_", "" + time)), StandardCharsets.UTF_8))) {
             writer.write("vf name, vf id, state, version\n");
             Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
             hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
@@ -213,8 +214,8 @@ public class ArtifactUuidFix {
             log.info("No services as input");
             return true;
         }
-        String fileName = "problemVf_" + time + ".csv";
-        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+        try (Writer writer = new BufferedWriter(
+            new OutputStreamWriter(new FileOutputStream(File.createTempFile("problemVf_", "" + time)), StandardCharsets.UTF_8))) {
             writer.write("vf name, vf id, state, version, example service name\n");
             Set<String> vfIds = new HashSet<>();
             for (Service service : serviceList) {
@@ -242,7 +243,6 @@ public class ArtifactUuidFix {
                     }
                 }
             }
-            log.info("output file with list of Vf : {}", fileName);
         } catch (Exception e) {
             log.info("Failed to fetch services ", e);
             return false;
@@ -254,8 +254,8 @@ public class ArtifactUuidFix {
 
     private boolean fetchServices(String fixServices, List<Service> serviceList, long time) {
         log.info("Find problem Services {}", fixServices);
-        String fileName = "problemService_" + time + ".csv";
-        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+        try (Writer writer = new BufferedWriter(
+            new OutputStreamWriter(new FileOutputStream(File.createTempFile("problemService_", "" + time)), StandardCharsets.UTF_8))) {
             writer.write("service name, service id, state, version\n");
             Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
             hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
@@ -293,7 +293,6 @@ public class ArtifactUuidFix {
                 }
                 janusGraphDao.commit();
             }
-            log.info("output file with list of services : {}", fileName);
         } catch (Exception e) {
             log.info("Failed to fetch services ", e);
             return false;
@@ -529,6 +528,7 @@ public class ArtifactUuidFix {
         return false;
     }
 
+
     private boolean fix(List<Resource> vfLst, List<Service> serviceList, Map<String, List<Component>> nodesToFixTosca,
                         Map<String, List<Component>> vfToFixTosca, Map<String, List<Component>> servicesToFixTosca) {
         boolean res = true;
@@ -541,8 +541,8 @@ public class ArtifactUuidFix {
         }
         Set<String> fixedIds = new HashSet<>();
         long time = System.currentTimeMillis();
-        String fileName = "FailedGenerateTosca" + "_" + time + ".csv";
-        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+        try (Writer writer = new BufferedWriter(
+            new OutputStreamWriter(new FileOutputStream(File.createTempFile("FailedGenerateTosca", "" + time)), StandardCharsets.UTF_8))) {
             writer.write("componentType, name, version, UID, UUID, invariantUUID, state\n");
             List<Component> failedList = new ArrayList<>();
             if (res && nodesToFixTosca != null && !nodesToFixTosca.isEmpty()) {
@@ -574,6 +574,7 @@ public class ArtifactUuidFix {
             if (servicesToFixTosca != null && !servicesToFixTosca.isEmpty()) {
                 generateAndSaveToscaArtifacts(servicesToFixTosca, fixedIds, serviceList, failedList);
             }
+
             for (Component component : serviceList) {
                 res = generateToscaPerComponent(fixedIds, component);
                 if (res) {
@@ -721,7 +722,7 @@ public class ArtifactUuidFix {
                 }
             }
         }
-        log.debug("Fix data on graph finished: VertexTypeEnum {} EdgeLabelEnum {} res {}", vertexTypeEnum.getName(), res);
+        log.debug("Fix data on graph finished: VertexTypeEnum {} EdgeLabelEnum {} res {}", vertexTypeEnum.getName(), edgeLabelEnum, res);
         return res;
     }
 
@@ -857,10 +858,10 @@ public class ArtifactUuidFix {
                     if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) && group.getArtifacts() != null) {
                         fixVfGroup(resource, artifactsMap, group);
                     }
-                    if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) && (group.getArtifacts() == null || group.getArtifacts()
-                        .isEmpty())) {
+                    if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) &&
+                        (group.getArtifacts() == null || group.getArtifacts().isEmpty())) {
                         log.debug("Migration1707ArtifactUuidFix  add group to delete list fix resource: id {},  name {} ", resource.getUniqueId(),
-                            resource.getName(), group.getName());
+                            resource.getName());
                         groupsToDelete.add(group);
                     }
                 }
@@ -1008,8 +1009,8 @@ public class ArtifactUuidFix {
     public boolean validateTosca(Map<String, List<Component>> vertices, Map<String, List<Component>> compToFix, String name) {
         boolean result = true;
         long time = System.currentTimeMillis();
-        String fileName = name + "_" + time + ".csv";
-        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+        try (Writer writer = new BufferedWriter(
+            new OutputStreamWriter(new FileOutputStream(File.createTempFile(name, "" + time)), StandardCharsets.UTF_8))) {
             writer.write("name, UUID, invariantUUID, state, version\n");
             for (Map.Entry<String, List<Component>> entry : vertices.entrySet()) {
                 List<Component> compList = entry.getValue();
@@ -1128,7 +1129,7 @@ public class ArtifactUuidFix {
 
     private void fillVfModuleInstHeatEnvPayload(Component parent, ComponentInstance instance, List<GroupInstance> groupsForCurrVF,
                                                 ArtifactDefinition vfModuleArtifact) {
-        log.debug("generate new vf module for component. name  {}, id {}, Version {}", instance.getName(), instance.getUniqueId());
+        log.debug("generate new vf module for component. name {}, id {}", instance.getName(), instance.getUniqueId());
         String uniqueId = UniqueIdBuilder
             .buildInstanceArtifactUniqueId(parent.getUniqueId(), instance.getUniqueId(), vfModuleArtifact.getArtifactLabel());
         vfModuleArtifact.setUniqueId(uniqueId);
index 531d54a..07f7ec0 100644 (file)
@@ -23,6 +23,7 @@ import static java.nio.charset.StandardCharsets.UTF_8;
 
 import fj.data.Either;
 import java.io.BufferedWriter;
+import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
@@ -36,9 +37,9 @@ import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
 import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
 import org.openecomp.sdc.be.model.ArtifactDefinition;
@@ -55,7 +56,7 @@ public abstract class ArtifactValidatorExecutor {
     private final ToscaOperationFacade toscaOperationFacade;
     private final String name;
 
-    public ArtifactValidatorExecutor(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade, String name) {
+    protected ArtifactValidatorExecutor(JanusGraphDao janusGraphDao, ToscaOperationFacade toscaOperationFacade, String name) {
         this.janusGraphDao = janusGraphDao;
         this.toscaOperationFacade = toscaOperationFacade;
         this.name = name;
@@ -72,7 +73,7 @@ public abstract class ArtifactValidatorExecutor {
             log.error("getVerticesToValidate failed " + resultsEither.right().value());
             return result;
         }
-        System.out.println("getVerticesToValidate: " + resultsEither.left().value().size() + " vertices to scan");
+        log.info("getVerticesToValidate: {} vertices to scan", resultsEither.left().value().size());
         List<GraphVertex> componentsList = resultsEither.left().value();
         componentsList.forEach(vertex -> {
             String ivariantUuid = (String) vertex.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID);
@@ -96,8 +97,8 @@ public abstract class ArtifactValidatorExecutor {
     public boolean validate(Map<String, List<Component>> vertices, String outputFilePath) {
         boolean result = true;
         long time = System.currentTimeMillis();
-        String fileName = outputFilePath + this.getName() + "_" + time + ".csv";
-        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF_8))) {
+        try (Writer writer = new BufferedWriter(
+            new OutputStreamWriter(new FileOutputStream(File.createTempFile(outputFilePath + this.getName(), "" + time)), UTF_8))) {
             writer.write("name, UUID, invariantUUID, state, version\n");
             Collection<List<Component>> collection = vertices.values();
             for (List<Component> compList : collection) {
index 621b832..a940407 100644 (file)
@@ -20,7 +20,6 @@
 
 package org.openecomp.sdc.asdctool.impl.validator.executor;
 
-import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.mockito.Mockito.mock;
 
 import java.util.HashMap;
@@ -51,8 +50,7 @@ public interface ArtifactValidatorExecutorContract {
 
         VertexTypeEnum type = null;
         Map<GraphPropertyEnum, Object> hasProps = null;
-        Assertions.assertThrows(NullPointerException.class, () ->
-            testSubject.getVerticesToValidate(type, hasProps)
+        Assertions.assertThrows(NullPointerException.class, () -> testSubject.getVerticesToValidate(type, hasProps)
         );
     }
 
@@ -68,8 +66,6 @@ public interface ArtifactValidatorExecutorContract {
         Map<String, List<Component>> vertices = new HashMap<>();
         vertices.put("stam", linkedList);
 
-        // Initially no outputFilePath was passed to this function (hence it is set to null)
-        // TODO: Fix this null and see if the argument is used by this function
-        assertFalse(testSubject.validate(vertices, null));
+        Assertions.assertFalse(testSubject.validate(vertices, "target/"));
     }
 }
index 8af265e..a21afb8 100644 (file)
 
 package org.openecomp.sdc.asdctool.impl.validator.executor;
 
+import static org.mockito.Mockito.mock;
+
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 import org.openecomp.sdc.be.dao.janusgraph.JanusGraphDao;
 import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
 
-import static org.mockito.Mockito.mock;
-
 public abstract class IArtifactValidatorExecutorContract {
 
     protected abstract IArtifactValidatorExecutor createTestSubject(
@@ -47,4 +47,3 @@ public abstract class IArtifactValidatorExecutorContract {
         );
     }
 }
-
index 9a6db29..5efb23f 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 
 package org.openecomp.sdc.asdctool.migration.task;
 
-import static org.junit.jupiter.api.Assertions.fail;
-
-import org.apache.commons.lang3.StringUtils;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.openecomp.sdc.asdctool.migration.core.DBVersion;
-import org.openecomp.sdc.asdctool.migration.core.task.Migration;
-import org.openecomp.sdc.asdctool.migration.scanner.ClassScanner;
-
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
+import org.apache.commons.lang.StringUtils;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.scanner.ClassScanner;
 
-
-public class MigrationTasksTest  {
+public class MigrationTasksTest {
 
     public static final String MIGRATIONS_BASE_PACKAGE = "org.openecomp.sdc.asdctool.migration.tasks";
     private List<Migration> migrations;
@@ -52,21 +49,25 @@ public class MigrationTasksTest  {
         Map<DBVersion, List<Migration>> migrationsByVersion = migrations.stream().collect(Collectors.groupingBy(Migration::getVersion));
         migrationsByVersion.forEach((version, migrations) -> {
             if (migrations.size() > 1) {
-                System.out.println(String.format("the following migration tasks have the same version %s. versions must be unique", version.toString()));
-                fail(String.format("migration tasks %s has same version %s. migration tasks versions must be unique.", getMigrationsNameAsString(migrations), version.toString()));
+                System.out.println(
+                    String.format("the following migration tasks have the same version %s. versions must be unique", version.toString()));
+                Assertions.fail(String.format("migration tasks %s has same version %s. migration tasks versions must be unique.",
+                    getMigrationsNameAsString(migrations), version.toString()));
             }
         });
     }
 
     @Test
     public void testNoTaskWithVersionGreaterThanCurrentVersion() throws Exception {
-        Set<Migration> migrationsWithVersionsGreaterThanCurrent = migrations.stream().filter(mig -> mig.getVersion().compareTo(DBVersion.DEFAULT_VERSION) > 0)
-                .collect(Collectors.toSet());
+        Set<Migration> migrationsWithVersionsGreaterThanCurrent = migrations.stream()
+            .filter(mig -> mig.getVersion().compareTo(DBVersion.DEFAULT_VERSION) > 0)
+            .collect(Collectors.toSet());
 
         if (!migrationsWithVersionsGreaterThanCurrent.isEmpty()) {
-            fail(String.format("migrations tasks %s have version which is greater than DBVersion.DEFAULT_VERSION %s. did you forget to update current version?",
-                    getMigrationsNameAsString(migrationsWithVersionsGreaterThanCurrent),
-                    DBVersion.DEFAULT_VERSION.toString()));
+            Assertions.fail(String.format(
+                "migrations tasks %s have version which is greater than DBVersion.DEFAULT_VERSION %s. did you forget to update current version?",
+                getMigrationsNameAsString(migrationsWithVersionsGreaterThanCurrent),
+                DBVersion.DEFAULT_VERSION.toString()));
         }
     }