first attempt to fix vulnerabilities 07/55607/1
author     Yuli Shlosberg <ys9693@att.com>
           Mon, 2 Jul 2018 08:10:17 +0000 (11:10 +0300)
committer  Yuli Shlosberg <ys9693@att.com>
           Mon, 2 Jul 2018 08:11:02 +0000 (11:11 +0300)
Change-Id: Iaf5f7db54511f730e09a8af8921d52ebf7f6c078
Issue-ID: SDC-1457
Signed-off-by: Yuli Shlosberg <ys9693@att.com>
24 files changed:
asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java
catalog-dao/src/main/java/org/openecomp/sdc/be/dao/cassandra/ComponentCassandraDao.java
catalog-dao/src/main/java/org/openecomp/sdc/be/dao/cassandra/schema/SdcSchemaBuilder.java
catalog-dao/src/main/java/org/openecomp/sdc/be/resources/data/ComponentCacheData.java
catalog-dao/src/main/java/org/openecomp/sdc/be/resources/data/ESArtifactData.java
catalog-model/src/main/java/org/openecomp/sdc/be/model/jsontitan/operations/NodeTypeOperation.java
common-app-api/src/main/java/org/openecomp/sdc/common/api/ResponseInfo.java
common-app-api/src/main/java/org/openecomp/sdc/common/config/generation/GenerateEcompErrorsCsv.java
common-app-api/src/main/java/org/openecomp/sdc/common/listener/AppContextListener.java
common-app-api/src/main/java/org/openecomp/sdc/common/util/ValidationUtils.java
common-app-api/src/main/java/org/openecomp/sdc/common/util/YamlToObjectConverter.java
common-app-api/src/main/java/org/openecomp/sdc/common/util/ZipUtil.java
common-be/src/main/java/org/openecomp/sdc/be/datatypes/elements/PropertyRule.java
security-utils/src/main/java/org/openecomp/sdc/security/SecurityUtil.java

asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java
index b433357..7d11040 100644 (file)
@@ -23,12 +23,16 @@ package org.openecomp.sdc.asdctool;
 import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
+import org.openecomp.sdc.asdctool.main.ArtifactUUIDFixMenu;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Hello world!
  *
  */
 public class App {
+       private static Logger log = LoggerFactory.getLogger(App.class);
        public static void main(String[] args) {
 
                String asdcToolPort = "8087";
@@ -52,12 +56,12 @@ public class App {
                try {
                        jettyServer.start();
 
-                       System.out.println("Server was started on port " + asdcToolPort);
+                       log.info("Server was started on port {}", asdcToolPort);
 
                        jettyServer.join();
 
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("Server failed to start - {}", e);
                        System.exit(1);
                } finally {
                        jettyServer.destroy();
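
The App.java hunk above is the template for most of this change: System.out and e.printStackTrace() calls become SLF4J calls. One caveat: when the exception is the only argument and the message reserves a {} for it, some SLF4J versions render only e.toString() into the placeholder and drop the stack trace. The usual idiom is to log the failure at ERROR and pass the throwable as a trailing argument with no placeholder of its own. A minimal sketch, with an illustrative class name and message text:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class StartupLoggingSketch {
        private static final Logger log = LoggerFactory.getLogger(StartupLoggingSketch.class);

        public static void main(String[] args) {
            String asdcToolPort = "8087";
            try {
                // ... start the embedded server here ...
                log.info("Server was started on port {}", asdcToolPort);
            } catch (Exception e) {
                // The throwable goes last, with no {} reserved for it, so the
                // full stack trace is written to the log.
                log.error("Server failed to start on port {}", asdcToolPort, e);
                System.exit(1);
            }
        }
    }
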
asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
index 23612a8..ce8b2c4 100644 (file)
@@ -45,7 +45,7 @@ public class Utils {
 
        private static Logger log = LoggerFactory.getLogger(Utils.class.getName());
 
-       public static String NEW_LINE = System.getProperty("line.separator");
+       public final static String NEW_LINE = System.getProperty("line.separator");
 
        public static Response buildOkResponse(
                        /*
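
NEW_LINE is one of several public static fields that gain final in this change (DEFAULT_FETCH_SIZE, the two delim fields, the PropertyRule constants). A non-final public static field is effectively a writable global that any caller can reassign, which is what the scanners flag. A before/after sketch with an illustrative holder class:

    public final class NewLineHolderSketch {
        // Before: public static String NEW_LINE = ... could be reassigned by any caller.
        // After: the reference is fixed once the class is initialized.
        public static final String NEW_LINE = System.getProperty("line.separator");

        private NewLineHolderSketch() {
            // constants holder, never instantiated
        }
    }
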
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
index 2bcc8c0..f036c39 100644 (file)
@@ -94,7 +94,7 @@ public class GraphMLConverter {
                        return result;
 
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("import graph failed - {} " , e);
                        return false;
                } finally {
                        if (graph != null) {
@@ -121,7 +121,7 @@ public class GraphMLConverter {
 
                        System.out.println("Exported file=" + result);
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("export graph failed -{}" , e);
                        return false;
                } finally {
                        if (graph != null) {
@@ -146,7 +146,7 @@ public class GraphMLConverter {
 
                        System.out.println("Exported file=" + result);
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("export exportGraphMl failed - {}" , e);
                        return null;
                } finally {
                        if (graph != null) {
@@ -173,7 +173,7 @@ public class GraphMLConverter {
 
                        System.out.println("Exported file=" + result);
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("find Error In Json Graph failed - {}" , e);
                        return false;
                } finally {
                        if (graph != null) {
@@ -221,7 +221,7 @@ public class GraphMLConverter {
                        result = outputFile;
 
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("export Json Graph failed - {}" , e);
                        graph.tx().rollback();
                } finally {
                        try {
@@ -229,7 +229,7 @@ public class GraphMLConverter {
                                        out.close();
                                }
                        } catch (IOException e) {
-                               e.printStackTrace();
+                               log.info("close FileOutputStream failed - {}" , e);
                        }
                }
                return result;
@@ -247,7 +247,7 @@ public class GraphMLConverter {
                        graph.tx().commit();
                } catch (Exception e) {
                        graph.tx().rollback();
-                       e.printStackTrace();
+                       log.info("export Graph Ml failed - {}" , e);
                }
                return result;
 
@@ -305,7 +305,7 @@ public class GraphMLConverter {
 
                } catch (Exception e) {
                        System.out.println("Failed to import graph " + e.getMessage());
-                       e.printStackTrace();
+                       log.info("Failed to import graph - {}" , e);
                        // graph.rollback();
                        graph.tx().rollback();
                } finally {
@@ -314,7 +314,7 @@ public class GraphMLConverter {
                                        is.close();
                                }
                        } catch (IOException e) {
-                               e.printStackTrace();
+                               log.info("close FileOutputStream failed - {}" , e);
                        }
                }
 
@@ -396,7 +396,7 @@ public class GraphMLConverter {
                                                openGraph.tx().rollback();
 
                                        } catch (Exception e) {
-                                               e.printStackTrace();
+                                               log.info("run Edge Scan failed - {}" , e);
 
                                                log.error("fromVertex={}", Utils.getProperties(vertexFrom));
                                                log.error("toVertex={}", Utils.getProperties(vertexTo));
@@ -450,6 +450,7 @@ public class GraphMLConverter {
 
                                        } catch (Exception e) {
                                                e.printStackTrace();
+                                               log.info("run Vertex Scan failed - {}" , e);
 
                                                Object property1 = vertex.value(GraphPropertiesDictionary.HEALTH_CHECK.getProperty());
                                                System.out.println(property1);
@@ -508,7 +509,7 @@ public class GraphMLConverter {
                        graph.tx().rollback();
 
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("find Error In Json Graph failed - {}" , e);
                        // graph.rollback();
                        graph.tx().rollback();
                } finally {
@@ -517,7 +518,7 @@ public class GraphMLConverter {
                                        out.close();
                                }
                        } catch (IOException e) {
-                               e.printStackTrace();
+                               log.info("close FileOutputStream failed - {}" , e);
                        }
                }
                return result;
@@ -592,7 +593,7 @@ public class GraphMLConverter {
                        result = outputFile;
 
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("export Users failed - {}" , e);
                        graph.tx().rollback();
                } finally {
                        try {
@@ -600,7 +601,7 @@ public class GraphMLConverter {
                                        fileWriter.close();
                                }
                        } catch (IOException e) {
-                               e.printStackTrace();
+                               log.info("close FileOutputStream failed - {}" , e);
                        }
                }
                return result;
@@ -642,7 +643,7 @@ public class GraphMLConverter {
 
                        System.out.println("Exported file=" + result);
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("export Users failed - {}" , e);
                        return false;
                } finally {
                        if (graph != null) {
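
GraphMLConverter repeats the same finally { out.close(); } catch (IOException ...) block after every export. Where a stream is only used inside one method, Java 7 try-with-resources removes that boilerplate while still logging failures; a minimal sketch under that assumption, with illustrative names:

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExportStreamSketch {
        private static final Logger log = LoggerFactory.getLogger(ExportStreamSketch.class);

        // try-with-resources closes the stream whether or not the write succeeds,
        // replacing the explicit finally { out.close(); } blocks above.
        public static boolean writeExport(String outputFile, byte[] payload) {
            try (OutputStream out = new FileOutputStream(outputFile)) {
                out.write(payload);
                return true;
            } catch (IOException e) {
                log.error("failed to write export file {}", outputFile, e);
                return false;
            }
        }
    }
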
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
index 77163b4..f34feb0 100644 (file)
@@ -36,9 +36,13 @@ import org.jdom2.Element;
 import org.jdom2.filter.ElementFilter;
 import org.jdom2.input.SAXBuilder;
 import org.jdom2.util.IteratorIterable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class GraphMLDataAnalyzer {
 
+       private static Logger log = LoggerFactory.getLogger(GraphMLDataAnalyzer.class);
+
        private static final String[] COMPONENT_SHEET_HEADER = { "uniqueId", "type", "name", "toscaResourceName",
                        "resourceType", "version", "deleted", "hasNonCalculatedReqCap" };
        private static final String[] COMPONENT_INSTANCES_SHEET_HEADER = { "uniqueId", "name", "originUid", "originType",
@@ -51,7 +55,7 @@ public class GraphMLDataAnalyzer {
                        result = _analyzeGraphMLData(mlFileLocation);
                        System.out.println("Analyzed ML file=" + mlFileLocation + ", XLS result=" + result);
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("analyze GraphML Data failed - {}" , e);
                        return null;
                }
                return result;
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
index 502ddd1..a8674f8 100644 (file)
@@ -77,7 +77,7 @@ public class ProductLogic {
                        graph.tx().commit();
                        return productsToDelete;
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("get All Products failed - {}" , e);
                        graph.tx().rollback();
                        return null;
 
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
index 0336701..6f0136c 100644 (file)
@@ -102,7 +102,7 @@ public class UpdatePropertyOnVertex {
                        return numberOfUpdatedVertexes;
 
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("update Property On Service At Least Certified failed -{}" , e);
                        // graph.rollback();
                        graph.tx().rollback();
 
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java
index 90d8506..01b92f7 100644 (file)
@@ -4,12 +4,16 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.Properties;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Created by chaya on 7/4/2017.
  */
 public class ValidationConfigManager {
 
+    private static Logger log = LoggerFactory.getLogger(ValidationConfigManager.class);
+
     private static Properties prop = new Properties();
     private static String outputFullFilePath;
     private static String outputFilePath;
@@ -44,7 +48,7 @@ public class ValidationConfigManager {
             input = new FileInputStream(path);
             prop.load(input);
         } catch (IOException ex) {
-            ex.printStackTrace();
+            log.info("FileInputStream failed - " , ex);
         }
         return prop;
     }
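
The ValidationConfigManager catch block now logs the load failure, and because the message has no {} placeholder the exception is passed as a genuine throwable, so the stack trace is kept. A minimal sketch of the same load using try-with-resources; the class name is illustrative:

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class PropertiesLoaderSketch {
        private static final Logger log = LoggerFactory.getLogger(PropertiesLoaderSketch.class);

        // Loads a .properties file; the stream is closed automatically and a
        // failed load is logged with its stack trace, returning an empty Properties.
        public static Properties load(String path) {
            Properties prop = new Properties();
            try (InputStream input = new FileInputStream(path)) {
                prop.load(input);
            } catch (IOException ex) {
                log.error("failed to load properties from {}", path, ex);
            }
            return prop;
        }
    }
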
asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java
index a19a98e..ebedf0a 100644 (file)
@@ -13,12 +13,15 @@ import java.util.Set;
 import org.apache.commons.lang.text.StrBuilder;
 import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Created by chaya on 7/5/2017.
  */
 public class ReportManager {
 
+    private static Logger log = LoggerFactory.getLogger(ReportManager.class);
     private static String reportOutputFilePath;
     private static String csvReportFilePath;
     private static Map<String, Set<String>> failedVerticesPerTask = new HashMap<>();
@@ -29,7 +32,7 @@ public class ReportManager {
             initCsvFile();
             initReportFile();
         } catch (IOException e) {
-            e.printStackTrace();
+            log.info("Init file failed - {}" , e);
         }
     }
 
@@ -79,7 +82,7 @@ public class ReportManager {
             Files.write(Paths.get(reportOutputFilePath), new StrBuilder().appendNewLine().toString().getBytes(), StandardOpenOption.APPEND);
             Files.write(Paths.get(reportOutputFilePath), message.getBytes(), StandardOpenOption.APPEND);
         } catch (IOException e) {
-            e.printStackTrace();
+            log.info("write to file failed - {}" , e);
         }
     }
 
@@ -128,7 +131,7 @@ public class ReportManager {
                     Files.write(Paths.get(csvReportFilePath), resultLine.getBytes(), StandardOpenOption.APPEND);
                     Files.write(Paths.get(csvReportFilePath), new StrBuilder().appendNewLine().toString().getBytes(), StandardOpenOption.APPEND);
                 } catch (IOException e) {
-                    e.printStackTrace();
+                    log.info("write to file failed - {}" , e);
                 }
             });
         });
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
index b0e3dbe..665a9b7 100644 (file)
@@ -31,69 +31,66 @@ import org.slf4j.LoggerFactory;
 
 public class DataSchemaMenu {
 
-       private static Logger log = LoggerFactory.getLogger(DataSchemaMenu.class.getName());
+    private static Logger log = LoggerFactory.getLogger(DataSchemaMenu.class.getName());
 
-       public static void main(String[] args) {
+    public static void main(String[] args) {
 
-               String operation = args[0];
+        String operation = args[0];
 
-               String appConfigDir = args[1];
+        String appConfigDir = args[1];
 
-               if (args == null || args.length < 2) {
-                       usageAndExit();
-               }
+        if (args == null || args.length < 2) {
+            usageAndExit();
+        }
 
-               ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
-               ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
-               
-               try {
+        ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
+        ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
 
-                       switch (operation.toLowerCase()) {
-                       case "create-cassandra-structures":
-                               log.debug("Start create cassandra keyspace, tables and indexes");
-                               if (SdcSchemaBuilder.createSchema()) {
-                                       log.debug("create cassandra keyspace, tables and indexes successfull");
-                                       System.exit(0);
-                               } else {
-                                       log.debug("create cassandra keyspace, tables and indexes failed");
-                                       System.exit(2);
-                               }
-                       case "create-titan-structures":
-                               log.debug("Start create titan keyspace");
-                               String titanCfg = 2 == args.length? configurationManager.getConfiguration().getTitanCfgFile(): args[2];
-                               if (TitanGraphInitializer.createGraph(titanCfg)) {
-                                       log.debug("create titan keyspace successfull");
-                                       System.exit(0);
-                               } else {
-                                       log.debug("create titan keyspace failed");
-                                       System.exit(2);
-                               }
-                       case "clean-cassndra":
-                               log.debug("Start clean keyspace, tables");
-                               if (SdcSchemaBuilder.deleteSchema()) {
-                                       log.debug(" successfull");
-                                       System.exit(0);
-                               } else {
-                                       log.debug(" failed");
-                                       System.exit(2);
-                               }
-                       default:
-                               usageAndExit();
-                       }
-               } catch (Throwable t) {
-                       t.printStackTrace();
-                       log.debug("create cassandra keyspace, tables and indexes failed");
-                       System.exit(3);
-               }
-       }
+        switch (operation.toLowerCase()) {
+            case "create-cassandra-structures":
+                log.debug("Start create cassandra keyspace, tables and indexes");
+                if (SdcSchemaBuilder.createSchema()) {
+                    log.debug("create cassandra keyspace, tables and indexes successfull");
+                    System.exit(0);
+                } else {
+                    log.debug("create cassandra keyspace, tables and indexes failed");
+                    System.exit(2);
+                }
+                break;
+            case "create-titan-structures":
+                log.debug("Start create titan keyspace");
+                String titanCfg = 2 == args.length ? configurationManager.getConfiguration().getTitanCfgFile() : args[2];
+                if (TitanGraphInitializer.createGraph(titanCfg)) {
+                    log.debug("create titan keyspace successfull");
+                    System.exit(0);
+                } else {
+                    log.debug("create titan keyspace failed");
+                    System.exit(2);
+                }
+                break;
+            case "clean-cassndra":
+                log.debug("Start clean keyspace, tables");
+                if (SdcSchemaBuilder.deleteSchema()) {
+                    log.debug(" successfull");
+                    System.exit(0);
+                } else {
+                    log.debug(" failed");
+                    System.exit(2);
+                }
+                break;
+            default:
+                usageAndExit();
+                break;
+        }
+    }
 
-       private static void usageAndExit() {
-               DataSchemeUsage();
-               System.exit(1);
-       }
+    private static void usageAndExit() {
+        DataSchemeUsage();
+        System.exit(1);
+    }
 
-       private static void DataSchemeUsage() {
-               System.out.println("Usage: create-cassandra-structures <configuration dir> ");
-               System.out.println("Usage: create-titan-structures <configuration dir> ");
-       }
+    private static void DataSchemeUsage() {
+        System.out.println("Usage: create-cassandra-structures <configuration dir> ");
+        System.out.println("Usage: create-titan-structures <configuration dir> ");
+    }
 }
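
The reformatted switch in DataSchemaMenu now ends every case with break, so create-cassandra-structures no longer falls through into the titan and clean cases. Note that args[0] and args[1] are still read before the args.length < 2 guard, so a missing argument fails with an ArrayIndexOutOfBoundsException rather than the usage text. A hedged sketch of validating first; this is illustrative, not the tool's actual code:

    public class MenuArgsSketch {

        public static void main(String[] args) {
            // Validate before indexing into args, so a missing argument produces
            // the usage text instead of an ArrayIndexOutOfBoundsException.
            if (args == null || args.length < 2) {
                usageAndExit();
            }
            String operation = args[0];
            String appConfigDir = args[1];

            switch (operation.toLowerCase()) {
                case "create-cassandra-structures":
                    System.out.println("would create the schema using config dir " + appConfigDir);
                    break; // each case ends with break, so there is no fall-through
                default:
                    usageAndExit();
                    break;
            }
        }

        private static void usageAndExit() {
            System.out.println("Usage: create-cassandra-structures <configuration dir>");
            System.exit(1);
        }
    }
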
asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
index f791098..873bdb1 100644 (file)
@@ -89,7 +89,7 @@ public class EsToCassandraDataMigrationMenu {
                                usageAndExit();
                        }
                } catch (Throwable t) {
-                       t.printStackTrace();
+                       log.info("data migration failed - {}", t);
                        System.exit(3);
                }
        }
asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java
index 9d40354..62b59f1 100644 (file)
@@ -164,7 +164,7 @@ public class ExportImportTitanServlet {
                        result = outputFile;
 
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("export Graph failed - {}" , e);
                        // graph.rollback();
                        graph.tx().rollback();
                } finally {
@@ -173,7 +173,7 @@ public class ExportImportTitanServlet {
                                        out.close();
                                }
                        } catch (IOException e) {
-                               e.printStackTrace();
+                               log.info("close FileOutputStream failed - {}" , e);
                        }
                }
                return result;
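
GraphMLConverter and ExportImportTitanServlet both follow a commit-on-success, roll-back-and-log-on-failure shape around graph.tx(). A condensed sketch of that shape, assuming the TinkerPop 3 Graph interface the surrounding code appears to use; the helper method and class name are illustrative:

    import org.apache.tinkerpop.gremlin.structure.Graph;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class GraphTxSketch {
        private static final Logger log = LoggerFactory.getLogger(GraphTxSketch.class);

        // Commit on success, roll back and log on failure -- the shape used by
        // the export methods above.
        public static boolean runInTransaction(Graph graph, Runnable work) {
            try {
                work.run();
                graph.tx().commit();
                return true;
            } catch (Exception e) {
                log.error("graph operation failed, rolling back", e);
                graph.tx().rollback();
                return false;
            }
        }
    }
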
catalog-dao/src/main/java/org/openecomp/sdc/be/dao/cassandra/ComponentCassandraDao.java
index 82630e0..50c4f3f 100644 (file)
@@ -49,7 +49,7 @@ public class ComponentCassandraDao extends CassandraDao {
 
        private static Logger logger = LoggerFactory.getLogger(ComponentCassandraDao.class.getName());
 
-       public static Integer DEFAULT_FETCH_SIZE = 500;
+       public final static Integer DEFAULT_FETCH_SIZE = 500;
 
        private ComponentCacheAccessor componentCacheAccessor;
 
catalog-dao/src/main/java/org/openecomp/sdc/be/dao/cassandra/schema/SdcSchemaBuilder.java
index fa5c4ac..0618acc 100644 (file)
@@ -7,9 +7,9 @@
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 
 package org.openecomp.sdc.be.dao.cassandra.schema;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
-import org.apache.commons.lang3.tuple.ImmutablePair;
-import org.openecomp.sdc.be.config.Configuration;
-import org.openecomp.sdc.be.config.ConfigurationManager;
-import org.openecomp.sdc.be.dao.cassandra.schema.tables.OldExternalApiEventTableDesc;
-import org.openecomp.sdc.be.resources.data.auditing.AuditingTypesConstants;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.datastax.driver.core.Cluster;
-import com.datastax.driver.core.DataType;
-import com.datastax.driver.core.KeyspaceMetadata;
-import com.datastax.driver.core.Session;
-import com.datastax.driver.core.schemabuilder.Alter;
-import com.datastax.driver.core.schemabuilder.Create;
-import com.datastax.driver.core.schemabuilder.SchemaBuilder;
-import com.datastax.driver.core.schemabuilder.SchemaStatement;
-
+               import java.util.ArrayList;
+               import java.util.HashMap;
+               import java.util.List;
+               import java.util.Map;
+               import java.util.Optional;
+               import java.util.stream.Collectors;
+
+               import org.apache.commons.lang3.tuple.ImmutablePair;
+               import org.openecomp.sdc.be.config.Configuration;
+               import org.openecomp.sdc.be.config.ConfigurationManager;
+               import org.openecomp.sdc.be.dao.cassandra.schema.tables.OldExternalApiEventTableDesc;
+               import org.openecomp.sdc.be.resources.data.auditing.AuditingTypesConstants;
+               import org.slf4j.Logger;
+               import org.slf4j.LoggerFactory;
+
+               import com.datastax.driver.core.Cluster;
+               import com.datastax.driver.core.DataType;
+               import com.datastax.driver.core.KeyspaceMetadata;
+               import com.datastax.driver.core.Session;
+               import com.datastax.driver.core.schemabuilder.Alter;
+               import com.datastax.driver.core.schemabuilder.Create;
+               import com.datastax.driver.core.schemabuilder.SchemaBuilder;
+               import com.datastax.driver.core.schemabuilder.SchemaStatement;
+               import com.google.common.annotations.VisibleForTesting;
 public class SdcSchemaBuilder {
 
        /**
@@ -64,12 +64,12 @@ public class SdcSchemaBuilder {
                        list.add(new OldExternalApiEventTableDesc());
                        schemeData.put("attaudit", list);
                }
-               
+
        }
        /**
         * the method creates all keyspaces, tables and indexes in case they do not
         * already exist. the method can be run multiple times. the method uses the
-        * internal enums and external configuration for its operation   * 
+        * internal enums and external configuration for its operation   *
         * @return true if the create operation was successful
         */
        public static boolean createSchema() {
@@ -161,14 +161,14 @@ public class SdcSchemaBuilder {
                return false;
        }
 
-       
+
 
        /**
         * the method prcess the metadata retrieved from the cassandra for the
         * creation of a map conting the names of keyspaces tabls and indexes
         * already defined in the cassandra keyspacename -> tablename -> list of
         * indexes info
-        * 
+        *
         * @param keyspacesMetadata
         *            cassndra mmetadata
         * @return a map of maps of lists holding parsed info
@@ -183,23 +183,23 @@ public class SdcSchemaBuilder {
                                                                                                .collect(Collectors.toList())))));
                return cassndraMetadata;
        }
-       
+
        private static Map<String, Map<String, List<String>>> getMetadataTablesStructure(
                        List<KeyspaceMetadata> keyspacesMetadata) {
                return keyspacesMetadata.stream().collect(
                                Collectors.toMap(keyspaceMetadata -> keyspaceMetadata.getName(),
-                                                                keyspaceMetadata -> keyspaceMetadata.getTables().stream().collect(
-                                                                                Collectors.toMap(tableMetadata -> tableMetadata.getName(), 
-                                                                                                                 tableMetadata -> tableMetadata.getColumns().stream().map(
-                                                                                                                                 columnMetadata -> columnMetadata.getName().toLowerCase()).collect(
-                                                                                                                                                 Collectors.toList())))));             
+                                               keyspaceMetadata -> keyspaceMetadata.getTables().stream().collect(
+                                                               Collectors.toMap(tableMetadata -> tableMetadata.getName(),
+                                                                               tableMetadata -> tableMetadata.getColumns().stream().map(
+                                                                                               columnMetadata -> columnMetadata.getName().toLowerCase()).collect(
+                                                                                               Collectors.toList())))));
        }
 
        /**
         * the method builds an index name according to a defined logic
         * <table>
         * _<column>_idx
-        * 
+        *
         * @param table: table name
         * @param column: column name
         * @return string name of the index
@@ -214,12 +214,12 @@ public class SdcSchemaBuilder {
         * @param iTableDescriptions: a list of table description we want to create
         * @param keyspaceMetadate: the current tables that exist in the cassandra under this keyspace
         * @param session: the session object used for the execution of the query.
-        * @param existingTablesMetadata 
+        * @param existingTablesMetadata
         *                      the current tables columns that exist in the cassandra under this
         *            keyspace
         */
-       private static void createTables(List<ITableDescription> iTableDescriptions, Map<String, List<String>> keyspaceMetadate, Session session, 
-                       Map<String, List<String>> existingTablesMetadata) {
+       private static void createTables(List<ITableDescription> iTableDescriptions, Map<String, List<String>> keyspaceMetadate, Session session,
+                                                                        Map<String, List<String>> existingTablesMetadata) {
                for (ITableDescription tableDescription : iTableDescriptions) {
                        String tableName = tableDescription.getTableName().toLowerCase();
                        Map<String, ImmutablePair<DataType, Boolean>> columnDescription = tableDescription.getColumnDescription();
@@ -275,8 +275,8 @@ public class SdcSchemaBuilder {
         * @param columnDescription
         */
        private static void alterTable(Session session, Map<String, List<String>> existingTablesMetadata,
-                       ITableDescription tableDescription, String tableName,
-                       Map<String, ImmutablePair<DataType, Boolean>> columnDescription) {
+                                                                  ITableDescription tableDescription, String tableName,
+                                                                  Map<String, ImmutablePair<DataType, Boolean>> columnDescription) {
                List<String> definedTableColumns = existingTablesMetadata.get(tableName);
                //add column to casandra if was added to table definition
                for (Map.Entry<String, ImmutablePair<DataType, Boolean>> column : columnDescription.entrySet()) {
@@ -286,7 +286,7 @@ public class SdcSchemaBuilder {
                                Alter alter = SchemaBuilder.alterTable(tableDescription.getKeyspace(),tableDescription.getTableName());
                                SchemaStatement addColumn = alter.addColumn(columnName).type(column.getValue().getLeft());
                                log.trace("exacuting :{}", addColumn.toString());
-                               session.execute(addColumn);                                             
+                               session.execute(addColumn);
                        }
                }
        }
@@ -294,7 +294,7 @@ public class SdcSchemaBuilder {
        /**
         * the method create the keyspace in case it does not already exists the
         * method uses configurtion to select the needed replication strategy
-        * 
+        *
         * @param keyspace: name of the keyspace we want to create
         * @param cassndraMetadata: cassndra metadata
         * @param session: the session object used for the execution of the query.
@@ -330,7 +330,7 @@ public class SdcSchemaBuilder {
 
        /**
         * the method retries the schem info from the enums describing the tables
-        * 
+        *
         * @return a map of keyspaces to there table info
         */
        private static Map<String, List<ITableDescription>> getSchemeData() {
@@ -349,14 +349,14 @@ public class SdcSchemaBuilder {
        }
 
        /**
-        * the methoed creates the query string for the given keyspace the methoed
+        * the methoed creates the query string for the given keyspace the methoed
         * valides the given data according the the requirments of the replication
         * strategy SimpleStrategy: "CREATE KEYSPACE IF NOT EXISTS
         * <keyspaceName></keyspaceName> WITH replication =
         * {'class':'SimpleStrategy', 'replication_factor':2};" SimpleStrategy:
         * "CREATE KEYSPACE IF NOT EXISTS <keyspaceName></keyspaceName> WITH
         * replication = {'class':'NetworkTopologyStrategy', 'dc1' : 2 ,dc2 : 2 };"
-        * 
+        *
         * @param keyspace
         *            name of the keyspace we want to create
         * @param keyspaceInfo
@@ -401,9 +401,9 @@ public class SdcSchemaBuilder {
        public enum ReplicationStrategy {
                NETWORK_TOPOLOGY_STRATEGY("NetworkTopologyStrategy"), SIMPLE_STRATEGY("SimpleStrategy");
 
-               public String name;
+               private String name;
 
-               private ReplicationStrategy(String name) {
+               ReplicationStrategy(String name) {
                        this.name = name;
                }
 
@@ -411,5 +411,4 @@ public class SdcSchemaBuilder {
                        return name;
                }
        }
-
 }
catalog-dao/src/main/java/org/openecomp/sdc/be/resources/data/ComponentCacheData.java
index 7277400..acd4ff4 100644 (file)
@@ -35,7 +35,7 @@ public class ComponentCacheData {
        public final static String SERVICE_VERSION_FIELD = "serviceVersion";
        public final static String ARTIFACT_NAME_FIELD = "artifactName";
 
-       public static String delim = ":";
+       public final static String delim = ":";
 
        @PartitionKey
        @Column(name = "id")
catalog-dao/src/main/java/org/openecomp/sdc/be/resources/data/ESArtifactData.java
index b198985..0ae1fd5 100644 (file)
@@ -34,7 +34,7 @@ public class ESArtifactData {
        public static final String SERVICE_VERSION_FIELD = "serviceVersion";
        public static final String ARTIFACT_NAME_FIELD = "artifactName";
 
-       public static String delim = ":";
+       public final static String delim = ":";
 
        @PartitionKey
        @Column(name = "id")
catalog-model/src/main/java/org/openecomp/sdc/be/model/jsontitan/operations/NodeTypeOperation.java
index d46743c..77677a9 100644 (file)
@@ -66,8 +66,8 @@ import fj.data.Either;
 
 @org.springframework.stereotype.Component("node-type-operation")
 public class NodeTypeOperation extends ToscaElementOperation {
-       public static Pattern uuidNewVersion = Pattern.compile("^\\d{1,}.1");
-       public static Pattern uuidNormativeNewVersion = Pattern.compile("^\\d{1,}.0");
+       public final static Pattern uuidNewVersion = Pattern.compile("^\\d{1,}.1");
+       public final static Pattern uuidNormativeNewVersion = Pattern.compile("^\\d{1,}.0");
 
        private static Logger log = LoggerFactory.getLogger(NodeTypeOperation.class.getName());
 
common-app-api/src/main/java/org/openecomp/sdc/common/api/ResponseInfo.java
index 2a6b7e7..3e8968a 100644 (file)
@@ -25,14 +25,18 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 
 public class ResponseInfo {
 
-       public static enum ResponseStatusEnum {
+       public enum ResponseStatusEnum {
                SUCCESS("success"), LOGIN_FAILED("loginFailed"), INTERNAL_ERROR("internalError"), MISSING_HEADERS("required headers are missing"), TIMEOUT("timeout"), PARSING_ERROR("parsingFailed");
 
+               private String statusDescription;
+
                ResponseStatusEnum(String status) {
                        this.statusDescription = status;
                }
 
-               public String statusDescription;
+               public String getStatusDescription() {
+                       return statusDescription;
+               }
        }
 
        private ResponseStatusEnum applicativeStatus;
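
ResponseInfo drops the redundant static on the nested enum (nested enum types are implicitly static) and hides statusDescription behind a getter, so callers can no longer reassign it. A small usage sketch; the enum here is illustrative and additionally marks the field final, which the diff itself does not:

    public class ResponseStatusUsageSketch {

        enum ResponseStatus {
            SUCCESS("success"), TIMEOUT("timeout");

            private final String statusDescription; // no longer publicly reassignable

            ResponseStatus(String status) {
                this.statusDescription = status;
            }

            public String getStatusDescription() {
                return statusDescription;
            }
        }

        public static void main(String[] args) {
            // callers read the description through the accessor instead of the field
            System.out.println(ResponseStatus.SUCCESS.getStatusDescription());
        }
    }
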
common-app-api/src/main/java/org/openecomp/sdc/common/config/generation/GenerateEcompErrorsCsv.java
index 87453de..6b7afc4 100644 (file)
@@ -28,6 +28,8 @@ import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import org.openecomp.sdc.common.config.EcompErrorEnum;
 import org.openecomp.sdc.common.config.EcompErrorEnum.AlarmSeverity;
@@ -36,6 +38,8 @@ import org.openecomp.sdc.common.config.EcompErrorLogUtil;
 
 public class GenerateEcompErrorsCsv {
 
+       private static Logger log = LoggerFactory.getLogger(GenerateEcompErrorsCsv.class);
+
        private static String DATE_FORMAT = "dd-M-yyyy-hh-mm-ss";
 
        private static String NEW_LINE = System.getProperty("line.separator");
@@ -195,14 +199,15 @@ public class GenerateEcompErrorsCsv {
                        result = true;
 
                } catch (Exception e) {
-                       e.printStackTrace();
+                       log.info("generate Ecomp Errors Csv File failed - {}" , e);
+
                } finally {
                        if (writer != null) {
                                try {
                                        writer.flush();
                                        writer.close();
                                } catch (IOException e) {
-                                       e.printStackTrace();
+                                       log.info("close FileOutputStream failed - {}" , e);
                                }
 
                        }
common-app-api/src/main/java/org/openecomp/sdc/common/listener/AppContextListener.java
index a6965a7..7544952 100644 (file)
@@ -117,7 +117,7 @@ public class AppContextListener implements ServletContextListener {
                                try {
                                        inputStream.close();
                                } catch (IOException e) {
-                                       e.printStackTrace();
+                                       log.info("close FileOutputStream failed - {}" , e);
                                }
                        }
                }
common-app-api/src/main/java/org/openecomp/sdc/common/util/ValidationUtils.java
index 004c2d0..864d79e 100644 (file)
@@ -92,7 +92,7 @@ public class ValidationUtils {
        public final static Pattern COLON_PATTERN = Pattern.compile("[:]+");
        public final static Pattern AT_PATTERN = Pattern.compile("[@]+");
        public final static Pattern AND_PATTERN = Pattern.compile(" [aA][Nn][Dd] ");
-       public final static Set<String> CATEGORY_CONJUNCTIONS = new HashSet<String>(
+       protected final static Set<String> CATEGORY_CONJUNCTIONS = new HashSet<String>(
                        Arrays.asList("of", "to", "for", "as", "a", "an", "the"));
 
        public final static Pattern COST_PATTERN = Pattern.compile("^[0-9]{1,5}\\.[0-9]{1,3}$");
common-app-api/src/main/java/org/openecomp/sdc/common/util/YamlToObjectConverter.java
index 322725b..e8feb1b 100644 (file)
@@ -220,8 +220,7 @@ public class YamlToObjectConverter {
                                try {
                                        in.close();
                                } catch (IOException e) {
-                                       log.debug("Failed to close input stream {} ", e.getMessage(), e);
-                                       e.printStackTrace();
+                                       log.debug("Failed to close input stream", e);
                                }
                        }
                }
@@ -249,8 +248,7 @@ public class YamlToObjectConverter {
                                try {
                                        in.close();
                                } catch (IOException e) {
-                                       log.debug("Failed to close input stream {} ", e.getMessage(), e);
-                                       e.printStackTrace();
+                                       log.debug("Failed to close input stream", e);
                                }
                        }
                }
common-app-api/src/main/java/org/openecomp/sdc/common/util/ZipUtil.java
index c9207b2..f60cc16 100644 (file)
@@ -88,7 +88,8 @@ public class ZipUtil {
                        zis.close();
 
                } catch (IOException ex) {
-                       ex.printStackTrace();
+                       
+                       log.info("close Byte stream failed - {}" , ex);
                        return null;
                } finally {
                        if (zis != null) {
@@ -121,7 +122,7 @@ public class ZipUtil {
                        ZipUtil.readZip(zipAsBytes);
 
                } catch (IOException e) {
-                       e.printStackTrace();
+                       log.info("close Byte stream failed - {}" , e);
                }
 
        }
common-be/src/main/java/org/openecomp/sdc/be/datatypes/elements/PropertyRule.java
index a23ecfe..d9f913e 100644 (file)
@@ -33,9 +33,9 @@ public class PropertyRule extends ToscaDataDefinition implements Serializable {
         */
        private static final long serialVersionUID = -3357933382124599996L;
 
-       public static String FORCE_ALL = "FORCE_ALL";
-       public static String ALL = "ALL";
-       public static String RULE_ANY_MATCH = ".+";
+       public final static String FORCE_ALL = "FORCE_ALL";
+       public final static String ALL = "ALL";
+       public final static String RULE_ANY_MATCH = ".+";
 
        List<String> rule;
        String value;
security-utils/src/main/java/org/openecomp/sdc/security/SecurityUtil.java
index 892c29b..9500ab5 100644 (file)
@@ -23,7 +23,7 @@ public class SecurityUtil {
     public static final String ALGORITHM = "AES" ;
     public static final String CHARSET = StandardCharsets.UTF_8.name();
 
-    public static Key secKey = null ;
+    private static Key secKey = null ;
 
     /**
      *