Sonar issue fix - Session is closeable 33/27033/2
author shrek2000 <orenkle@amdocs.com>
Wed, 27 Dec 2017 12:24:00 +0000 (14:24 +0200)
committer Avi Gaffa <avi.gaffa@amdocs.com>
Wed, 27 Dec 2017 13:30:23 +0000 (13:30 +0000)
Cassandra Session is closeable and should be managed with a try-with-resources block so it is closed even when an exception is thrown.
Issue-ID: SDC-801

Change-Id: Ib2cc178e029dce1915e2931fe58e4a5d6527dd5f
Signed-off-by: shrek2000 <orenkle@amdocs.com>
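
For context, a minimal sketch of the pattern this change introduces, assuming the DataStax Java driver 2.x, where Session implements Closeable; the contact point and query below are illustrative only, not taken from this change:

    import com.datastax.driver.core.Cluster;
    import com.datastax.driver.core.Session;

    public class SessionSketch {
        public static void main(String[] args) {
            // try-with-resources closes the session (and cluster) even if
            // the body throws, which is what the Sonar rule asks for.
            try (Cluster cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
                 Session session = cluster.connect()) {
                session.execute("SELECT release_version FROM system.local");
            }
        }
    }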
openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/exportinfo/ExportDataCommand.java
openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/importinfo/ImportSingleTable.java

openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/exportinfo/ExportDataCommand.java
index c5e140e..b5486f5 100644
@@ -42,7 +42,7 @@ import static java.nio.file.Files.createDirectories;
 public final class ExportDataCommand {
     private static final Logger logger = LoggerFactory.getLogger(ExportDataCommand.class);
     public static final String JOIN_DELIMITER = "$#";
-    public static final String JOIN_DELIMITER_SPILTTER = "\\$\\#";
+    public static final String JOIN_DELIMITER_SPLITTER = "\\$\\#";
     public static final String MAP_DELIMITER = "!@";
     public static final String MAP_DELIMITER_SPLITTER = "\\!\\@";
     public static final int THREAD_POOL_SIZE = 4;
@@ -54,33 +54,33 @@ public final class ExportDataCommand {
         ExecutorService executor = null;
         try {
             CassandraConnectionInitializer.setCassandraConnectionPropertiesToSystem();
-            final Set<String> filteredItems = Sets.newHashSet(filterItem);
             Path rootDir = Paths.get(ImportProperties.ROOT_DIRECTORY);
             initDir(rootDir);
-            Set<String> fis = filteredItems.stream().map(fi -> fi.replaceAll("\\r", "")).collect(Collectors.toSet());
-
-            Map<String, List<String>> queries;
-            Yaml yaml = new Yaml();
-            try (InputStream is = ExportDataCommand.class.getResourceAsStream("/queries.yaml")) {
-                queries = (Map<String, List<String>>) yaml.load(is);
-            }
-            List<String> queriesList = queries.get("queries");
-            List<String> itemsColumns = queries.get("item_columns");
-            Set<String> vlms = new HashSet<>();
-            CountDownLatch doneQueries = new CountDownLatch(queriesList.size());
-            executor = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
-            for (int i = 0; i < queriesList.size(); i++) {
-                executeQuery(queriesList.get(i), fis, itemsColumns.get(i), vlms, doneQueries, executor);
-            }
-            doneQueries.await();
-            if (!vlms.isEmpty()) {
-                CountDownLatch doneVmls = new CountDownLatch(queriesList.size());
-
+            try (Session session = CassandraSessionFactory.getSession()) {
+                final Set<String> filteredItems = Sets.newHashSet(filterItem);
+                Set<String> fis = filteredItems.stream().map(fi -> fi.replaceAll("\\r", "")).collect(Collectors.toSet());
+                Map<String, List<String>> queries;
+                Yaml yaml = new Yaml();
+                try (InputStream is = ExportDataCommand.class.getResourceAsStream("/queries.yaml")) {
+                    queries = (Map<String, List<String>>) yaml.load(is);
+                }
+                List<String> queriesList = queries.get("queries");
+                List<String> itemsColumns = queries.get("item_columns");
+                Set<String> vlms = new HashSet<>();
+                CountDownLatch doneQueries = new CountDownLatch(queriesList.size());
+                executor = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
                 for (int i = 0; i < queriesList.size(); i++) {
-                    executeQuery(queriesList.get(i), vlms, itemsColumns.get(i), null, doneVmls, executor);
+                    executeQuery(session, queriesList.get(i), fis, itemsColumns.get(i), vlms, doneQueries, executor);
+                }
+                doneQueries.await();
+                if (!vlms.isEmpty()) {
+                    CountDownLatch doneVmls = new CountDownLatch(queriesList.size());
+                    for (int i = 0; i < queriesList.size(); i++) {
+                        executeQuery(session, queriesList.get(i), vlms, itemsColumns.get(i), null, doneVmls, executor);
+                    }
+
+                    doneVmls.await();
                 }
-
-                doneVmls.await();
             }
             zipPath(rootDir);
             FileUtils.forceDelete(rootDir.toFile());
@@ -95,9 +95,8 @@ public final class ExportDataCommand {
     }
 
 
-    private static boolean executeQuery(final String query, final Set<String> filteredItems, final String filteredColumn,
+    private static boolean executeQuery(final Session session, final String query, final Set<String> filteredItems, final String filteredColumn,
                                         final Set<String> vlms, final CountDownLatch donequerying, Executor executor) {
-        Session session = CassandraSessionFactory.getSession();
         ResultSetFuture resultSetFuture = session.executeAsync(query);
         Futures.addCallback(resultSetFuture, new FutureCallback<ResultSet>() {
             @Override
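
The refactoring above moves session ownership to the caller: executeQuery now receives the shared Session instead of opening its own, so the connection cannot leak on an exception path. A sketch of that async-callback shape, assuming Guava's Futures and the DataStax ResultSetFuture; the class and method names here are illustrative:

    import com.datastax.driver.core.ResultSet;
    import com.datastax.driver.core.ResultSetFuture;
    import com.datastax.driver.core.Session;
    import com.google.common.util.concurrent.FutureCallback;
    import com.google.common.util.concurrent.Futures;
    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.Executor;

    final class AsyncQuerySketch {
        // The caller owns (and closes) the Session; this method only borrows it.
        static void runAsync(Session session, String query,
                             CountDownLatch done, Executor executor) {
            ResultSetFuture future = session.executeAsync(query);
            Futures.addCallback(future, new FutureCallback<ResultSet>() {
                @Override public void onSuccess(ResultSet rows) { done.countDown(); }
                @Override public void onFailure(Throwable t)    { done.countDown(); }
            }, executor);
        }
    }
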
openecomp-be/tools/zusammen-tools/src/main/java/org/openecomp/core/tools/importinfo/ImportSingleTable.java
index 7504ad1..8a671c5 100644
@@ -114,7 +114,7 @@ public class ImportSingleTable {
                 byte[] decoded = Base64.getDecoder().decode(rowData);
                 String decodedStr = new String(decoded);
                 if (!StringUtils.isEmpty(decodedStr)) {
-                    String[] splitted = decodedStr.split(ExportDataCommand.JOIN_DELIMITER_SPILTTER);
+                    String[] splitted = decodedStr.split(ExportDataCommand.JOIN_DELIMITER_SPLITTER);
                     Set set = Sets.newHashSet(splitted);
                     set.remove("");
                     bind.setSet(i, set);
@@ -126,7 +126,7 @@ public class ImportSingleTable {
                 byte[] decodedMap = Base64.getDecoder().decode(rowData);
                 String mapStr = new String(decodedMap);
                 if (!StringUtils.isEmpty(mapStr)) {
-                    String[] splittedMap = mapStr.split(ExportDataCommand.JOIN_DELIMITER_SPILTTER);
+                    String[] splittedMap = mapStr.split(ExportDataCommand.JOIN_DELIMITER_SPLITTER);
                     Map<String, String> map = new HashMap<>();
                     for (String keyValue : splittedMap) {
                         String[] split = keyValue.split(ExportDataCommand.MAP_DELIMITER_SPLITTER);
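
A note on the renamed constant: String.split takes a regular expression, which is why the *_SPLITTER constants regex-escape the raw delimiters ("$" is the end-of-input anchor in a regex). A self-contained illustration of the round trip, using the same "$#" delimiter as the code above:

    import java.util.Arrays;

    public class DelimiterSketch {
        public static void main(String[] args) {
            String joined = String.join("$#", "a", "b", "c"); // "a$#b$#c"
            // "$" must be escaped or split() treats it as a regex anchor.
            String[] parts = joined.split("\\$\\#");
            System.out.println(Arrays.toString(parts));       // [a, b, c]
        }
    }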