Merge "Unit Test for PubFailRecord"
author     Ram Koya <rk541m@att.com>
           Fri, 31 Aug 2018 15:15:48 +0000 (15:15 +0000)
committer  Gerrit Code Review <gerrit@onap.org>
           Fri, 31 Aug 2018 15:15:48 +0000 (15:15 +0000)
14 files changed:
datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java [new file with mode: 0644]
datarouter-prov/pom.xml
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java
datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/InternalServletTest.java [changed mode: 0755->0644]
datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/GroupTest.java
datarouter-prov/src/test/resources/META-INF/persistence.xml [new file with mode: 0755]
datarouter-prov/src/test/resources/create.sql [new file with mode: 0755]
datarouter-prov/src/test/resources/h2Database.properties [new file with mode: 0755]

diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java
new file mode 100644 (file)
index 0000000..8a7460e
--- /dev/null
@@ -0,0 +1,104 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+package org.onap.dmaap.datarouter.node;
+
+import org.apache.commons.lang3.reflect.FieldUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Hashtable;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+@RunWith(PowerMockRunner.class)
+@SuppressStaticInitializationFor("org.onap.dmaap.datarouter.node.NodeConfigManager")
+public class DeliveryTest {
+
+  @Mock
+  private DeliveryQueue deliveryQueue;
+
+  private File nDir = new File("tmp/n");
+  private File sDir = new File("tmp/s");
+
+  @Before
+  public void setUp() throws IOException {
+    nDir.mkdirs();
+    sDir.mkdirs();
+    File newNDir = new File("tmp/n/0");
+    newNDir.mkdirs();
+    File newNFile = new File("tmp/n/0/testN.txt");
+    newNFile.createNewFile();
+    File newSDir = new File("tmp/s/0/1");
+    newSDir.mkdirs();
+    File newSpoolFile = new File("tmp/s/0/1/testSpool.txt");
+    newSpoolFile.createNewFile();
+  }
+
+  @Test
+  public void Validate_Reset_Queue_Calls_Reset_Queue_On_Delivery_Queue_Object() throws IllegalAccessException {
+    NodeConfigManager config = mockNodeConfigManager();
+    Delivery delivery = new Delivery(config);
+    Hashtable<String, DeliveryQueue> dqs = new Hashtable<>();
+    dqs.put("spool/s/0/1", deliveryQueue);
+    FieldUtils.writeDeclaredField(delivery, "dqs", dqs, true);
+    delivery.resetQueue("spool/s/0/1");
+    verify(deliveryQueue, times(1)).resetQueue();
+  }
+
+  @After
+  public void tearDown() {
+    nDir.delete();
+    sDir.delete();
+    File tmpDir = new File("tmp");
+    tmpDir.delete();
+  }
+
+  private NodeConfigManager mockNodeConfigManager() {
+    PowerMockito.mockStatic(NodeConfigManager.class);
+    NodeConfigManager config = mock(NodeConfigManager.class);
+    PowerMockito.when(config.isConfigured()).thenReturn(true);
+    PowerMockito.when(config.getAllDests()).thenReturn(createDestInfoObjects());
+    PowerMockito.when(config.getFreeDiskStart()).thenReturn(0.49);
+    PowerMockito.when(config.getFreeDiskStop()).thenReturn(0.5);
+    PowerMockito.when(config.getDeliveryThreads()).thenReturn(0);
+    PowerMockito.when(config.getSpoolBase()).thenReturn("tmp");
+    return config;
+  }
+
+  private DestInfo[] createDestInfoObjects() {
+    DestInfo[] destInfos = new DestInfo[1];
+    DestInfo destInfo = new DestInfo("node.datarouternew.com", "spool/s/0/1", "1", "logs/", "/subs/1", "user1", "Basic dXNlcjE6cGFzc3dvcmQx", false, true);
+    destInfos[0] = destInfo;
+    return destInfos;
+  }
+}
diff --git a/datarouter-prov/pom.xml b/datarouter-prov/pom.xml
index 8473141..7613757 100755 (executable)
             <version>1.2.17</version>\r
             <scope>compile</scope>\r
         </dependency>\r
+        <dependency>\r
+            <groupId>com.h2database</groupId>\r
+            <artifactId>h2</artifactId>\r
+            <version>1.4.197</version>\r
+        </dependency>\r
+        <dependency>\r
+            <groupId>org.hibernate</groupId>\r
+            <artifactId>hibernate-entitymanager</artifactId>\r
+            <version>5.2.9.Final</version>\r
+        </dependency>\r
     </dependencies>\r
     <profiles>\r
         <profile>\r
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Group.java
index 2ea60d2..a021a60 100644 (file)
@@ -60,7 +60,7 @@ public class Group extends Syncable {
 \r
     public static Group getGroupMatching(Group gup) {\r
         String sql = String.format(\r
-                "select * from GROUPS where  NAME = \"%s\"",\r
+                "select * from GROUPS where NAME='%s'",\r
                 gup.getName()\r
         );\r
         List<Group> list = getGroupsForSQL(sql);\r
@@ -69,7 +69,7 @@ public class Group extends Syncable {
 \r
     public static Group getGroupMatching(Group gup, int groupid) {\r
         String sql = String.format(\r
-                "select * from GROUPS where  NAME = \"%s\" and GROUPID != %d ",\r
+                "select * from GROUPS where  NAME = '%s' and GROUPID != %d ",\r
                 gup.getName(),\r
                 gup.getGroupid()\r
         );\r
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LOGJSONObject.java
index afb0de2..dec3cc1 100644 (file)
@@ -29,14 +29,7 @@ import java.io.Writer;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
-import java.util.Collection;
-import java.util.Enumeration;
-import java.util.LinkedHashMap;
-import java.util.Iterator;
-import java.util.Locale;
-import java.util.Map;
-import java.util.ResourceBundle;
-import java.util.Set;
+import java.util.*;
 
 import org.json.JSONArray;
 import org.json.JSONException;
@@ -139,6 +132,46 @@ public class LOGJSONObject {
             return object == null || object == this;
         }
 
+        /**
+         * Returns a hash code value for the object. This method is
+         * supported for the benefit of hash tables such as those provided by
+         * {@link HashMap}.
+         * <p>
+         * The general contract of {@code hashCode} is:
+         * <ul>
+         * <li>Whenever it is invoked on the same object more than once during
+         * an execution of a Java application, the {@code hashCode} method
+         * must consistently return the same integer, provided no information
+         * used in {@code equals} comparisons on the object is modified.
+         * This integer need not remain consistent from one execution of an
+         * application to another execution of the same application.
+         * <li>If two objects are equal according to the {@code equals(Object)}
+         * method, then calling the {@code hashCode} method on each of
+         * the two objects must produce the same integer result.
+         * <li>It is <em>not</em> required that if two objects are unequal
+         * according to the {@link Object#equals(Object)}
+         * method, then calling the {@code hashCode} method on each of the
+         * two objects must produce distinct integer results.  However, the
+         * programmer should be aware that producing distinct integer results
+         * for unequal objects may improve the performance of hash tables.
+         * </ul>
+         * <p>
+         * As much as is reasonably practical, the hashCode method defined by
+         * class {@code Object} does return distinct integers for distinct
+         * objects. (This is typically implemented by converting the internal
+         * address of the object into an integer, but this implementation
+         * technique is not required by the
+         * Java&trade; programming language.)
+         *
+         * @return a hash code value for this object.
+         * @see Object#equals(Object)
+         * @see System#identityHashCode
+         */
+        @Override
+        public int hashCode() {
+            return super.hashCode();
+        }
+
         /**
          * Get the "null" string value.
          *
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
index a5281c0..28740c0 100644 (file)
@@ -152,41 +152,40 @@ public class DailyLatencyReport extends ReportBase {
             DB db = new DB();\r
             @SuppressWarnings("resource")\r
             Connection conn = db.getConnection();\r
-            PreparedStatement ps = conn.prepareStatement(SELECT_SQL);\r
-            ps.setLong(1, from);\r
-            ps.setLong(2, to);\r
-            ResultSet rs = ps.executeQuery();\r
-            while (rs.next()) {\r
-                String id   = rs.getString("PUBLISH_ID");\r
-                int feed    = rs.getInt("FEEDID");\r
-                long etime  = rs.getLong("EVENT_TIME");\r
-                String type = rs.getString("TYPE");\r
-                String fid  = rs.getString("FEED_FILEID");\r
-                long clen   = rs.getLong("CONTENT_LENGTH");\r
-                String date = sdf.format(new Date(getPstart(id)));\r
-                String key  = date + "," + feed;\r
-                Counters c = map.get(key);\r
-                if (c == null) {\r
-                    c = new Counters(date, feed);\r
-                    map.put(key, c);\r
+            try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {\r
+                ps.setLong(1, from);\r
+                ps.setLong(2, to);\r
+                try(ResultSet rs = ps.executeQuery()) {\r
+                    while (rs.next()) {\r
+                        String id = rs.getString("PUBLISH_ID");\r
+                        int feed = rs.getInt("FEEDID");\r
+                        long etime = rs.getLong("EVENT_TIME");\r
+                        String type = rs.getString("TYPE");\r
+                        String fid = rs.getString("FEED_FILEID");\r
+                        long clen = rs.getLong("CONTENT_LENGTH");\r
+                        String date = sdf.format(new Date(getPstart(id)));\r
+                        String key = date + "," + feed;\r
+                        Counters c = map.get(key);\r
+                        if (c == null) {\r
+                            c = new Counters(date, feed);\r
+                            map.put(key, c);\r
+                        }\r
+                        c.addEvent(etime, type, id, fid, clen);\r
+                    }\r
                 }\r
-                c.addEvent(etime, type, id, fid, clen);\r
+\r
+                db.release(conn);\r
             }\r
-            rs.close();\r
-            ps.close();\r
-            db.release(conn);\r
         } catch (SQLException e) {\r
             e.printStackTrace();\r
         }\r
         logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");\r
-        try {\r
-            PrintWriter os = new PrintWriter(outfile);\r
+        try (PrintWriter os = new PrintWriter(outfile)){\r
             os.println("date,feedid,minsize,maxsize,avgsize,minlat,maxlat,avglat,fanout");\r
             for (String key : new TreeSet<String>(map.keySet())) {\r
                 Counters c = map.get(key);\r
                 os.println(c.toString());\r
             }\r
-            os.close();\r
         } catch (FileNotFoundException e) {\r
             System.err.println("File cannot be written: "+outfile);\r
         }\r
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
index ba8f15a..549511b 100644 (file)
@@ -145,40 +145,38 @@ public class LatencyReport extends ReportBase {
             DB db = new DB();\r
             @SuppressWarnings("resource")\r
             Connection conn = db.getConnection();\r
-            PreparedStatement ps = conn.prepareStatement(SELECT_SQL);\r
+            try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)){\r
             ps.setLong(1, from);\r
             ps.setLong(2, to);\r
-            ResultSet rs = ps.executeQuery();\r
-            PrintWriter os = new PrintWriter(outfile);\r
-            os.println("recordid,feedid,uri,size,min,max,avg,fanout");\r
-            Counters c = null;\r
-            while (rs.next()) {\r
-                long etime = rs.getLong("EVENT_TIME");\r
-                String type = rs.getString("TYPE");\r
-                String id = rs.getString("PUBLISH_ID");\r
-                String fid = rs.getString("FEED_FILEID");\r
-                int feed = rs.getInt("FEEDID");\r
-                long clen = rs.getLong("CONTENT_LENGTH");\r
-                if (c != null && !id.equals(c.id)) {\r
-                    String line = id + "," + c.toString();\r
-                    os.println(line);\r
-                    c = null;\r
+            try(ResultSet rs = ps.executeQuery()) {\r
+                try(PrintWriter os = new PrintWriter(outfile)) {\r
+                    os.println("recordid,feedid,uri,size,min,max,avg,fanout");\r
+                    Counters c = null;\r
+                    while (rs.next()) {\r
+                        long etime = rs.getLong("EVENT_TIME");\r
+                        String type = rs.getString("TYPE");\r
+                        String id = rs.getString("PUBLISH_ID");\r
+                        String fid = rs.getString("FEED_FILEID");\r
+                        int feed = rs.getInt("FEEDID");\r
+                        long clen = rs.getLong("CONTENT_LENGTH");\r
+                        if (c != null && !id.equals(c.id)) {\r
+                            String line = id + "," + c.toString();\r
+                            os.println(line);\r
+                            c = null;\r
+                        }\r
+                        if (c == null) {\r
+                            c = new Counters(id, feed, clen, fid);\r
+                        }\r
+                        if (feed != c.feedid)\r
+                            System.err.println("Feed ID mismatch, " + feed + " <=> " + c.feedid);\r
+                        if (clen != c.clen)\r
+                            System.err.println("Cont Len mismatch, " + clen + " <=> " + c.clen);\r
+                        c.addEvent(type, etime);\r
+                    }\r
                 }\r
-                if (c == null) {\r
-                    c = new Counters(id, feed, clen, fid);\r
-                }\r
-                if (feed != c.feedid)\r
-                    System.err.println("Feed ID mismatch, " + feed + " <=> " + c.feedid);\r
-                if (clen != c.clen)\r
-                    System.err.println("Cont Len mismatch, " + clen + " <=> " + c.clen);\r
-//                if (fid != c.fileid)\r
-//                    System.err.println("File ID mismatch, "+fid+" <=> "+c.fileid);\r
-                c.addEvent(type, etime);\r
+             db.release(conn);\r
+            }\r
             }\r
-            rs.close();\r
-            ps.close();\r
-            db.release(conn);\r
-            os.close();\r
         } catch (FileNotFoundException e) {\r
             System.err.println("File cannot be written: " + outfile);\r
         } catch (SQLException e) {\r
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
index e00c394..51beac9 100644 (file)
@@ -98,62 +98,61 @@ public class SubscriberReport extends ReportBase {
     public void run() {\r
         Map<String, Counters> map = new HashMap<String, Counters>();\r
         long start = System.currentTimeMillis();\r
+\r
         try {\r
             DB db = new DB();\r
             @SuppressWarnings("resource")\r
             Connection conn = db.getConnection();\r
-            PreparedStatement ps = conn.prepareStatement(SELECT_SQL);\r
-            ps.setLong(1, from);\r
-            ps.setLong(2, to);\r
-            ResultSet rs = ps.executeQuery();\r
-            while (rs.next()) {\r
-                String date = rs.getString("DATE");\r
-                int sub = rs.getInt("DELIVERY_SUBID");\r
-                int res = rs.getInt("RESULT");\r
-                int count = rs.getInt("COUNT");\r
-                String key = date + "," + sub;\r
-                Counters c = map.get(key);\r
-                if (c == null) {\r
-                    c = new Counters(date, sub);\r
-                    map.put(key, c);\r
+            try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {\r
+                ps.setLong(1, from);\r
+                ps.setLong(2, to);\r
+                try(ResultSet rs = ps.executeQuery()) {\r
+                    while (rs.next()) {\r
+                        String date = rs.getString("DATE");\r
+                        int sub = rs.getInt("DELIVERY_SUBID");\r
+                        int res = rs.getInt("RESULT");\r
+                        int count = rs.getInt("COUNT");\r
+                        String key = date + "," + sub;\r
+                        Counters c = map.get(key);\r
+                        if (c == null) {\r
+                            c = new Counters(date, sub);\r
+                            map.put(key, c);\r
+                        }\r
+                        c.addCounts(res, count);\r
+                    }\r
                 }\r
-                c.addCounts(res, count);\r
             }\r
-            rs.close();\r
-            ps.close();\r
 \r
-            ps = conn.prepareStatement(SELECT_SQL2);\r
-            ps.setLong(1, from);\r
-            ps.setLong(2, to);\r
-            rs = ps.executeQuery();\r
-            while (rs.next()) {\r
-                String date = rs.getString("DATE");\r
-                int sub = rs.getInt("DELIVERY_SUBID");\r
-                int count = rs.getInt("COUNT");\r
-                String key = date + "," + sub;\r
-                Counters c = map.get(key);\r
-                if (c == null) {\r
-                    c = new Counters(date, sub);\r
-                    map.put(key, c);\r
-                }\r
-                c.addDlxCount(count);\r
-            }\r
-            rs.close();\r
-            ps.close();\r
+           try( PreparedStatement ps2 = conn.prepareStatement(SELECT_SQL2)) {\r
+               ps2.setLong(1, from);\r
+               ps2.setLong(2, to);\r
+               try(ResultSet rs2 = ps2.executeQuery()) {\r
+                   while (rs2.next()) {\r
+                       String date = rs2.getString("DATE");\r
+                       int sub = rs2.getInt("DELIVERY_SUBID");\r
+                       int count = rs2.getInt("COUNT");\r
+                       String key = date + "," + sub;\r
+                       Counters c = map.get(key);\r
+                       if (c == null) {\r
+                           c = new Counters(date, sub);\r
+                           map.put(key, c);\r
+                       }\r
+                       c.addDlxCount(count);\r
+                   }\r
+                  }\r
+           }\r
 \r
             db.release(conn);\r
         } catch (SQLException e) {\r
             e.printStackTrace();\r
         }\r
         logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");\r
-        try {\r
-            PrintWriter os = new PrintWriter(outfile);\r
+        try (PrintWriter os = new PrintWriter(outfile)){\r
             os.println("date,subid,count100,count200,count300,count400,count500,countminus1,countdlx");\r
             for (String key : new TreeSet<String>(map.keySet())) {\r
                 Counters c = map.get(key);\r
                 os.println(c.toString());\r
             }\r
-            os.close();\r
         } catch (FileNotFoundException e) {\r
             System.err.println("File cannot be written: " + outfile);\r
         }\r
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
index 169db0d..34e158a 100644 (file)
@@ -36,6 +36,7 @@ import java.util.HashMap;
 import java.util.Map;\r
 import java.util.TreeSet;\r
 \r
+import org.apache.log4j.Logger;\r
 import org.onap.dmaap.datarouter.provisioning.utils.DB;\r
 \r
 /**\r
@@ -57,7 +58,7 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB;
 public class VolumeReport extends ReportBase {\r
     private static final String SELECT_SQL = "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT" +\r
             " from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ? LIMIT ?, ?";\r
-\r
+    private Logger loggerVolumeReport=Logger.getLogger("org.onap.dmaap.datarouter.reports");\r
     private class Counters {\r
         public int filespublished, filesdelivered, filesexpired;\r
         public long bytespublished, bytesdelivered, bytesexpired;\r
@@ -83,58 +84,64 @@ public class VolumeReport extends ReportBase {
             final long stepsize = 6000000L;\r
             boolean go_again = true;\r
             for (long i = 0; go_again; i += stepsize) {\r
-                PreparedStatement ps = conn.prepareStatement(SELECT_SQL);\r
-                ps.setLong(1, from);\r
-                ps.setLong(2, to);\r
-                ps.setLong(3, i);\r
-                ps.setLong(4, stepsize);\r
-                ResultSet rs = ps.executeQuery();\r
-                go_again = false;\r
-                while (rs.next()) {\r
-                    go_again = true;\r
-                    long etime = rs.getLong("EVENT_TIME");\r
-                    String type = rs.getString("TYPE");\r
-                    int feed = rs.getInt("FEEDID");\r
-                    long clen = rs.getLong("CONTENT_LENGTH");\r
-                    String key = sdf.format(new Date(etime)) + ":" + feed;\r
-                    Counters c = map.get(key);\r
-                    if (c == null) {\r
-                        c = new Counters();\r
-                        map.put(key, c);\r
-                    }\r
-                    if (type.equalsIgnoreCase("pub")) {\r
-                        c.filespublished++;\r
-                        c.bytespublished += clen;\r
-                    } else if (type.equalsIgnoreCase("del")) {\r
-                        // Only count successful deliveries\r
-                        int statusCode = rs.getInt("RESULT");\r
-                        if (statusCode >= 200 && statusCode < 300) {\r
-                            c.filesdelivered++;\r
-                            c.bytesdelivered += clen;\r
+                try (PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {\r
+                    ps.setLong(1, from);\r
+                    ps.setLong(2, to);\r
+                    ps.setLong(3, i);\r
+                    ps.setLong(4, stepsize);\r
+                    try(ResultSet rs = ps.executeQuery()) {\r
+                        go_again = false;\r
+                        while (rs.next()) {\r
+                            go_again = true;\r
+                            long etime = rs.getLong("EVENT_TIME");\r
+                            String type = rs.getString("TYPE");\r
+                            int feed = rs.getInt("FEEDID");\r
+                            long clen = rs.getLong("CONTENT_LENGTH");\r
+                            String key = sdf.format(new Date(etime)) + ":" + feed;\r
+                            Counters c = map.get(key);\r
+                            if (c == null) {\r
+                                c = new Counters();\r
+                                map.put(key, c);\r
+                            }\r
+                            if (type.equalsIgnoreCase("pub")) {\r
+                                c.filespublished++;\r
+                                c.bytespublished += clen;\r
+                            } else if (type.equalsIgnoreCase("del")) {\r
+                                // Only count successful deliveries\r
+                                int statusCode = rs.getInt("RESULT");\r
+                                if (statusCode >= 200 && statusCode < 300) {\r
+                                    c.filesdelivered++;\r
+                                    c.bytesdelivered += clen;\r
+                                }\r
+                            } else if (type.equalsIgnoreCase("exp")) {\r
+                                c.filesexpired++;\r
+                                c.bytesexpired += clen;\r
+                            }\r
                         }\r
-                    } else if (type.equalsIgnoreCase("exp")) {\r
-                        c.filesexpired++;\r
-                        c.bytesexpired += clen;\r
                     }\r
+\r
+                }\r
+                catch (SQLException sqlException)\r
+                {\r
+                    loggerVolumeReport.error("SqlException",sqlException);\r
                 }\r
-                rs.close();\r
-                ps.close();\r
             }\r
+\r
             db.release(conn);\r
         } catch (SQLException e) {\r
             e.printStackTrace();\r
         }\r
         logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");\r
-        try {\r
-            PrintWriter os = new PrintWriter(outfile);\r
+        try (PrintWriter os = new PrintWriter(outfile)) {\r
             os.println("date,feedid,filespublished,bytespublished,filesdelivered,bytesdelivered,filesexpired,bytesexpired");\r
-            for (String key : new TreeSet<String>(map.keySet())) {\r
+            for(String key :new TreeSet<String>(map.keySet()))\r
+            {\r
                 Counters c = map.get(key);\r
                 String[] p = key.split(":");\r
                 os.println(String.format("%s,%s,%s", p[0], p[1], c.toString()));\r
             }\r
-            os.close();\r
-        } catch (FileNotFoundException e) {\r
+        }\r
+        catch (FileNotFoundException e) {\r
             System.err.println("File cannot be written: " + outfile);\r
         }\r
     }\r
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java
index 414fc18..c7f639e 100644 (file)
@@ -38,8 +38,8 @@ public class DrServletTestBase {
     public void setUp() throws Exception {
         Properties props = new Properties();
         props.setProperty("org.onap.dmaap.datarouter.provserver.isaddressauthenabled", "false");
-        props.setProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir", "datarouter-prov/unit-test-logs");
-        props.setProperty("org.onap.dmaap.datarouter.provserver.spooldir", "resources/spooldir");
+        props.setProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir", "unit-test-logs");
+        props.setProperty("org.onap.dmaap.datarouter.provserver.spooldir", "unit-test-logs/spool");
         props.setProperty("org.onap.dmaap.datarouter.provserver.https.relaxation", "false");
         FieldUtils.writeDeclaredStaticField(DB.class, "props", props, true);
         FieldUtils.writeDeclaredStaticField(BaseServlet.class, "startmsgFlag", false, true);
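The base class seeds provisioning configuration by writing private static fields reflectively via commons-lang3. A minimal hedged sketch of that technique (the Holder class stands in for DB; the property key is taken from the diff above):

    import org.apache.commons.lang3.reflect.FieldUtils;
    import java.util.Properties;

    public class StaticFieldInjectionSketch {
        static class Holder {
            private static Properties props; // stands in for DB.props
        }

        public static void main(String[] args) throws IllegalAccessException {
            Properties props = new Properties();
            props.setProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir", "unit-test-logs");
            // forceAccess=true lets FieldUtils write a private static field,
            // which is how DrServletTestBase seeds DB.props without a real
            // provisioning config file on disk.
            FieldUtils.writeDeclaredStaticField(Holder.class, "props", props, true);
            System.out.println(Holder.props.getProperty(
                "org.onap.dmaap.datarouter.provserver.accesslog.dir"));
        }
    }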
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/InternalServletTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/InternalServletTest.java
old mode 100755 (executable)
new mode 100644 (file)
index 97900d4..5f6b7ae
@@ -31,6 +31,7 @@ import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 import static org.onap.dmaap.datarouter.provisioning.BaseServlet.BEHALF_HEADER;
 
+import java.io.File;
 import java.net.InetAddress;
 import java.util.HashMap;
 import java.util.Map;
@@ -153,6 +154,19 @@ public class InternalServletTest extends DrServletTestBase {
         .sendError(eq(HttpServletResponse.SC_NO_CONTENT), argThat(notNullValue(String.class)));
   }
 
+  @Test
+  public void Given_Request_Is_HTTP_GET_Starts_With_Logs_In_Endpoint_And_File_Exists_Then_Request_Returns_Ok()
+      throws Exception {
+    when(request.getPathInfo()).thenReturn("/logs/testFile.txt");
+    File testFile = new File("unit-test-logs/testFile.txt");
+    testFile.createNewFile();
+    testFile.deleteOnExit();
+    ServletOutputStream outStream = mock(ServletOutputStream.class);
+    when(response.getOutputStream()).thenReturn(outStream);
+    internalServlet.doGet(request, response);
+    verify(response).setStatus(eq(HttpServletResponse.SC_OK));
+  }
+
   @Test
   public void Given_Request_Is_HTTP_GET_With_Api_In_Endpoint_Request_Succeeds() throws Exception {
     when(request.getPathInfo()).thenReturn("/api/Key");
@@ -167,10 +181,7 @@ public class InternalServletTest extends DrServletTestBase {
   public void Given_Request_Is_HTTP_GET_With_Drlogs_In_Endpoint_Request_Succeeds()
       throws Exception {
     when(request.getPathInfo()).thenReturn("/drlogs/");
-    PowerMockito.mockStatic(LogfileLoader.class);
-    LogfileLoader logfileLoader = mock(LogfileLoader.class);
-    when(logfileLoader.getBitSet()).thenReturn(new RLEBitSet());
-    PowerMockito.when(LogfileLoader.getLoader()).thenReturn(logfileLoader);
+    mockLogfileLoader();
     ServletOutputStream outStream = mock(ServletOutputStream.class);
     when(response.getOutputStream()).thenReturn(outStream);
     internalServlet.doGet(request, response);
@@ -331,6 +342,21 @@ public class InternalServletTest extends DrServletTestBase {
     verify(response).setStatus(eq(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE));
   }
 
+  @Test
+  public void Given_Request_Is_HTTP_POST_To_Logs_Then_Request_Succeeds()
+      throws Exception {
+    when(request.getHeader("Content-Encoding")).thenReturn("gzip");
+    when(request.getPathInfo()).thenReturn("/logs/");
+    ServletInputStream inStream = mock(ServletInputStream.class);
+    when(request.getInputStream()).thenReturn(inStream);
+    File testDir = new File("unit-test-logs/spool");
+    testDir.mkdirs();
+    testDir.deleteOnExit();
+    mockLogfileLoader();
+    internalServlet.doPost(request, response);
+    verify(response).setStatus(eq(HttpServletResponse.SC_CREATED));
+  }
+
   @Test
   public void Given_Request_Is_HTTP_POST_To_Drlogs_And_Then_Unsupported_Media_Type_Response_Is_Generated()
       throws Exception {
@@ -452,4 +478,11 @@ public class InternalServletTest extends DrServletTestBase {
     Map<String, Integer> map = new HashMap<>();
     FieldUtils.writeDeclaredStaticField(NodeClass.class, "map", map, true);
   }
+
+  private void mockLogfileLoader() {
+    PowerMockito.mockStatic(LogfileLoader.class);
+    LogfileLoader logfileLoader = mock(LogfileLoader.class);
+    when(logfileLoader.getBitSet()).thenReturn(new RLEBitSet());
+    PowerMockito.when(LogfileLoader.getLoader()).thenReturn(logfileLoader);
+  }
 }
diff --git a/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/GroupTest.java b/datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/beans/GroupTest.java
index 098765c..91d72af 100644 (file)
  ******************************************************************************/
 package org.onap.dmaap.datarouter.provisioning.beans;
 
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
-import org.powermock.modules.junit4.PowerMockRunner;
+import org.junit.*;
+import org.onap.dmaap.datarouter.provisioning.utils.DB;
 
+import javax.persistence.EntityManager;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Persistence;
+import java.util.Collection;
 import java.util.Date;
+import java.util.List;
 
-
-@RunWith(PowerMockRunner.class)
-@SuppressStaticInitializationFor({"org.onap.dmaap.datarouter.provisioning.beans.Group"})
 public class GroupTest {
-    private Group group;
+  private static EntityManagerFactory emf;
+  private static EntityManager em;
+  private Group group;
+  private DB db;
+
+  @BeforeClass
+  public static void init() {
+    emf = Persistence.createEntityManagerFactory("dr-unit-tests");
+    em = emf.createEntityManager();
+    System.setProperty(
+        "org.onap.dmaap.datarouter.provserver.properties",
+        "src/test/resources/h2Database.properties");
+  }
+
+  @AfterClass
+  public static void tearDownClass() {
+    em.clear();
+    em.close();
+    emf.close();
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    db = new DB();
+    group = new Group("GroupTest", "", "");
+    group.doInsert(db.getConnection());
+  }
+
+  @Test
+  public void Given_Group_Exists_In_Db_GetAllGroups_Returns_Correct_Group() {
+    Collection<Group> groups = Group.getAllgroups();
+    Assert.assertEquals("Group1", ((List<Group>) groups).get(0).getName());
+  }
+
+  @Test
+  public void Given_Group_Inserted_Into_Db_GetGroupMatching_Returns_Created_Group() {
+    Assert.assertEquals(group, Group.getGroupMatching(group));
+  }
+
+  @Test
+  public void Given_Group_Inserted_With_Same_Name_GetGroupMatching_With_Id_Returns_Correct_Group()
+      throws Exception {
+    Group sameGroupName = new Group("GroupTest", "This group has a description", "");
+    sameGroupName.doInsert(db.getConnection());
+    Assert.assertEquals(
+        "This group has a description", Group.getGroupMatching(group, 2).getDescription());
+    sameGroupName.doDelete(db.getConnection());
+  }
 
-    @Test
-    public void Validate_Group_Created_With_Default_Contructor() {
-        group = new Group();
-        Assert.assertEquals(group.getGroupid(), -1);
-        Assert.assertEquals(group.getName(), "");
-    }
+  @Test
+  public void Given_Group_Inserted_GetGroupById_Returns_Correct_Group() {
+    Assert.assertEquals(group, Group.getGroupById(group.getGroupid()));
+  }
 
-    @Test
-    public void Validate_Getters_And_Setters() {
-        group = new Group();
-        group.setGroupid(1);
-        group.setAuthid("Auth");
-        group.setClassification("Class");
-        group.setDescription("Description");
-        Date date = new Date();
-        group.setLast_mod(date);
-        group.setMembers("Members");
-        group.setName("NewName");
-        Assert.assertEquals(1, group.getGroupid());
-        Assert.assertEquals("Auth", group.getAuthid());
-        Assert.assertEquals("Class", group.getClassification());
-        Assert.assertEquals("Description", group.getDescription());
-        Assert.assertEquals(date, group.getLast_mod());
-        Assert.assertEquals("Members", group.getMembers());
-    }
+  @Test
+  public void Given_Group_AuthId_Updated_GetGroupByAuthId_Returns_Correct_Group() throws Exception {
+    group.setAuthid("Basic TmFtZTp6Z04wMFkyS3gybFppbXltNy94ZDhuMkdEYjA9");
+    group.doUpdate(db.getConnection());
+    Assert.assertEquals(group, Group.getGroupByAuthId("Basic TmFtZTp6Z04wMFkyS3gybFppbXltNy94ZDhuMkdEYjA9"));
+  }
 
-    @Test
-    public void Validate_Equals() {
-        group = new Group();
-        group.setGroupid(1);
-        group.setAuthid("Auth");
-        group.setClassification("Class");
-        group.setDescription("Description");
-        Date date = new Date();
-        group.setLast_mod(date);
-        group.setMembers("Members");
-        group.setName("NewName");
-        Group group2 = new Group("NewName", "Description", "Members");
-        group2.setGroupid(1);
-        group2.setAuthid("Auth");
-        group2.setClassification("Class");
-        group2.setLast_mod(date);
-        Assert.assertEquals(group, group2);
-    }
+  @After
+  public void tearDown() throws Exception {
+    group.doDelete(db.getConnection());
+  }
 }
diff --git a/datarouter-prov/src/test/resources/META-INF/persistence.xml b/datarouter-prov/src/test/resources/META-INF/persistence.xml
new file mode 100755 (executable)
index 0000000..6b42f8a
--- /dev/null
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<persistence version="2.1" xmlns="http://xmlns.jcp.org/xml/ns/persistence"
+             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+             xsi:schemaLocation="http://xmlns.jcp.org/xml/ns/persistence
+                                 http://xmlns.jcp.org/xml/ns/persistence/persistence_2_1.xsd">
+    <persistence-unit name="dr-unit-tests" transaction-type="RESOURCE_LOCAL">
+        <provider>org.hibernate.jpa.HibernatePersistenceProvider</provider>
+        <properties>
+            <!-- Configuring JDBC properties -->
+            <property name="javax.persistence.jdbc.url" value="jdbc:h2:mem:test;MODE=MySQL;INIT=RUNSCRIPT FROM 'classpath:create.sql';DB_CLOSE_DELAY=-1"/>
+            <property name="javax.persistence.jdbc.driver" value="org.h2.Driver"/>
+
+            <!-- Hibernate properties -->
+            <property name="hibernate.dialect" value="org.hibernate.dialect.H2Dialect"/>
+            <property name="hibernate.hbm2ddl.auto" value="validate"/>
+            <property name="hibernate.format_sql" value="false"/>
+            <property name="hibernate.show_sql" value="true"/>
+
+        </properties>
+    </persistence-unit>
+</persistence>
\ No newline at end of file
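This persistence unit is what GroupTest's init() bootstraps: creating the factory fires the JDBC URL above, so H2 starts in-memory in MySQL compatibility mode and runs create.sql before any test touches the database. A condensed hedged sketch of the wiring (the standalone main is illustrative; the calls mirror GroupTest):

    import javax.persistence.EntityManager;
    import javax.persistence.EntityManagerFactory;
    import javax.persistence.Persistence;

    public class H2BootstrapSketch {
        public static void main(String[] args) {
            // Creating the factory starts H2 and executes create.sql via
            // INIT=RUNSCRIPT, so the schema exists up front.
            EntityManagerFactory emf = Persistence.createEntityManagerFactory("dr-unit-tests");
            EntityManager em = emf.createEntityManager();
            // Point the provisioning DB class at the matching JDBC settings
            // (same pattern as GroupTest.init()).
            System.setProperty(
                "org.onap.dmaap.datarouter.provserver.properties",
                "src/test/resources/h2Database.properties");
            em.close();
            emf.close();
        }
    }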
diff --git a/datarouter-prov/src/test/resources/create.sql b/datarouter-prov/src/test/resources/create.sql
new file mode 100755 (executable)
index 0000000..6e6af1d
--- /dev/null
@@ -0,0 +1,146 @@
+CREATE TABLE FEEDS (
+    FEEDID         INT UNSIGNED NOT NULL PRIMARY KEY,
+    GROUPID        INT(10) UNSIGNED NOT NULL DEFAULT 0,
+    NAME           VARCHAR(255) NOT NULL,
+    VERSION        VARCHAR(20) NOT NULL,
+    DESCRIPTION    VARCHAR(1000),
+    BUSINESS_DESCRIPTION VARCHAR(1000) DEFAULT NULL,
+    AUTH_CLASS     VARCHAR(32) NOT NULL,
+    PUBLISHER      VARCHAR(8) NOT NULL,
+    SELF_LINK      VARCHAR(256),
+    PUBLISH_LINK   VARCHAR(256),
+    SUBSCRIBE_LINK VARCHAR(256),
+    LOG_LINK       VARCHAR(256),
+    DELETED        BOOLEAN DEFAULT FALSE,
+    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    SUSPENDED      BOOLEAN DEFAULT FALSE,
+    CREATED_DATE   TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE TABLE FEED_ENDPOINT_IDS (
+    FEEDID        INT UNSIGNED NOT NULL,
+    USERID        VARCHAR(20) NOT NULL,
+    PASSWORD      VARCHAR(32) NOT NULL
+);
+
+CREATE TABLE FEED_ENDPOINT_ADDRS (
+    FEEDID        INT UNSIGNED NOT NULL,
+    ADDR          VARCHAR(44) NOT NULL
+);
+
+CREATE TABLE SUBSCRIPTIONS (
+    SUBID              INT UNSIGNED NOT NULL PRIMARY KEY,
+    FEEDID             INT UNSIGNED NOT NULL,
+    GROUPID            INT(10) UNSIGNED NOT NULL DEFAULT 0,
+    DELIVERY_URL       VARCHAR(256),
+    DELIVERY_USER      VARCHAR(20),
+    DELIVERY_PASSWORD  VARCHAR(32),
+    DELIVERY_USE100    BOOLEAN DEFAULT FALSE,
+    METADATA_ONLY      BOOLEAN DEFAULT FALSE,
+    SUBSCRIBER         VARCHAR(8) NOT NULL,
+    SELF_LINK          VARCHAR(256),
+    LOG_LINK           VARCHAR(256),
+    LAST_MOD           TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    SUSPENDED          BOOLEAN DEFAULT FALSE,
+    CREATED_DATE       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+
+);
+
+CREATE TABLE PARAMETERS (
+    KEYNAME        VARCHAR(32) NOT NULL PRIMARY KEY,
+    VALUE          VARCHAR(4096) NOT NULL
+);
+
+CREATE TABLE LOG_RECORDS (
+    TYPE           ENUM('pub', 'del', 'exp', 'pbf', 'dlx') NOT NULL,
+    EVENT_TIME     BIGINT NOT NULL,           /* time of the publish request */
+    PUBLISH_ID     VARCHAR(64) NOT NULL,      /* unique ID assigned to this publish attempt */
+    FEEDID         INT UNSIGNED NOT NULL,     /* pointer to feed in FEEDS */
+    REQURI         VARCHAR(256) NOT NULL,     /* request URI */
+    METHOD         ENUM('DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'TRACE') NOT NULL, /* HTTP method */
+    CONTENT_TYPE   VARCHAR(256) NOT NULL,     /* content type of published file */
+    CONTENT_LENGTH BIGINT NOT NULL,  /* content length of published file */
+
+    FEED_FILEID    VARCHAR(256),        /* file ID of published file */
+    REMOTE_ADDR    VARCHAR(40),         /* IP address of publishing endpoint */
+    USER           VARCHAR(50),         /* user name of publishing endpoint */
+    STATUS         SMALLINT,            /* status code returned to delivering agent */
+
+    DELIVERY_SUBID INT UNSIGNED,        /* pointer to subscription in SUBSCRIPTIONS */
+    DELIVERY_FILEID  VARCHAR(256),      /* file ID of file being delivered */
+    RESULT         SMALLINT,            /* result received from subscribing agent */
+
+    ATTEMPTS       INT,             /* deliveries attempted */
+    REASON         ENUM('notRetryable', 'retriesExhausted', 'diskFull', 'other'),
+
+    RECORD_ID      BIGINT UNSIGNED NOT NULL PRIMARY KEY, /* unique ID for this record */
+    CONTENT_LENGTH_2 BIGINT,
+
+    INDEX (FEEDID) USING BTREE,
+    INDEX (DELIVERY_SUBID) USING BTREE,
+    INDEX (RECORD_ID) USING BTREE
+) ENGINE = MyISAM;
+
+CREATE TABLE INGRESS_ROUTES (
+    SEQUENCE  INT UNSIGNED NOT NULL,
+    FEEDID    INT UNSIGNED NOT NULL,
+    USERID    VARCHAR(20),
+    SUBNET    VARCHAR(44),
+    NODESET   INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE EGRESS_ROUTES (
+    SUBID    INT UNSIGNED NOT NULL PRIMARY KEY,
+    NODEID   INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NETWORK_ROUTES (
+    FROMNODE INT UNSIGNED NOT NULL,
+    TONODE   INT UNSIGNED NOT NULL,
+    VIANODE  INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NODESETS (
+    SETID   INT UNSIGNED NOT NULL,
+    NODEID  INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NODES (
+    NODEID  INT UNSIGNED NOT NULL PRIMARY KEY,
+    NAME    VARCHAR(255) NOT NULL,
+    ACTIVE  BOOLEAN DEFAULT TRUE
+);
+
+CREATE TABLE GROUPS (
+    GROUPID        INT UNSIGNED NOT NULL PRIMARY KEY,
+    AUTHID         VARCHAR(100) NOT NULL,
+    NAME           VARCHAR(50) NOT NULL,
+    DESCRIPTION    VARCHAR(255),
+    CLASSIFICATION VARCHAR(20) NOT NULL,
+    MEMBERS        TINYTEXT,
+    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+INSERT INTO PARAMETERS VALUES
+    ('ACTIVE_POD',  'dmaap-dr-prov'),
+    ('PROV_ACTIVE_NAME',  'dmaap-dr-prov'),
+    ('STANDBY_POD', ''),
+    ('PROV_NAME',   'dmaap-dr-prov'),
+    ('NODES',       'dmaap-dr-node'),
+    ('PROV_DOMAIN', ''),
+    ('DELIVERY_INIT_RETRY_INTERVAL', '10'),
+    ('DELIVERY_MAX_AGE', '86400'),
+    ('DELIVERY_MAX_RETRY_INTERVAL', '3600'),
+    ('DELIVERY_RETRY_RATIO', '2'),
+    ('LOGROLL_INTERVAL', '300'),
+    ('PROV_AUTH_ADDRESSES', 'dmaap-dr-prov|dmaap-dr-node'),
+    ('PROV_AUTH_SUBJECTS', ''),
+    ('PROV_MAXFEED_COUNT',  '10000'),
+    ('PROV_MAXSUB_COUNT',   '100000'),
+    ('PROV_REQUIRE_CERT', 'false'),
+    ('PROV_REQUIRE_SECURE', 'false'),
+    ('_INT_VALUES', 'LOGROLL_INTERVAL|PROV_MAXFEED_COUNT|PROV_MAXSUB_COUNT|DELIVERY_INIT_RETRY_INTERVAL|DELIVERY_MAX_RETRY_INTERVAL|DELIVERY_RETRY_RATIO|DELIVERY_MAX_AGE')
+    ;
+
+INSERT INTO GROUPS(GROUPID, AUTHID, NAME, DESCRIPTION, CLASSIFICATION, MEMBERS)
+VALUES (1, 'Basic dXNlcjE6cGFzc3dvcmQx', 'Group1', 'First Group for testing', 'Class1', 'Member1');
diff --git a/datarouter-prov/src/test/resources/h2Database.properties b/datarouter-prov/src/test/resources/h2Database.properties
new file mode 100755 (executable)
index 0000000..5bc20ed
--- /dev/null
@@ -0,0 +1,26 @@
+#-------------------------------------------------------------------------------
+# ============LICENSE_START==================================================
+# * org.onap.dmaap
+# * ===========================================================================
+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# * ===========================================================================
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# *
+#  *      http://www.apache.org/licenses/LICENSE-2.0
+# *
+#  * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# * ============LICENSE_END====================================================
+# *
+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+# *
+#-------------------------------------------------------------------------------
+
+# Database access
+org.onap.dmaap.datarouter.db.driver   = org.h2.Driver
+org.onap.dmaap.datarouter.db.url      = jdbc:h2:mem:test;DB_CLOSE_DELAY=-1