From: Ram Koya
Date: Thu, 30 Aug 2018 16:02:47 +0000 (+0000)
Subject: Merge "Add Tests to InternalServletTest"
X-Git-Tag: 1.0.1~36
X-Git-Url: https://gerrit.onap.org/r/gitweb?a=commitdiff_plain;h=bdeb5f44dc6811518884228c2ae4397a9323c9f4;hp=5fc125c5159ae84e211ff2d8b03cd43475493142;p=dmaap%2Fdatarouter.git

Merge "Add Tests to InternalServletTest"
---

diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java
new file mode 100644
index 00000000..8a7460e8
--- /dev/null
+++ b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/DeliveryTest.java
@@ -0,0 +1,104 @@
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ * *      http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+package org.onap.dmaap.datarouter.node;
+
+import org.apache.commons.lang3.reflect.FieldUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Hashtable;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+@RunWith(PowerMockRunner.class)
+@SuppressStaticInitializationFor("org.onap.dmaap.datarouter.node.NodeConfigManager")
+public class DeliveryTest {
+
+    @Mock
+    private DeliveryQueue deliveryQueue;
+
+    private File nDir = new File("tmp/n");
+    private File sDir = new File("tmp/s");
+
+    @Before
+    public void setUp() throws IOException {
+        nDir.mkdirs();
+        sDir.mkdirs();
+        File newNDir = new File("tmp/n/0");
+        newNDir.mkdirs();
+        File newNFile = new File("tmp/n/0/testN.txt");
+        newNFile.createNewFile();
+        File newSDir = new File("tmp/s/0/1");
+        newSDir.mkdirs();
+        File newSpoolFile = new File("tmp/s/0/1/testSpool.txt");
+        newSpoolFile.createNewFile();
+    }
+
+    @Test
+    public void Validate_Reset_Queue_Calls_Reset_Queue_On_Delivery_Queue_Object() throws IllegalAccessException {
+        NodeConfigManager config = mockNodeConfigManager();
+        Delivery delivery = new Delivery(config);
+        Hashtable dqs = new Hashtable<>();
+        dqs.put("spool/s/0/1", deliveryQueue);
+        FieldUtils.writeDeclaredField(delivery, "dqs", dqs, true);
+        delivery.resetQueue("spool/s/0/1");
+        verify(deliveryQueue, times(1)).resetQueue();
+    }
+
+    @After
+    public void tearDown() {
+        nDir.delete();
+        sDir.delete();
+        File tmpDir = new File("tmp");
+        tmpDir.delete();
+    }
+
+    private NodeConfigManager mockNodeConfigManager() {
+        PowerMockito.mockStatic(NodeConfigManager.class);
+        NodeConfigManager config = mock(NodeConfigManager.class);
+        PowerMockito.when(config.isConfigured()).thenReturn(true);
+        PowerMockito.when(config.getAllDests()).thenReturn(createDestInfoObjects());
+        PowerMockito.when(config.getFreeDiskStart()).thenReturn(0.49);
+        PowerMockito.when(config.getFreeDiskStop()).thenReturn(0.5);
+        PowerMockito.when(config.getDeliveryThreads()).thenReturn(0);
+        PowerMockito.when(config.getSpoolBase()).thenReturn("tmp");
+        return config;
+    }
+
+    private DestInfo[] createDestInfoObjects() {
+        DestInfo[] destInfos = new DestInfo[1];
+        DestInfo destInfo = new DestInfo("node.datarouternew.com", "spool/s/0/1", "1", "logs/", "/subs/1", "user1", "Basic dXNlcjE6cGFzc3dvcmQx", false, true);
+        destInfos[0] = destInfo;
+        return destInfos;
+    }
+}
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
index a5281c06..28740c0f 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
@@ -152,41 +152,40 @@ public class DailyLatencyReport extends ReportBase {
             DB db = new DB();
             @SuppressWarnings("resource")
             Connection conn = db.getConnection();
-            PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
-            ps.setLong(1, from);
-            ps.setLong(2, to);
-            ResultSet rs = ps.executeQuery();
-            while (rs.next()) {
-                String id = rs.getString("PUBLISH_ID");
-                int feed = rs.getInt("FEEDID");
-                long etime = rs.getLong("EVENT_TIME");
-                String type = rs.getString("TYPE");
-                String fid = rs.getString("FEED_FILEID");
-                long clen = rs.getLong("CONTENT_LENGTH");
-                String date = sdf.format(new Date(getPstart(id)));
-                String key = date + "," + feed;
-                Counters c = map.get(key);
-                if (c == null) {
-                    c = new Counters(date, feed);
-                    map.put(key, c);
+            try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {
+                ps.setLong(1, from);
+                ps.setLong(2, to);
+                try(ResultSet rs = ps.executeQuery()) {
+                    while (rs.next()) {
+                        String id = rs.getString("PUBLISH_ID");
+                        int feed = rs.getInt("FEEDID");
+                        long etime = rs.getLong("EVENT_TIME");
+                        String type = rs.getString("TYPE");
+                        String fid = rs.getString("FEED_FILEID");
+                        long clen = rs.getLong("CONTENT_LENGTH");
+                        String date = sdf.format(new Date(getPstart(id)));
+                        String key = date + "," + feed;
+                        Counters c = map.get(key);
+                        if (c == null) {
+                            c = new Counters(date, feed);
+                            map.put(key, c);
+                        }
+                        c.addEvent(etime, type, id, fid, clen);
+                    }
                 }
-                c.addEvent(etime, type, id, fid, clen);
+
+                db.release(conn);
             }
-            rs.close();
-            ps.close();
-            db.release(conn);
         } catch (SQLException e) {
             e.printStackTrace();
         }
         logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");
-        try {
-            PrintWriter os = new PrintWriter(outfile);
+        try (PrintWriter os = new PrintWriter(outfile)){
             os.println("date,feedid,minsize,maxsize,avgsize,minlat,maxlat,avglat,fanout");
             for (String key : new TreeSet(map.keySet())) {
                 Counters c = map.get(key);
                 os.println(c.toString());
             }
-            os.close();
         } catch (FileNotFoundException e) {
             System.err.println("File cannot be written: "+outfile);
         }
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
index ba8f15a0..549511b7 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
@@ -145,40 +145,38 @@ public class LatencyReport extends ReportBase {
         DB db = new DB();
         @SuppressWarnings("resource")
         Connection conn = db.getConnection();
-            PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
+            try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)){
             ps.setLong(1, from);
             ps.setLong(2, to);
-            ResultSet rs = ps.executeQuery();
-            PrintWriter os = new PrintWriter(outfile);
-            os.println("recordid,feedid,uri,size,min,max,avg,fanout");
-            Counters c = null;
-            while (rs.next()) {
-                long etime = rs.getLong("EVENT_TIME");
-                String type = rs.getString("TYPE");
-                String id = rs.getString("PUBLISH_ID");
-                String fid = rs.getString("FEED_FILEID");
-                int feed = rs.getInt("FEEDID");
-                long clen = rs.getLong("CONTENT_LENGTH");
-                if (c != null && !id.equals(c.id)) {
-                    String line = id + "," + c.toString();
-                    os.println(line);
-                    c = null;
+            try(ResultSet rs = ps.executeQuery()) {
+                try(PrintWriter os = new PrintWriter(outfile)) {
+                    os.println("recordid,feedid,uri,size,min,max,avg,fanout");
+                    Counters c = null;
+                    while (rs.next()) {
+                        long etime = rs.getLong("EVENT_TIME");
+                        String type = rs.getString("TYPE");
+                        String id = rs.getString("PUBLISH_ID");
+                        String fid = rs.getString("FEED_FILEID");
+                        int feed = rs.getInt("FEEDID");
+                        long clen = rs.getLong("CONTENT_LENGTH");
+                        if (c != null && !id.equals(c.id)) {
+                            String line = id + "," + c.toString();
+                            os.println(line);
+                            c = null;
+                        }
+                        if (c == null) {
+                            c = new Counters(id, feed, clen, fid);
+                        }
+                        if (feed != c.feedid)
+                            System.err.println("Feed ID mismatch, " + feed + " <=> " + c.feedid);
+                        if (clen != c.clen)
+                            System.err.println("Cont Len mismatch, " + clen + " <=> " + c.clen);
+                        c.addEvent(type, etime);
+                    }
                 }
-                if (c == null) {
-                    c = new Counters(id, feed, clen, fid);
-                }
-                if (feed != c.feedid)
-                    System.err.println("Feed ID mismatch, " + feed + " <=> " + c.feedid);
-                if (clen != c.clen)
-                    System.err.println("Cont Len mismatch, " + clen + " <=> " + c.clen);
-//                if (fid != c.fileid)
-//                    System.err.println("File ID mismatch, "+fid+" <=> "+c.fileid);
-                c.addEvent(type, etime);
+                db.release(conn);
+            }
             }
-            rs.close();
-            ps.close();
-            db.release(conn);
-            os.close();
         } catch (FileNotFoundException e) {
             System.err.println("File cannot be written: " + outfile);
         } catch (SQLException e) {
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
index e00c3944..51beac92 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
@@ -98,62 +98,61 @@ public class SubscriberReport extends ReportBase {
     public void run() {
         Map map = new HashMap();
         long start = System.currentTimeMillis();
+
         try {
             DB db = new DB();
             @SuppressWarnings("resource")
             Connection conn = db.getConnection();
-            PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
-            ps.setLong(1, from);
-            ps.setLong(2, to);
-            ResultSet rs = ps.executeQuery();
-            while (rs.next()) {
-                String date = rs.getString("DATE");
-                int sub = rs.getInt("DELIVERY_SUBID");
-                int res = rs.getInt("RESULT");
-                int count = rs.getInt("COUNT");
-                String key = date + "," + sub;
-                Counters c = map.get(key);
-                if (c == null) {
-                    c = new Counters(date, sub);
-                    map.put(key, c);
+            try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {
+                ps.setLong(1, from);
+                ps.setLong(2, to);
+                try(ResultSet rs = ps.executeQuery()) {
+                    while (rs.next()) {
+                        String date = rs.getString("DATE");
+                        int sub = rs.getInt("DELIVERY_SUBID");
+                        int res = rs.getInt("RESULT");
+                        int count = rs.getInt("COUNT");
+                        String key = date + "," + sub;
+                        Counters c = map.get(key);
+                        if (c == null) {
+                            c = new Counters(date, sub);
+                            map.put(key, c);
+                        }
+                        c.addCounts(res, count);
+                    }
                 }
-                c.addCounts(res, count);
             }
-            rs.close();
-            ps.close();
-            ps = conn.prepareStatement(SELECT_SQL2);
-            ps.setLong(1, from);
-            ps.setLong(2, to);
-            rs = ps.executeQuery();
-            while (rs.next()) {
-                String date = rs.getString("DATE");
-                int sub = rs.getInt("DELIVERY_SUBID");
-                int count = rs.getInt("COUNT");
-                String key = date + "," + sub;
-                Counters c = map.get(key);
-                if (c == null) {
-                    c = new Counters(date, sub);
-                    map.put(key, c);
-                }
-                c.addDlxCount(count);
-            }
-            rs.close();
-            ps.close();
+            try( PreparedStatement ps2 = conn.prepareStatement(SELECT_SQL2)) {
+                ps2.setLong(1, from);
+                ps2.setLong(2, to);
+                try(ResultSet rs2 = ps2.executeQuery()) {
+                    while (rs2.next()) {
+                        String date = rs2.getString("DATE");
+                        int sub = rs2.getInt("DELIVERY_SUBID");
+                        int count = rs2.getInt("COUNT");
+                        String key = date + "," + sub;
+                        Counters c = map.get(key);
+                        if (c == null) {
+                            c = new Counters(date, sub);
+                            map.put(key, c);
+                        }
+                        c.addDlxCount(count);
+                    }
+                }
+            }

             db.release(conn);
         } catch (SQLException e) {
             e.printStackTrace();
         }
         logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
-        try {
-            PrintWriter os = new PrintWriter(outfile);
+        try (PrintWriter os = new PrintWriter(outfile)){
             os.println("date,subid,count100,count200,count300,count400,count500,countminus1,countdlx");
             for (String key : new TreeSet(map.keySet())) {
                 Counters c = map.get(key);
                 os.println(c.toString());
             }
-            os.close();
         } catch (FileNotFoundException e) {
             System.err.println("File cannot be written: " + outfile);
         }
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
index 169db0d2..34e158a7 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
@@ -36,6 +36,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.TreeSet;

+import org.apache.log4j.Logger;
 import org.onap.dmaap.datarouter.provisioning.utils.DB;

 /**
@@ -57,7 +58,7 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB;
 public class VolumeReport extends ReportBase {
     private static final String SELECT_SQL = "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT" +
         " from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ? LIMIT ?, ?";
-
+    private Logger loggerVolumeReport=Logger.getLogger("org.onap.dmaap.datarouter.reports");
     private class Counters {
         public int filespublished, filesdelivered, filesexpired;
         public long bytespublished, bytesdelivered, bytesexpired;
@@ -83,58 +84,64 @@ public class VolumeReport extends ReportBase {
         final long stepsize = 6000000L;
         boolean go_again = true;
         for (long i = 0; go_again; i += stepsize) {
-            PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
-            ps.setLong(1, from);
-            ps.setLong(2, to);
-            ps.setLong(3, i);
-            ps.setLong(4, stepsize);
-            ResultSet rs = ps.executeQuery();
-            go_again = false;
-            while (rs.next()) {
-                go_again = true;
-                long etime = rs.getLong("EVENT_TIME");
-                String type = rs.getString("TYPE");
-                int feed = rs.getInt("FEEDID");
-                long clen = rs.getLong("CONTENT_LENGTH");
-                String key = sdf.format(new Date(etime)) + ":" + feed;
-                Counters c = map.get(key);
-                if (c == null) {
-                    c = new Counters();
-                    map.put(key, c);
-                }
-                if (type.equalsIgnoreCase("pub")) {
-                    c.filespublished++;
-                    c.bytespublished += clen;
-                } else if (type.equalsIgnoreCase("del")) {
-                    // Only count successful deliveries
-                    int statusCode = rs.getInt("RESULT");
-                    if (statusCode >= 200 && statusCode < 300) {
-                        c.filesdelivered++;
-                        c.bytesdelivered += clen;
+            try (PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {
+                ps.setLong(1, from);
+                ps.setLong(2, to);
+                ps.setLong(3, i);
+                ps.setLong(4, stepsize);
+                try(ResultSet rs = ps.executeQuery()) {
+                    go_again = false;
+                    while (rs.next()) {
+                        go_again = true;
+                        long etime = rs.getLong("EVENT_TIME");
+                        String type = rs.getString("TYPE");
+                        int feed = rs.getInt("FEEDID");
+                        long clen = rs.getLong("CONTENT_LENGTH");
+                        String key = sdf.format(new Date(etime)) + ":" + feed;
+                        Counters c = map.get(key);
+                        if (c == null) {
+                            c = new Counters();
+                            map.put(key, c);
+                        }
+                        if (type.equalsIgnoreCase("pub")) {
+                            c.filespublished++;
+                            c.bytespublished += clen;
+                        } else if (type.equalsIgnoreCase("del")) {
+                            // Only count successful deliveries
+                            int statusCode = rs.getInt("RESULT");
+                            if (statusCode >= 200 && statusCode < 300) {
+                                c.filesdelivered++;
+                                c.bytesdelivered += clen;
+                            }
+                        } else if (type.equalsIgnoreCase("exp")) {
+                            c.filesexpired++;
+                            c.bytesexpired += clen;
+                        }
                     }
-                } else if (type.equalsIgnoreCase("exp")) {
-                    c.filesexpired++;
-                    c.bytesexpired += clen;
                 }
+
+            }
+            catch (SQLException sqlException)
+            {
+                loggerVolumeReport.error("SqlException",sqlException);
             }
-            rs.close();
-            ps.close();
         }
+
         db.release(conn);
     } catch (SQLException e) {
         e.printStackTrace();
     }
     logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
-        try {
-            PrintWriter os = new PrintWriter(outfile);
+        try (PrintWriter os = new PrintWriter(outfile)) {
             os.println("date,feedid,filespublished,bytespublished,filesdelivered,bytesdelivered,filesexpired,bytesexpired");
-            for (String key : new TreeSet(map.keySet())) {
+            for(String key :new TreeSet(map.keySet()))
+            {
                 Counters c = map.get(key);
                 String[] p = key.split(":");
                 os.println(String.format("%s,%s,%s", p[0], p[1], c.toString()));
             }
-            os.close();
-        } catch (FileNotFoundException e) {
+        }
+        catch (FileNotFoundException e) {
            System.err.println("File cannot be written: " + outfile);
        }
    }