Merge "Fix NetworkRoute Vulnerabilities"
author    Ram Koya <rk541m@att.com>
          Tue, 25 Sep 2018 13:20:51 +0000 (13:20 +0000)
committer Gerrit Code Review <gerrit@onap.org>
          Tue, 25 Sep 2018 13:20:51 +0000 (13:20 +0000)
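
The change below removes e.printStackTrace() calls, which static-analysis scans commonly flag because stack traces printed to stdout/stderr bypass the managed log stream and can leak internal details, and routes the exceptions through the existing log4j loggers instead. A minimal self-contained sketch of the before/after pattern (the class, method, and message are illustrative placeholders, not copied from the diff):

    import java.sql.SQLException;
    import org.apache.log4j.Logger;

    public class PatternSketch {
        private static final Logger logger = Logger.getLogger(PatternSketch.class);

        void load() {
            try {
                doWork();            // placeholder for the guarded SQL call
            } catch (SQLException e) {
                // Before this change: System.err.println(e); e.printStackTrace();
                logger.error(e);     // after: reported through the class logger
            }
        }

        private void doWork() throws SQLException { /* elided */ }
    }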
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java

diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
index 110c63d..b6ad8e4 100644
@@ -214,7 +214,6 @@ public class LogfileLoader extends Thread {
                 }\r
             } catch (Exception e) {\r
                 logger.warn("PROV0020: Caught exception in LogfileLoader: " + e);\r
-                e.printStackTrace();\r
             }\r
         }\r
     }\r
@@ -275,7 +274,7 @@ public class LogfileLoader extends Thread {
              }\r
             } catch (SQLException e) {\r
                 System.err.println(e);\r
-                e.printStackTrace();\r
+                logger.error(e);\r
             } finally {\r
                 db.release(conn);\r
             }\r
@@ -297,7 +296,7 @@ public class LogfileLoader extends Thread {
            }\r
          } catch (SQLException e) {\r
             System.err.println(e);\r
-            e.printStackTrace();\r
+            logger.error(e);\r
         } finally {\r
             db.release(conn);\r
         }\r
@@ -322,7 +321,7 @@ public class LogfileLoader extends Thread {
             }\r
            } catch (SQLException e) {\r
             System.err.println(e);\r
-            e.printStackTrace();\r
+            logger.error(e);\r
         } finally {\r
             db.release(conn);\r
         }\r
@@ -376,7 +375,7 @@ public class LogfileLoader extends Thread {
             logger.debug(String.format("initializeNextid, next ID is %d (%x)", nextid, nextid));\r
         } catch (SQLException e) {\r
             System.err.println(e);\r
-            e.printStackTrace();\r
+            logger.error(e);\r
         } finally {\r
             db.release(conn);\r
         }\r
@@ -417,19 +416,15 @@ public class LogfileLoader extends Thread {
                     } catch (SQLException e) {\r
                         logger.warn("PROV8003 Invalid value in record: " + line);\r
                         logger.debug(e);\r
-                        e.printStackTrace();\r
                     } catch (NumberFormatException e) {\r
                         logger.warn("PROV8004 Invalid number in record: " + line);\r
                         logger.debug(e);\r
-                        e.printStackTrace();\r
                     } catch (ParseException e) {\r
                         logger.warn("PROV8005 Invalid date in record: " + line);\r
                         logger.debug(e);\r
-                        e.printStackTrace();\r
                     } catch (Exception e) {\r
                         logger.warn("PROV8006 Invalid pattern in record: " + line);\r
                         logger.debug(e);\r
-                        e.printStackTrace();\r
                     }\r
                     total++;\r
                 }\r
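One caveat on the LogfileLoader replacement above: log4j 1.x Logger.error(Object) records only the exception's toString(), so the stack trace itself no longer reaches the logs. If the trace is still wanted at error level, the two-argument overload retains it (the message code here is hypothetical, not from this patch):

    // Alternative, not what this patch does: keeps the stack trace in the log
    logger.error("PROV00xx: SQL error in LogfileLoader", e);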
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PurgeLogDirTask.java
index 7b0e0e6..14d1519 100644
@@ -27,24 +27,30 @@ package org.onap.dmaap.datarouter.provisioning.utils;
 import java.io.File;\r
 import java.util.Properties;\r
 import java.util.TimerTask;\r
+import org.apache.log4j.Logger;\r
 \r
 /**\r
- * This class provides a {@link TimerTask} that purges old logfiles\r
- * (older than the number of days specified by the org.onap.dmaap.datarouter.provserver.logretention property).\r
+ * This class provides a {@link TimerTask} that purges old logfiles (older than the number of days specified by the\r
+ * org.onap.dmaap.datarouter.provserver.logretention property).\r
  *\r
  * @author Robert Eby\r
  * @version $Id: PurgeLogDirTask.java,v 1.2 2013/07/05 13:48:05 eby Exp $\r
  */\r
 public class PurgeLogDirTask extends TimerTask {\r
+\r
     private static final long ONEDAY = 86400000L;\r
 \r
     private final String logdir;\r
     private final long interval;\r
+    private Logger utilsLogger;\r
 \r
     public PurgeLogDirTask() {\r
         Properties p = (new DB()).getProperties();\r
         logdir = p.getProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir");\r
         String s = p.getProperty("org.onap.dmaap.datarouter.provserver.logretention", "30");\r
+\r
+        this.utilsLogger = Logger.getLogger("org.onap.dmaap.datarouter.provisioning.utils");\r
+\r
         long n = 30;\r
         try {\r
             n = Long.parseLong(s);\r
@@ -61,12 +67,13 @@ public class PurgeLogDirTask extends TimerTask {
             if (dir.exists()) {\r
                 long exptime = System.currentTimeMillis() - interval;\r
                 for (File logfile : dir.listFiles()) {\r
-                    if (logfile.lastModified() < exptime)\r
+                    if (logfile.lastModified() < exptime) {\r
                         logfile.delete();\r
+                    }\r
                 }\r
             }\r
         } catch (Exception e) {\r
-            e.printStackTrace();\r
+            utilsLogger.error("Exception: " + e.getMessage());\r
         }\r
     }\r
 }\r
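For context on the class touched above: PurgeLogDirTask extends java.util.TimerTask, so it runs under a java.util.Timer. A minimal sketch of that wiring, with the daily period assumed for illustration rather than read from this change (the provisioning server's actual scheduling may differ):

    import java.util.Timer;

    public class PurgeScheduler {
        public static void main(String[] args) {
            Timer timer = new Timer(true);  // daemon thread, won't block JVM shutdown
            // run() deletes files in accesslog.dir older than the retention interval
            timer.scheduleAtFixedRate(new PurgeLogDirTask(), 0L, 86400000L);  // once per day (assumed)
        }
    }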
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/DailyLatencyReport.java
index f848733..f1e0f7c 100644
@@ -37,7 +37,6 @@ import java.util.HashMap;
 import java.util.List;\r
 import java.util.Map;\r
 import java.util.TreeSet;\r
-\r
 import org.onap.dmaap.datarouter.provisioning.utils.DB;\r
 \r
 /**\r
@@ -65,42 +64,54 @@ import org.onap.dmaap.datarouter.provisioning.utils.DB;
  * @version $Id: DailyLatencyReport.java,v 1.2 2013/11/06 16:23:54 eby Exp $\r
  */\r
 public class DailyLatencyReport extends ReportBase {\r
+\r
     private static final String SELECT_SQL =\r
         "select EVENT_TIME, TYPE, PUBLISH_ID, FEED_FILEID, FEEDID, CONTENT_LENGTH from LOG_RECORDS" +\r
-        " where EVENT_TIME >= ? and EVENT_TIME <= ?";\r
+            " where EVENT_TIME >= ? and EVENT_TIME <= ?";\r
 \r
     private class Job {\r
+\r
         public long pubtime = 0;\r
         public long clen = 0;\r
         public List<Long> deltime = new ArrayList<Long>();\r
+\r
         public long minLatency() {\r
             long n = deltime.isEmpty() ? 0 : Long.MAX_VALUE;\r
-            for (Long l : deltime)\r
-                n = Math.min(n, l-pubtime);\r
+            for (Long l : deltime) {\r
+                n = Math.min(n, l - pubtime);\r
+            }\r
             return n;\r
         }\r
+\r
         public long maxLatency() {\r
             long n = 0;\r
-            for (Long l : deltime)\r
-                n = Math.max(n, l-pubtime);\r
+            for (Long l : deltime) {\r
+                n = Math.max(n, l - pubtime);\r
+            }\r
             return n;\r
         }\r
+\r
         public long totalLatency() {\r
             long n = 0;\r
-            for (Long l : deltime)\r
-                n += (l-pubtime);\r
+            for (Long l : deltime) {\r
+                n += (l - pubtime);\r
+            }\r
             return n;\r
         }\r
     }\r
+\r
     private class Counters {\r
+\r
         public final String date;\r
         public final int feedid;\r
         public final Map<String, Job> jobs;\r
+\r
         public Counters(String d, int fid) {\r
             date = d;\r
             feedid = fid;\r
-            jobs = new HashMap<String, Job>();\r
+            jobs = new HashMap<>();\r
         }\r
+\r
         public void addEvent(long etime, String type, String id, String fid, long clen) {\r
             Job j = jobs.get(id);\r
             if (j == null) {\r
@@ -114,48 +125,52 @@ public class DailyLatencyReport extends ReportBase {
                 j.deltime.add(etime);\r
             }\r
         }\r
+\r
         @Override\r
         public String toString() {\r
             long minsize = Long.MAX_VALUE, maxsize = 0, avgsize = 0;\r
-            long minl    = Long.MAX_VALUE, maxl    = 0;\r
-            long fanout  = 0, totall = 0, totaln = 0;\r
+            long minl = Long.MAX_VALUE, maxl = 0;\r
+            long fanout = 0, totall = 0, totaln = 0;\r
             for (Job j : jobs.values()) {\r
                 minsize = Math.min(minsize, j.clen);\r
                 maxsize = Math.max(maxsize, j.clen);\r
                 avgsize += j.clen;\r
-                minl    = Math.min(minl, j.minLatency());\r
-                maxl    = Math.max(maxl, j.maxLatency());\r
-                totall  += j.totalLatency();\r
-                totaln  += j.deltime.size();\r
-                fanout  += j.deltime.size();\r
+                minl = Math.min(minl, j.minLatency());\r
+                maxl = Math.max(maxl, j.maxLatency());\r
+                totall += j.totalLatency();\r
+                totaln += j.deltime.size();\r
+                fanout += j.deltime.size();\r
             }\r
             if (jobs.size() > 0) {\r
                 avgsize /= jobs.size();\r
-                fanout  /= jobs.size();\r
+                fanout /= jobs.size();\r
             }\r
             long avgl = (totaln > 0) ? (totall / totaln) : 0;\r
-            return date + "," + feedid + "," + minsize + "," + maxsize + "," + avgsize + "," + minl + "," + maxl + "," + avgl + "," + fanout;\r
+            return date + "," + feedid + "," + minsize + "," + maxsize + "," + avgsize + "," + minl + "," + maxl + ","\r
+                + avgl + "," + fanout;\r
         }\r
     }\r
+\r
     private long getPstart(String t) {\r
-        if (t.indexOf('.') >= 0)\r
+        if (t.indexOf('.') >= 0) {\r
             t = t.substring(0, t.indexOf('.'));\r
+        }\r
         return Long.parseLong(t);\r
     }\r
 \r
     @Override\r
     public void run() {\r
-        Map<String, Counters> map = new HashMap<String, Counters>();\r
+        Map<String, Counters> map = new HashMap<>();\r
         SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");\r
         long start = System.currentTimeMillis();\r
         try {\r
             DB db = new DB();\r
             @SuppressWarnings("resource")\r
             Connection conn = db.getConnection();\r
-            try(PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {\r
+            try (PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {\r
                 ps.setLong(1, from);\r
                 ps.setLong(2, to);\r
-                try(ResultSet rs = ps.executeQuery()) {\r
+                try (ResultSet rs = ps.executeQuery()) {\r
                     while (rs.next()) {\r
                         String id = rs.getString("PUBLISH_ID");\r
                         int feed = rs.getInt("FEEDID");\r
@@ -177,17 +192,18 @@ public class DailyLatencyReport extends ReportBase {
                 db.release(conn);\r
             }\r
         } catch (SQLException e) {\r
-            e.printStackTrace();\r
+            logger.error("SQLException: " + e.getMessage());\r
         }\r
-        logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");\r
-        try (PrintWriter os = new PrintWriter(outfile)){\r
+        logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");\r
+        try (PrintWriter os = new PrintWriter(outfile)) {\r
             os.println("date,feedid,minsize,maxsize,avgsize,minlat,maxlat,avglat,fanout");\r
-            for (String key : new TreeSet<String>(map.keySet())) {\r
+            for (String key : new TreeSet<>(map.keySet())) {\r
                 Counters c = map.get(key);\r
                 os.println(c.toString());\r
             }\r
         } catch (FileNotFoundException e) {\r
-            System.err.println("File cannot be written: "+outfile);\r
+            System.err.println("File cannot be written: " + outfile);\r
+            logger.error("FileNotFoundException: " + e.getMessage());\r
         }\r
     }\r
 }\r
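As a quick check of the Job latency arithmetic reformatted above: for a file published at pubtime = 1000 ms with deliveries logged at 1300 ms and 1900 ms, minLatency() is 300, maxLatency() is 900, and totalLatency() is 1200, so a report covering only that job would show avglat = 1200 / 2 = 600 and a fanout of 2 (values invented for illustration).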
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/LatencyReport.java
index 549511b..f500140 100644
@@ -180,7 +180,7 @@ public class LatencyReport extends ReportBase {
         } catch (FileNotFoundException e) {\r
             System.err.println("File cannot be written: " + outfile);\r
         } catch (SQLException e) {\r
-            e.printStackTrace();\r
+            logger.error("SQLException: " + e.getMessage());\r
         }\r
         logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");\r
     }\r
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/SubscriberReport.java
index 51beac9..b580af7 100644
@@ -144,7 +144,7 @@ public class SubscriberReport extends ReportBase {
 \r
             db.release(conn);\r
         } catch (SQLException e) {\r
-            e.printStackTrace();\r
+            logger.error("SQLException: " + e.getMessage());\r
         }\r
         logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");\r
         try (PrintWriter os = new PrintWriter(outfile)){\r
@@ -155,6 +155,7 @@ public class SubscriberReport extends ReportBase {
             }\r
         } catch (FileNotFoundException e) {\r
             System.err.println("File cannot be written: " + outfile);\r
+            logger.error("FileNotFoundException: " + e.getMessage());\r
         }\r
     }\r
 }\r
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/reports/VolumeReport.java
index 34e158a..8d5731f 100644
-/*******************************************************************************\r
- * ============LICENSE_START==================================================\r
- * * org.onap.dmaap\r
- * * ===========================================================================\r
- * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * *\r
- *  *      http://www.apache.org/licenses/LICENSE-2.0\r
- * *\r
- *  * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-\r
-\r
-package org.onap.dmaap.datarouter.reports;\r
-\r
-import java.io.FileNotFoundException;\r
-import java.io.PrintWriter;\r
-import java.sql.Connection;\r
-import java.sql.PreparedStatement;\r
-import java.sql.ResultSet;\r
-import java.sql.SQLException;\r
-import java.text.SimpleDateFormat;\r
-import java.util.Date;\r
-import java.util.HashMap;\r
-import java.util.Map;\r
-import java.util.TreeSet;\r
-\r
-import org.apache.log4j.Logger;\r
-import org.onap.dmaap.datarouter.provisioning.utils.DB;\r
-\r
-/**\r
- * Generate a traffic volume report. The report is a .csv file containing the following columns:\r
- * <table>\r
- * <tr><td>date</td><td>the date for this record</td></tr>\r
- * <tr><td>feedid</td><td>the Feed ID for this record</td></tr>\r
- * <tr><td>filespublished</td><td>the number of files published on this feed and date</td></tr>\r
- * <tr><td>bytespublished</td><td>the number of bytes published on this feed and date</td></tr>\r
- * <tr><td>filesdelivered</td><td>the number of files delivered on this feed and date</td></tr>\r
- * <tr><td>bytesdelivered</td><td>the number of bytes delivered on this feed and date</td></tr>\r
- * <tr><td>filesexpired</td><td>the number of files expired on this feed and date</td></tr>\r
- * <tr><td>bytesexpired</td><td>the number of bytes expired on this feed and date</td></tr>\r
- * </table>\r
- *\r
- * @author Robert P. Eby\r
- * @version $Id: VolumeReport.java,v 1.3 2014/02/28 15:11:13 eby Exp $\r
- */\r
-public class VolumeReport extends ReportBase {\r
-    private static final String SELECT_SQL = "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT" +\r
-            " from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ? LIMIT ?, ?";\r
-    private Logger loggerVolumeReport=Logger.getLogger("org.onap.dmaap.datarouter.reports");\r
-    private class Counters {\r
-        public int filespublished, filesdelivered, filesexpired;\r
-        public long bytespublished, bytesdelivered, bytesexpired;\r
-\r
-        @Override\r
-        public String toString() {\r
-            return String.format("%d,%d,%d,%d,%d,%d",\r
-                    filespublished, bytespublished, filesdelivered,\r
-                    bytesdelivered, filesexpired, bytesexpired);\r
-        }\r
-    }\r
-\r
-    @Override\r
-    public void run() {\r
-        Map<String, Counters> map = new HashMap<String, Counters>();\r
-        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");\r
-        long start = System.currentTimeMillis();\r
-        try {\r
-            DB db = new DB();\r
-            @SuppressWarnings("resource")\r
-            Connection conn = db.getConnection();\r
-            // We need to run this SELECT in stages, because otherwise we run out of memory!\r
-            final long stepsize = 6000000L;\r
-            boolean go_again = true;\r
-            for (long i = 0; go_again; i += stepsize) {\r
-                try (PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {\r
-                    ps.setLong(1, from);\r
-                    ps.setLong(2, to);\r
-                    ps.setLong(3, i);\r
-                    ps.setLong(4, stepsize);\r
-                    try(ResultSet rs = ps.executeQuery()) {\r
-                        go_again = false;\r
-                        while (rs.next()) {\r
-                            go_again = true;\r
-                            long etime = rs.getLong("EVENT_TIME");\r
-                            String type = rs.getString("TYPE");\r
-                            int feed = rs.getInt("FEEDID");\r
-                            long clen = rs.getLong("CONTENT_LENGTH");\r
-                            String key = sdf.format(new Date(etime)) + ":" + feed;\r
-                            Counters c = map.get(key);\r
-                            if (c == null) {\r
-                                c = new Counters();\r
-                                map.put(key, c);\r
-                            }\r
-                            if (type.equalsIgnoreCase("pub")) {\r
-                                c.filespublished++;\r
-                                c.bytespublished += clen;\r
-                            } else if (type.equalsIgnoreCase("del")) {\r
-                                // Only count successful deliveries\r
-                                int statusCode = rs.getInt("RESULT");\r
-                                if (statusCode >= 200 && statusCode < 300) {\r
-                                    c.filesdelivered++;\r
-                                    c.bytesdelivered += clen;\r
-                                }\r
-                            } else if (type.equalsIgnoreCase("exp")) {\r
-                                c.filesexpired++;\r
-                                c.bytesexpired += clen;\r
-                            }\r
-                        }\r
-                    }\r
-\r
-                }\r
-                catch (SQLException sqlException)\r
-                {\r
-                    loggerVolumeReport.error("SqlException",sqlException);\r
-                }\r
-            }\r
-\r
-            db.release(conn);\r
-        } catch (SQLException e) {\r
-            e.printStackTrace();\r
-        }\r
-        logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");\r
-        try (PrintWriter os = new PrintWriter(outfile)) {\r
-            os.println("date,feedid,filespublished,bytespublished,filesdelivered,bytesdelivered,filesexpired,bytesexpired");\r
-            for(String key :new TreeSet<String>(map.keySet()))\r
-            {\r
-                Counters c = map.get(key);\r
-                String[] p = key.split(":");\r
-                os.println(String.format("%s,%s,%s", p[0], p[1], c.toString()));\r
-            }\r
-        }\r
-        catch (FileNotFoundException e) {\r
-            System.err.println("File cannot be written: " + outfile);\r
-        }\r
-    }\r
-}\r
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+
+package org.onap.dmaap.datarouter.reports;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.TreeSet;
+
+import org.apache.log4j.Logger;
+import org.onap.dmaap.datarouter.provisioning.utils.DB;
+
+/**
+ * Generate a traffic volume report. The report is a .csv file containing the following columns:
+ * <table>
+ * <tr><td>date</td><td>the date for this record</td></tr>
+ * <tr><td>feedid</td><td>the Feed ID for this record</td></tr>
+ * <tr><td>filespublished</td><td>the number of files published on this feed and date</td></tr>
+ * <tr><td>bytespublished</td><td>the number of bytes published on this feed and date</td></tr>
+ * <tr><td>filesdelivered</td><td>the number of files delivered on this feed and date</td></tr>
+ * <tr><td>bytesdelivered</td><td>the number of bytes delivered on this feed and date</td></tr>
+ * <tr><td>filesexpired</td><td>the number of files expired on this feed and date</td></tr>
+ * <tr><td>bytesexpired</td><td>the number of bytes expired on this feed and date</td></tr>
+ * </table>
+ *
+ * @author Robert P. Eby
+ * @version $Id: VolumeReport.java,v 1.3 2014/02/28 15:11:13 eby Exp $
+ */
+public class VolumeReport extends ReportBase {
+    private static final String SELECT_SQL = "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT" +
+            " from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ? LIMIT ?, ?";
+    private Logger loggerVolumeReport=Logger.getLogger("org.onap.dmaap.datarouter.reports");
+    private class Counters {
+        int filespublished, filesdelivered, filesexpired;
+        long bytespublished, bytesdelivered, bytesexpired;
+
+        @Override
+        public String toString() {
+            return String.format("%d,%d,%d,%d,%d,%d",
+                    filespublished, bytespublished, filesdelivered,
+                    bytesdelivered, filesexpired, bytesexpired);
+        }
+    }
+
+    @Override
+    public void run() {
+        Map<String, Counters> map = new HashMap<String, Counters>();
+        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
+        long start = System.currentTimeMillis();
+        try {
+            DB db = new DB();
+            @SuppressWarnings("resource")
+            Connection conn = db.getConnection();
+            // We need to run this SELECT in stages, because otherwise we run out of memory!
+            final long stepsize = 6000000L;
+            boolean go_again = true;
+            for (long i = 0; go_again; i += stepsize) {
+                try (PreparedStatement ps = conn.prepareStatement(SELECT_SQL)) {
+                    ps.setLong(1, from);
+                    ps.setLong(2, to);
+                    ps.setLong(3, i);
+                    ps.setLong(4, stepsize);
+                    try(ResultSet rs = ps.executeQuery()) {
+                        go_again = false;
+                        while (rs.next()) {
+                            go_again = true;
+                            long etime = rs.getLong("EVENT_TIME");
+                            String type = rs.getString("TYPE");
+                            int feed = rs.getInt("FEEDID");
+                            long clen = rs.getLong("CONTENT_LENGTH");
+                            String key = sdf.format(new Date(etime)) + ":" + feed;
+                            Counters c = map.get(key);
+                            if (c == null) {
+                                c = new Counters();
+                                map.put(key, c);
+                            }
+                            if (type.equalsIgnoreCase("pub")) {
+                                c.filespublished++;
+                                c.bytespublished += clen;
+                            } else if (type.equalsIgnoreCase("del")) {
+                                // Only count successful deliveries
+                                int statusCode = rs.getInt("RESULT");
+                                if (statusCode >= 200 && statusCode < 300) {
+                                    c.filesdelivered++;
+                                    c.bytesdelivered += clen;
+                                }
+                            } else if (type.equalsIgnoreCase("exp")) {
+                                c.filesexpired++;
+                                c.bytesexpired += clen;
+                            }
+                        }
+                    }
+                }
+                catch (SQLException sqlException)
+                {
+                    loggerVolumeReport.error("SqlException",sqlException);
+                }
+            }
+
+            db.release(conn);
+        } catch (SQLException e) {
+            loggerVolumeReport.error("SQLException: " + e.getMessage());
+        }
+        logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
+        try (PrintWriter os = new PrintWriter(outfile)) {
+            os.println("date,feedid,filespublished,bytespublished,filesdelivered,bytesdelivered,filesexpired,bytesexpired");
+            for(String key :new TreeSet<String>(map.keySet()))
+            {
+                Counters c = map.get(key);
+                String[] p = key.split(":");
+                os.println(String.format("%s,%s,%s", p[0], p[1], c.toString()));
+            }
+        }
+        catch (FileNotFoundException e) {
+            System.err.println("File cannot be written: " + outfile);
+        }
+    }
+}
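
A note on the paging in VolumeReport's run() above: the LIMIT ?, ? clause is MySQL's offset/row-count form, and go_again stays true only while a page returns rows, so the loop stops after the first empty page. Offset paging rescans and discards the first i rows on every pass, so later pages get progressively slower on large LOG_RECORDS tables; where a monotonically increasing key exists, a keyset variant avoids that. Sketched here with an assumed key column, not confirmed by this diff:

    // Hedged alternative; RECORD_ID is an assumed monotone key column
    private static final String SELECT_SQL_KEYSET =
        "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT from LOG_RECORDS"
        + " where EVENT_TIME >= ? and EVENT_TIME <= ? and RECORD_ID > ?"
        + " order by RECORD_ID LIMIT ?";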