Update Datarouter Version 1.0.0
dmaap/datarouter.git: datarouter-prov/src/main/java/com/att/research/datarouter/reports/VolumeReport.java
/*******************************************************************************
 * ============LICENSE_START==================================================
 * * org.onap.dmaap
 * * ===========================================================================
 * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 * * ===========================================================================
 * * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * * You may obtain a copy of the License at
 * *
 *  *      http://www.apache.org/licenses/LICENSE-2.0
 * *
 *  * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS,
 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * * See the License for the specific language governing permissions and
 * * limitations under the License.
 * * ============LICENSE_END====================================================
 * *
 * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
 * *
 ******************************************************************************/
package com.att.research.datarouter.reports;

import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;

import com.att.research.datarouter.provisioning.utils.DB;

/**
 * Generate a traffic volume report. The report is a .csv file containing the following columns:
 * <table>
 * <tr><td>date</td><td>the date for this record</td></tr>
 * <tr><td>feedid</td><td>the Feed ID for this record</td></tr>
 * <tr><td>filespublished</td><td>the number of files published on this feed and date</td></tr>
 * <tr><td>bytespublished</td><td>the number of bytes published on this feed and date</td></tr>
 * <tr><td>filesdelivered</td><td>the number of files delivered on this feed and date</td></tr>
 * <tr><td>bytesdelivered</td><td>the number of bytes delivered on this feed and date</td></tr>
 * <tr><td>filesexpired</td><td>the number of files expired on this feed and date</td></tr>
 * <tr><td>bytesexpired</td><td>the number of bytes expired on this feed and date</td></tr>
 * </table>
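 * <p>
 * Each data row therefore has the form (values here are purely illustrative):
 * <pre>2017-06-01,101,12,34567,12,34567,0,0</pre>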
 *
 * @author Robert P. Eby
 * @version $Id: VolumeReport.java,v 1.3 2014/02/28 15:11:13 eby Exp $
 */
public class VolumeReport extends ReportBase {
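    // Log records are fetched in fixed-size pages (LIMIT offset, count) so that a large
    // date range never has to be held in a single result set in memory.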
    private static final String SELECT_SQL = "select EVENT_TIME, TYPE, FEEDID, CONTENT_LENGTH, RESULT" +
        " from LOG_RECORDS where EVENT_TIME >= ? and EVENT_TIME <= ? LIMIT ?, ?";

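    /** Running totals for one (date, feed ID) pair; toString() emits them in the report's column order. */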
    private class Counters {
        public int  filespublished, filesdelivered, filesexpired;
        public long bytespublished, bytesdelivered, bytesexpired;
        @Override
        public String toString() {
            return String.format("%d,%d,%d,%d,%d,%d",
                filespublished, bytespublished, filesdelivered,
                bytesdelivered, filesexpired, bytesexpired);
        }
    }

    @Override
    public void run() {
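        // Counters are accumulated per day and per feed, keyed by "yyyy-MM-dd:feedid".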
        Map<String, Counters> map = new HashMap<String, Counters>();
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        long start = System.currentTimeMillis();
        try {
            DB db = new DB();
            @SuppressWarnings("resource")
            Connection conn = db.getConnection();
            // We need to run this SELECT in stages, because otherwise we run out of memory!
            final long stepsize = 6000000L;
            boolean go_again = true;
            for (long i = 0; go_again; i += stepsize) {
                PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
                ps.setLong(1, from);
                ps.setLong(2, to);
                ps.setLong(3, i);
                ps.setLong(4, stepsize);
                ResultSet rs = ps.executeQuery();
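                // Assume this is the last page; any row in the result set flips go_again
                // back to true so the next LIMIT window is fetched.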
                go_again = false;
                while (rs.next()) {
                    go_again = true;
                    long etime  = rs.getLong("EVENT_TIME");
                    String type = rs.getString("TYPE");
                    int feed    = rs.getInt("FEEDID");
                    long clen   = rs.getLong("CONTENT_LENGTH");
                    String key  = sdf.format(new Date(etime)) + ":" + feed;
                    Counters c = map.get(key);
                    if (c == null) {
                        c = new Counters();
                        map.put(key, c);
                    }
                    if (type.equalsIgnoreCase("pub")) {
                        c.filespublished++;
                        c.bytespublished += clen;
                    } else if (type.equalsIgnoreCase("del")) {
                        // Only count successful deliveries
                        int statusCode = rs.getInt("RESULT");
                        if (statusCode >= 200 && statusCode < 300) {
                            c.filesdelivered++;
                            c.bytesdelivered += clen;
                        }
                    } else if (type.equalsIgnoreCase("exp")) {
                        c.filesexpired++;
                        c.bytesexpired += clen;
                    }
                }
                rs.close();
                ps.close();
            }
            db.release(conn);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        logger.debug("Query time: " + (System.currentTimeMillis() - start) + " ms");
        try {
            PrintWriter os = new PrintWriter(outfile);
            os.println("date,feedid,filespublished,bytespublished,filesdelivered,bytesdelivered,filesexpired,bytesexpired");
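            // TreeSet gives the output a deterministic order: rows are sorted lexically by their "date:feedid" key.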
            for (String key : new TreeSet<String>(map.keySet())) {
                Counters c = map.get(key);
                String[] p = key.split(":");
                os.println(String.format("%s,%s,%s", p[0], p[1], c.toString()));
            }
            os.close();
        } catch (FileNotFoundException e) {
            System.err.println("File cannot be written: " + outfile);
        }
    }
}