/*******************************************************************************
 * ============LICENSE_START==================================================
 * ===========================================================================
 * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 * ===========================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END====================================================
 *
 * ECOMP is a trademark and service mark of AT&T Intellectual Property.
 ******************************************************************************/
25 package com.att.research.datarouter.reports;
\r
27 import java.io.FileNotFoundException;
\r
28 import java.io.PrintWriter;
\r
29 import java.sql.Connection;
\r
30 import java.sql.PreparedStatement;
\r
31 import java.sql.ResultSet;
\r
32 import java.sql.SQLException;
\r
33 import java.text.SimpleDateFormat;
\r
34 import java.util.ArrayList;
\r
35 import java.util.Date;
\r
36 import java.util.HashMap;
\r
37 import java.util.List;
\r
38 import java.util.Map;
\r
39 import java.util.TreeSet;
\r
41 import com.att.research.datarouter.provisioning.utils.DB;
\r
/**
 * Generate a daily per feed latency report.  The report is a .csv file containing the following columns:
 * <table>
 * <tr><td>date</td><td>the date for this record</td></tr>
 * <tr><td>feedid</td><td>the Feed ID for this record</td></tr>
 * <tr><td>minsize</td><td>the minimum size of all files published on this feed and date</td></tr>
 * <tr><td>maxsize</td><td>the maximum size of all files published on this feed and date</td></tr>
 * <tr><td>avgsize</td><td>the average size of all files published on this feed and date</td></tr>
 * <tr><td>minlat</td><td>the minimum latency in delivering this feed to all subscribers (in ms)</td></tr>
 * <tr><td>maxlat</td><td>the maximum latency in delivering this feed to all subscribers (in ms)</td></tr>
 * <tr><td>avglat</td><td>the average latency in delivering this feed to all subscribers (in ms)</td></tr>
 * <tr><td>fanout</td><td>the average number of subscribers this feed was delivered to</td></tr>
 * </table>
 * <p>
 * In the context of this report, latency is defined as the value
 * <i>(D<sub>e</sub> - P<sub>s</sub>)</i> where:
 * </p>
 * <p>P<sub>s</sub> is the time that the publication of the file to the node starts.</p>
 * <p>D<sub>e</sub> is the time that the delivery of the file to the subscriber ends.</p>
 *
 * @author Robert P. Eby
 * @version $Id: DailyLatencyReport.java,v 1.2 2013/11/06 16:23:54 eby Exp $
 */
67 public class DailyLatencyReport extends ReportBase {
\r
    // Pulls every log record (both "pub" and "del" events) whose EVENT_TIME
    // falls in the requested [from, to] window; rows are aggregated in run().
    private static final String SELECT_SQL =
        "select EVENT_TIME, TYPE, PUBLISH_ID, FEED_FILEID, FEEDID, CONTENT_LENGTH from LOG_RECORDS" +
        " where EVENT_TIME >= ? and EVENT_TIME <= ?";
\r
73 public long pubtime = 0;
\r
74 public long clen = 0;
\r
75 public List<Long> deltime = new ArrayList<Long>();
\r
76 public long minLatency() {
\r
77 long n = deltime.isEmpty() ? 0 : Long.MAX_VALUE;
\r
78 for (Long l : deltime)
\r
79 n = Math.min(n, l-pubtime);
\r
82 public long maxLatency() {
\r
84 for (Long l : deltime)
\r
85 n = Math.max(n, l-pubtime);
\r
88 public long totalLatency() {
\r
90 for (Long l : deltime)
\r
95 private class Counters {
\r
96 public final String date;
\r
97 public final int feedid;
\r
98 public final Map<String, Job> jobs;
\r
99 public Counters(String d, int fid) {
\r
102 jobs = new HashMap<String, Job>();
\r
104 public void addEvent(long etime, String type, String id, String fid, long clen) {
\r
105 Job j = jobs.get(id);
\r
110 if (type.equals("pub")) {
\r
111 j.pubtime = getPstart(id);
\r
113 } else if (type.equals("del")) {
\r
114 j.deltime.add(etime);
\r
118 public String toString() {
\r
119 long minsize = Long.MAX_VALUE, maxsize = 0, avgsize = 0;
\r
120 long minl = Long.MAX_VALUE, maxl = 0;
\r
121 long fanout = 0, totall = 0, totaln = 0;
\r
122 for (Job j : jobs.values()) {
\r
123 minsize = Math.min(minsize, j.clen);
\r
124 maxsize = Math.max(maxsize, j.clen);
\r
126 minl = Math.min(minl, j.minLatency());
\r
127 maxl = Math.max(maxl, j.maxLatency());
\r
128 totall += j.totalLatency();
\r
129 totaln += j.deltime.size();
\r
130 fanout += j.deltime.size();
\r
132 if (jobs.size() > 0) {
\r
133 avgsize /= jobs.size();
\r
134 fanout /= jobs.size();
\r
136 long avgl = (totaln > 0) ? (totall / totaln) : 0;
\r
137 return date + "," + feedid + "," + minsize + "," + maxsize + "," + avgsize + "," + minl + "," + maxl + "," + avgl + "," + fanout;
\r
140 private long getPstart(String t) {
\r
141 if (t.indexOf('.') > 0)
\r
142 t = t.substring(0, t.indexOf('.'));
\r
143 return Long.parseLong(t);
\r
147 public void run() {
\r
148 Map<String, Counters> map = new HashMap<String, Counters>();
\r
149 SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
\r
150 long start = System.currentTimeMillis();
\r
153 @SuppressWarnings("resource")
\r
154 Connection conn = db.getConnection();
\r
155 PreparedStatement ps = conn.prepareStatement(SELECT_SQL);
\r
156 ps.setLong(1, from);
\r
158 ResultSet rs = ps.executeQuery();
\r
159 while (rs.next()) {
\r
160 String id = rs.getString("PUBLISH_ID");
\r
161 int feed = rs.getInt("FEEDID");
\r
162 long etime = rs.getLong("EVENT_TIME");
\r
163 String type = rs.getString("TYPE");
\r
164 String fid = rs.getString("FEED_FILEID");
\r
165 long clen = rs.getLong("CONTENT_LENGTH");
\r
166 String date = sdf.format(new Date(getPstart(id)));
\r
167 String key = date + "," + feed;
\r
168 Counters c = map.get(key);
\r
170 c = new Counters(date, feed);
\r
173 c.addEvent(etime, type, id, fid, clen);
\r
178 } catch (SQLException e) {
\r
179 e.printStackTrace();
\r
181 logger.debug("Query time: " + (System.currentTimeMillis()-start) + " ms");
\r
183 PrintWriter os = new PrintWriter(outfile);
\r
184 os.println("date,feedid,minsize,maxsize,avgsize,minlat,maxlat,avglat,fanout");
\r
185 for (String key : new TreeSet<String>(map.keySet())) {
\r
186 Counters c = map.get(key);
\r
187 os.println(c.toString());
\r
190 } catch (FileNotFoundException e) {
\r
191 System.err.println("File cannot be written: "+outfile);
\r