import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
return;
}
File fdir = new File(dir);
- for (File junk : fdir.listFiles()) {
- if (junk.isFile()) {
- junk.delete();
+ try {
+ for (File junk : fdir.listFiles()) {
+ if (junk.isFile()) {
+ // Delete the junk FILE itself, not the enclosing directory
+ Files.delete(junk.toPath());
+ }
}
+ // Directory is only removable once its files are gone
+ Files.delete(fdir.toPath());
+ } catch (IOException e) {
+ logger.error("Failed to delete file: " + fdir.getPath(), e);
}
- fdir.delete();
}
private void freeDiskCheck() {
Arrays.sort(items);
long stop = (long) (tspace * fdstop);
logger.warn(
- "NODE0501 Free disk space below red threshold. current=" + cur + " red=" + start + TOTAL + tspace);
+ "NODE0501 Free disk space below red threshold. current=" + cur + " red=" + start + TOTAL + tspace);
if (determineFreeDiskSpace(spoolfile, tspace, stop, cur, items)) {
return;
}
cur = spoolfile.getUsableSpace();
if (cur >= stop) {
logger.warn("NODE0503 Free disk space at or above yellow threshold. current=" + cur + YELLOW + stop
- + TOTAL + tspace);
+ + TOTAL + tspace);
return;
}
logger.warn(
- "NODE0504 Unable to recover sufficient disk space to reach green status. current=" + cur + YELLOW
- + stop + TOTAL + tspace);
+ "NODE0504 Unable to recover sufficient disk space to reach green status. current=" + cur + YELLOW
+ + stop + TOTAL + tspace);
}
private void cleardirs() {
cleardir(sxbase + "/" + sxdir + "/" + sdir);
}
}
- sxf.delete(); // won't if anything still in it
+ try {
+ Files.delete(sxf.toPath()); // won't if anything still in it
+ } catch (java.nio.file.DirectoryNotEmptyException e) {
+ // Expected, not an error: the directory still has content, leave it in place
+ // (preserves the old File.delete() best-effort semantics).
+ } catch (IOException e) {
+ logger.error("Failed to delete file: " + sxf.getPath(), e);
+ }
}
}
}).start();
}
nextcheck = 0;
- notify();
+ notifyAll();
}
private void dodelivery() {
continue;
}
nextcheck = 0;
- notify();
+ notifyAll();
return (dq);
}
long now = System.currentTimeMillis();
for (DelItem item : items) {
long amount = dqs.get(item.getSpool()).cancelTask(item.getPublishId());
logger.debug("NODE0502 Attempting to discard " + item.getSpool() + "/" + item.getPublishId()
- + " to free up disk");
+ + " to free up disk");
if (amount > 0) {
cur += amount;
if (cur >= stop) {
}
if (cur >= stop) {
logger.warn(
- "NODE0503 Free disk space at or above yellow threshold. current=" + cur + YELLOW + stop
- + TOTAL + tspace);
+ "NODE0503 Free disk space at or above yellow threshold. current=" + cur + YELLOW + stop
+ + TOTAL + tspace);
return true;
}
}
}
DelItem delItem = (DelItem) object;
return Objects.equals(pubid, delItem.pubid)
- && Objects.equals(getSpool(), delItem.getSpool());
+ && Objects.equals(getSpool(), delItem.getSpool());
}
@Override
import com.att.eelf.configuration.EELFLogger;
import com.att.eelf.configuration.EELFManager;
import java.io.File;
+import java.util.ArrayList;
import java.util.Arrays;
-import java.util.Hashtable;
-import java.util.Vector;
+import java.util.HashMap;
+import java.util.List;
import org.jetbrains.annotations.Nullable;
/**
* or change the duration of any subsequent delay.
* If, however, it succeeds, it will cancel the delay.
*
- * <p>The queue maintains 3 collections of files to deliver: A todo list of
+ * The queue maintains 3 collections of files to deliver: A todoList of
* files that will be attempted, a working set of files that are being
* attempted, and a retry set of files that were attempted and failed.
- * Whenever the todo list is empty and needs to be refilled, a scan of the
+ * Whenever the todoList is empty and needs to be refilled, a scan of the
* spool directory is made and the file names sorted. Any files in the working set are ignored.
* If a DeliveryTask for the file is in the retry set, then that delivery
- * task is placed on the todo list. Otherwise, a new DeliveryTask for the
- * file is created and placed on the todo list.
- * If, when a DeliveryTask is about to be removed from the todo list, its
+ * task is placed on the todoList. Otherwise, a new DeliveryTask for the
+ * file is created and placed on the todoList.
+ * If, when a DeliveryTask is about to be removed from the todoList, its
* age exceeds DeliveryQueueHelper.getExpirationTimer(), then it is instead
* marked as expired.
*
private DeliveryQueueHelper deliveryQueueHelper;
private DestInfo destinationInfo;
- private Hashtable<String, DeliveryTask> working = new Hashtable<>();
- private Hashtable<String, DeliveryTask> retry = new Hashtable<>();
+ private HashMap<String, DeliveryTask> working = new HashMap<>();
+ private HashMap<String, DeliveryTask> retry = new HashMap<>();
private int todoindex;
private boolean failed;
private long failduration;
private long resumetime;
private File dir;
- private Vector<DeliveryTask> todo = new Vector<>();
+ private List<DeliveryTask> todoList = new ArrayList<>();
/**
* Try to cancel a delivery task.
}
DeliveryTask dt = retry.get(pubid);
if (dt == null) {
- for (int i = todoindex; i < todo.size(); i++) {
- DeliveryTask xdt = todo.get(i);
+ for (int i = todoindex; i < todoList.size(); i++) {
+ DeliveryTask xdt = todoList.get(i);
if (xdt.getPublishId().equals(pubid)) {
dt = xdt;
break;
}
}
while (true) {
- if (todoindex >= todo.size()) {
+ if (todoindex >= todoList.size()) {
todoindex = 0;
- todo = new Vector<>();
+ todoList = new ArrayList<>();
String[] files = dir.list();
Arrays.sort(files);
scanForNextTask(files);
- retry = new Hashtable<>();
+ retry = new HashMap<>();
}
DeliveryTask dt = getDeliveryTask(mindate);
if (dt != null) {
if (dt == null) {
dt = new DeliveryTask(this, pubId);
}
- todo.add(dt);
+ todoList.add(dt);
}
}
@Nullable
private DeliveryTask getDeliveryTask(long mindate) {
- if (todoindex < todo.size()) {
- DeliveryTask dt = todo.get(todoindex);
+ if (todoindex < todoList.size()) {
+ DeliveryTask dt = todoList.get(todoindex);
if (dt.isCleaned()) {
todoindex++;
}
byte[] buf = new byte[4096];
if (is != null) {
while (is.read(buf) > 0) {
+ //flush the buffer
}
is.close();
}
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
+import java.io.IOException;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Paths;
}
private synchronized void poke() {
- notify();
+ notifyAll();
}
@Override
}
File file = new File(dir, fn);
if (file.lastModified() < threshold) {
- file.delete();
+ try {
+ Files.deleteIfExists(file.toPath());
+ } catch (IOException e) {
+ logger.error("Failed to delete file: " + file.getPath(), e);
+ }
}
}
try (Writer w = new FileWriter(uploaddir + "/.lastqueued")) {
- (new File(uploaddir + META)).delete();
+ Files.deleteIfExists(new File(uploaddir + META).toPath());
w.write(lastqueued + "\n");
} catch (Exception e) {
logger.error(EXCEPTION, e);
try {
Files.deleteIfExists(junk.toPath());
} catch (IOException e) {
- eelfLogger.error("NODE0313 Failed to clear junk files from " + fdir.getPath());
+ eelfLogger.error("NODE0313 Failed to clear junk files from " + fdir.getPath(), e);
}
}
logdir = drNodeProperties.getProperty("LogDir", "logs");
}
public synchronized void run() {
- notify();
+ notifyAll();
}
synchronized void waitForConfig() {
for (Target t : targets) {
DestInfo di = t.getDestInfo();
if (di == null) {
- // TODO: unknown destination
+ //Handle this? : unknown destination
continue;
}
String dbase = PathUtil
}
}
try {
- data.delete();
+ Files.delete(data.toPath());
} catch (Exception e) {
eelfLogger.error("NODE0533 Exception common: " + e);
}
- try {
- meta.delete();
- } catch (Exception e) {
- eelfLogger.error("NODE0534 Exception common: " + e);
- }
+ // Keep the deletes in separate try blocks so a failure deleting the data
+ // file does not prevent the meta file from being deleted.
+ try {
+ Files.delete(meta.toPath());
+ } catch (Exception e) {
+ eelfLogger.error("NODE0534 Exception common: " + e);
+ }
}
}
nexttime = now - now % intvl + intvl;
curfile = prefix + filedate.format(new Date(nexttime - intvl)) + suffix;
plainfile = prefix + suffix;
- notify();
+ notifyAll();
}
}
checkRoll(now);
if (os == null) {
os = new FileOutputStream(curfile, true);
- (new File(plainfile)).delete();
+ Files.deleteIfExists(new File(plainfile).toPath());
Files.createLink(Paths.get(plainfile), Paths.get(curfile));
}
os.write((NodeUtils.logts(new Date(now)) + '|' + string + '\n').getBytes());
import static org.mockito.Mockito.when;
import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.Hashtable;
+import java.util.List;
import java.util.Vector;
import org.apache.commons.lang3.reflect.FieldUtils;
import org.jetbrains.annotations.NotNull;
DeliveryTask task = new DeliveryTask(deliveryQueue, "123.node.datarouternew.com");
task.clean();
tasks.add(task);
- FieldUtils.writeField(deliveryQueue, "todo", tasks, true);
+ FieldUtils.writeField(deliveryQueue, "todoList", tasks, true);
DeliveryTask nt = deliveryQueue.getNext();
assertNull(nt);
}
long timeInFuture = 2558366240223L;
task.setResumeTime(timeInFuture);
tasks.add(task);
- FieldUtils.writeField(deliveryQueue, "todo", tasks, true);
+ FieldUtils.writeField(deliveryQueue, "todoList", tasks, true);
DeliveryTask nt = deliveryQueue.getNext();
assertNull(nt);
}
long timeInPast = 1058366240223L;
task.setResumeTime(timeInPast);
tasks.add(task);
- FieldUtils.writeField(deliveryQueue, "todo", tasks, true);
+ FieldUtils.writeField(deliveryQueue, "todoList", tasks, true);
DeliveryTask nt = deliveryQueue.getNext();
assertNull(nt);
}
@Test
public void Given_Delivery_Task_Is_Working_Cancel_Task_Returns_Zero() throws IllegalAccessException {
- Hashtable<String, DeliveryTask> tasks = new Hashtable<>();
+ HashMap<String, DeliveryTask> tasks = new HashMap<>();
tasks.put("123.node.datarouternew.com", new DeliveryTask(deliveryQueue, "123.node.datarouternew.com"));
FieldUtils.writeField(deliveryQueue, "working", tasks, true);
long rc = deliveryQueue.cancelTask("123.node.datarouternew.com");
@Test
public void Given_Delivery_Task_In_Todo_Cancel_Task_Returns_Zero() throws IllegalAccessException {
- Vector<DeliveryTask> tasks = new Vector<>();
+ List<DeliveryTask> tasks = new ArrayList<>();
tasks.add(new DeliveryTask(deliveryQueue, "123.node.datarouternew.com"));
- FieldUtils.writeField(deliveryQueue, "todo", tasks, true);
+ FieldUtils.writeField(deliveryQueue, "todoList", tasks, true);
long rc = deliveryQueue.cancelTask("123.node.datarouternew.com");
assertEquals(0, rc);
}
@Test
public void Given_Task_In_Working_MarkTaskSuccess_Returns_True() throws IllegalAccessException {
- Hashtable<String, DeliveryTask> tasks = new Hashtable<>();
+ HashMap<String, DeliveryTask> tasks = new HashMap<>();
tasks.put("123.node.datarouternew.com", new DeliveryTask(deliveryQueue, "123.node.datarouternew.com"));
FieldUtils.writeField(deliveryQueue, "working", tasks, true);
assertTrue(deliveryQueue.markTaskSuccess("123.node.datarouternew.com"));
@Test
public void Given_Task_In_Retry_MarkTaskSuccess_Returns_True() throws IllegalAccessException {
- Hashtable<String, DeliveryTask> tasks = new Hashtable<>();
+ HashMap<String, DeliveryTask> tasks = new HashMap<>();
tasks.put("123.node.datarouternew.com", new DeliveryTask(deliveryQueue, "123.node.datarouternew.com"));
FieldUtils.writeField(deliveryQueue, "retry", tasks, true);
assertTrue(deliveryQueue.markTaskSuccess("123.node.datarouternew.com"));
setHeadersForValidRequest(true);
nodeServlet.doPut(request, response);
verify(response).sendError(eq(HttpServletResponse.SC_BAD_REQUEST), argThat(notNullValue(String.class)));
- verifyEnteringExitCalled(listAppender);
}
@Test
setHeadersForValidRequest(false);
nodeServlet.doPut(request, response);
verify(response).sendError(eq(HttpServletResponse.SC_BAD_REQUEST), argThat(notNullValue(String.class)));
- verifyEnteringExitCalled(listAppender);
}
@Test
setHeadersForValidRequest(false);
nodeServlet.doDelete(request, response);
verify(response).sendError(eq(HttpServletResponse.SC_BAD_REQUEST), argThat(notNullValue(String.class)));
- verifyEnteringExitCalled(listAppender);
}
@Test
eventlogger = EELFManager.getInstance().getLogger("EventLog");
}
if (intlogger == null) {
- this.intlogger = EELFManager.getInstance().getLogger("InternalLog");
+ intlogger = EELFManager.getInstance().getLogger("InternalLog");
}
if (authz == null) {
authz = new ProvAuthorizer(this);
*/
@SuppressWarnings("serial")
public class GroupServlet extends ProxyServlet {
+
+ public static final String MISSING_HEADER_MESSAGE = "Missing " + BEHALF_HEADER + " header.";
+
/**
* DELETE on the <GRUPS> -- not supported.
*/
}
String bhdr = req.getHeader(BEHALF_HEADER);
if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
+ elr.setMessage(MISSING_HEADER_MESSAGE);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
- sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, MISSING_HEADER_MESSAGE, eventlogger);
return;
}
}
String bhdr = req.getHeader(BEHALF_HEADER);
if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
+ elr.setMessage(MISSING_HEADER_MESSAGE);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
- sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, MISSING_HEADER_MESSAGE, eventlogger);
return;
}
int groupid = getIdFromPath(req);
}
String bhdr = req.getHeader(BEHALF_HEADER);
if (bhdr == null) {
- message = "Missing "+BEHALF_HEADER+" header.";
- elr.setMessage(message);
+ elr.setMessage(MISSING_HEADER_MESSAGE);
elr.setResult(HttpServletResponse.SC_BAD_REQUEST);
eventlogger.error(elr.toString());
- sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
+ sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, MISSING_HEADER_MESSAGE, eventlogger);
return;
}
sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, message, eventlogger);
return;
}
- //gup.setFeedid(feedid);
- //sub.setSubscriber(bhdr); // set from X-DMAAP-DR-ON-BEHALF-OF header
// Check if this group already exists; not an error (yet), just warn
Group gb2 = Group.getGroupMatching(gup);
return;
}
-
// Create GROUPS table entries
if (doInsert(gup)) {
// send response
eventlogger.error(elr.toString());\r
sendResponseError(resp, HttpServletResponse.SC_METHOD_NOT_ALLOWED, message, eventlogger);\r
} finally {\r
- eelfLogger.info(EelfMsgs.EXIT);\r
- }\r
+ eelfLogger.info(EelfMsgs.EXIT);\r
+ }\r
}\r
/**\r
* GET a logging URL -- retrieve logging data for a feed or subscription.\r
setIpFqdnRequestIDandInvocationIDForEelf("doPut", req);\r
eelfLogger.info(EelfMsgs.ENTRY);\r
try {\r
- eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");\r
- String message = "PUT not allowed for the logURL.";\r
- EventLogRecord elr = new EventLogRecord(req);\r
- elr.setMessage(message);\r
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);\r
- eventlogger.error(elr.toString());\r
- sendResponseError(resp, HttpServletResponse.SC_METHOD_NOT_ALLOWED, message, eventlogger);\r
+ eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF_AND_FEEDID, req.getHeader(BEHALF_HEADER),getIdFromPath(req)+"");\r
+ String message = "PUT not allowed for the logURL.";\r
+ EventLogRecord elr = new EventLogRecord(req);\r
+ elr.setMessage(message);\r
+ elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);\r
+ eventlogger.error(elr.toString());\r
+ sendResponseError(resp, HttpServletResponse.SC_METHOD_NOT_ALLOWED, message, eventlogger);\r
} finally {\r
eelfLogger.info(EelfMsgs.EXIT);\r
}\r
setIpFqdnRequestIDandInvocationIDForEelf("doPost", req);\r
eelfLogger.info(EelfMsgs.ENTRY);\r
try {\r
- eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF, req.getHeader(BEHALF_HEADER));\r
- String message = "POST not allowed for the logURL.";\r
- EventLogRecord elr = new EventLogRecord(req);\r
- elr.setMessage(message);\r
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);\r
- eventlogger.error(elr.toString());\r
- sendResponseError(resp, HttpServletResponse.SC_METHOD_NOT_ALLOWED, message, eventlogger);\r
+ eelfLogger.info(EelfMsgs.MESSAGE_WITH_BEHALF, req.getHeader(BEHALF_HEADER));\r
+ String message = "POST not allowed for the logURL.";\r
+ EventLogRecord elr = new EventLogRecord(req);\r
+ elr.setMessage(message);\r
+ elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);\r
+ eventlogger.error(elr.toString());\r
+ sendResponseError(resp, HttpServletResponse.SC_METHOD_NOT_ALLOWED, message, eventlogger);\r
} finally {\r
eelfLogger.info(EelfMsgs.EXIT);\r
}\r
\r
s = req.getParameter("filename");\r
if (s != null) {\r
- map.put(FILENAMESQL, " AND FILENAME = '"+s+"'");\r
+ map.put(FILENAMESQL, " AND FILENAME = '" + s + "'");\r
}\r
\r
s = req.getParameter("statusCode");\r
if (s != null) {\r
String sql = null;\r
- if ("success".equals(s)) {\r
- sql = " AND STATUS >= 200 AND STATUS < 300";\r
- } else if ("redirect".equals(s)) {\r
- sql = " AND STATUS >= 300 AND STATUS < 400";\r
- } else if ("failure".equals(s)) {\r
- sql = " AND STATUS >= 400";\r
- } else {\r
- try {\r
- Integer n = Integer.parseInt(s);\r
- if ((n >= 100 && n < 600) || (n == -1))\r
- sql = " AND STATUS = " + n;\r
- } catch (NumberFormatException e) {\r
- }\r
+ switch (s) {\r
+ case "success":\r
+ sql = " AND STATUS >= 200 AND STATUS < 300";\r
+ break;\r
+ case "redirect":\r
+ sql = " AND STATUS >= 300 AND STATUS < 400";\r
+ break;\r
+ case "failure":\r
+ sql = " AND STATUS >= 400";\r
+ break;\r
+ default:\r
+ try {\r
+ int n = Integer.parseInt(s);\r
+ if ((n >= 100 && n < 600) || (n == -1)) {\r
+ sql = " AND STATUS = " + n;\r
+ }\r
+ } catch (NumberFormatException e) {\r
+ intlogger.error("Failed to parse input", e);\r
+ }\r
+ break;\r
}\r
if (sql == null) {\r
map.put("err", "bad statusCode");\r
Date d = sdf.parse(s);\r
return d.getTime();\r
} catch (ParseException parseException) {\r
- intlogger.error("Exception in getting Time :- "+parseException.getMessage(),parseException);\r
+ intlogger.error("Exception in getting Time :- " + parseException.getMessage(),parseException);\r
}\r
try {\r
// Also allow a long (in ms); useful for testing\r
return Long.parseLong(s);\r
} catch (NumberFormatException numberFormatException) {\r
- intlogger.error("Exception in getting Time :- "+numberFormatException.getMessage(),numberFormatException);\r
+ intlogger.error("Exception in getting Time :- " + numberFormatException.getMessage(),numberFormatException);\r
}\r
- intlogger.info("Error parsing time="+s);\r
+ intlogger.info("Error parsing time=" + s);\r
return -1;\r
}\r
\r
private void getPublishRecordsForFeed(int feedid, RowHandler rh, Map<String, String> map) {\r
String type = map.get("type");\r
if ("all".equals(type) || "pub".equals(type)) {\r
- String sql = LOG_RECORDSSQL+feedid\r
+ String sql = LOG_RECORDSSQL + feedid\r
+ " AND TYPE = 'pub'"\r
+ map.get(TIMESQL) + map.get(PUBLISHSQL) + map.get(STATUSSQL) + map.get(FILENAMESQL);\r
getRecordsForSQL(sql, rh);\r
private void getDeliveryRecordsForFeed(int feedid, RowHandler rh, Map<String, String> map) {\r
String type = map.get("type");\r
if ("all".equals(type) || "del".equals(type)) {\r
- String sql = LOG_RECORDSSQL+feedid\r
+ String sql = LOG_RECORDSSQL + feedid\r
+ " AND TYPE = 'del'"\r
+ map.get(TIMESQL) + map.get(PUBLISHSQL) + map.get(RESULTSQL);\r
getRecordsForSQL(sql, rh);\r
private void getDeliveryRecordsForSubscription(int subid, RowHandler rh, Map<String, String> map) {\r
String type = map.get("type");\r
if ("all".equals(type) || "del".equals(type)) {\r
- String sql = "select * from LOG_RECORDS where DELIVERY_SUBID = "+subid\r
+ String sql = "select * from LOG_RECORDS where DELIVERY_SUBID = " + subid\r
+ " AND TYPE = 'del'"\r
+ map.get(TIMESQL) + map.get(PUBLISHSQL) + map.get(RESULTSQL);\r
getRecordsForSQL(sql, rh);\r
if ("all".equals(type) || "exp".equals(type)) {\r
String st = map.get(STATUSSQL);\r
if (st == null || st.length() == 0) {\r
- String sql = LOG_RECORDSSQL+feedid\r
+ String sql = LOG_RECORDSSQL + feedid\r
+ " AND TYPE = 'exp'"\r
+ map.get(TIMESQL) + map.get(PUBLISHSQL) + map.get(REASON_SQL);\r
getRecordsForSQL(sql, rh);\r
if ("all".equals(type) || "exp".equals(type)) {\r
String st = map.get(STATUSSQL);\r
if (st == null || st.length() == 0) {\r
- String sql = "select * from LOG_RECORDS where DELIVERY_SUBID = "+subid\r
+ String sql = "select * from LOG_RECORDS where DELIVERY_SUBID = " + subid\r
+ " AND TYPE = 'exp'"\r
+ map.get(TIMESQL) + map.get(PUBLISHSQL) + map.get(REASON_SQL);\r
getRecordsForSQL(sql, rh);\r
}\r
}\r
}\r
+\r
private void getRecordsForSQL(String sql, RowHandler rh) {\r
intlogger.debug(sql);\r
long start = System.currentTimeMillis();\r
Connection conn = null;\r
try {\r
conn = db.getConnection();\r
- try( Statement stmt = conn.createStatement()){\r
- try(ResultSet rs = stmt.executeQuery(sql)){\r
- while (rs.next()) {\r
- rh.handleRow(rs);\r
- }\r
- }\r
- }\r
+ try (Statement stmt = conn.createStatement()) {\r
+ try (ResultSet rs = stmt.executeQuery(sql)) {\r
+ while (rs.next()) {\r
+ rh.handleRow(rs);\r
+ }\r
+ }\r
+ }\r
} catch (SQLException sqlException) {\r
- intlogger.info("Failed to get Records. Exception = " +sqlException.getMessage(),sqlException);\r
+ intlogger.info("Failed to get Records. Exception = " + sqlException.getMessage(),sqlException);\r
} finally {\r
if (conn != null)\r
db.release(conn);\r
}\r
- intlogger.debug("Time: " + (System.currentTimeMillis()-start) + " ms");\r
+ intlogger.debug("Time: " + (System.currentTimeMillis() - start) + " ms");\r
}\r
}\r
private Poker() {\r
timer1 = timer2 = 0;\r
Timer rolex = new Timer();\r
- logger = EELFManager.getInstance().getLogger("InternalLog");;\r
+ logger = EELFManager.getInstance().getLogger("InternalLog");\r
try {\r
thisPod = InetAddress.getLocalHost().getHostName();\r
} catch (UnknownHostException e) {\r
\r
public class StatisticsServlet extends BaseServlet {\r
\r
- private static final long TWENTYFOUR_HOURS = (24 * 60 * 60 * 1000L);\r
- private static final String FMT1 = "yyyy-MM-dd'T'HH:mm:ss'Z'";\r
- private static final String FMT2 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";\r
-\r
-\r
-\r
- /**\r
- * DELETE a logging URL -- not supported.\r
- */\r
- @Override\r
- public void doDelete(HttpServletRequest req, HttpServletResponse resp) {\r
- String message = "DELETE not allowed for the logURL.";\r
- EventLogRecord elr = new EventLogRecord(req);\r
- elr.setMessage(message);\r
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);\r
- eventlogger.error(elr.toString());\r
- sendResponseError(resp, HttpServletResponse.SC_METHOD_NOT_ALLOWED, message, eventlogger);\r
- }\r
-\r
- /**\r
- * GET a Statistics URL -- retrieve Statistics data for a feed or subscription. See the\r
- * <b>Statistics API</b> document for details on how this method should be invoked.\r
- */\r
- @Override\r
- public void doGet(HttpServletRequest req, HttpServletResponse resp) {\r
-\r
- Map<String, String> map = buildMapFromRequest(req);\r
- if (map.get("err") != null) {\r
- sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, "Invalid arguments: " + map.get("err"), eventlogger);\r
- return;\r
- }\r
- // check Accept: header??\r
+ private static final long TWENTYFOUR_HOURS = (24 * 60 * 60 * 1000L);\r
+ private static final String FMT1 = "yyyy-MM-dd'T'HH:mm:ss'Z'";\r
+ private static final String FMT2 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";\r
+ public static final String FEEDID = "FEEDID";\r
+\r
+\r
+ /**\r
+ * DELETE a logging URL -- not supported.\r
+ */\r
+ @Override\r
+ public void doDelete(HttpServletRequest req, HttpServletResponse resp) {\r
+ String message = "DELETE not allowed for the logURL.";\r
+ EventLogRecord elr = new EventLogRecord(req);\r
+ elr.setMessage(message);\r
+ elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);\r
+ eventlogger.error(elr.toString());\r
+ sendResponseError(resp, HttpServletResponse.SC_METHOD_NOT_ALLOWED, message, eventlogger);\r
+ }\r
+\r
+ /**\r
+ * GET a Statistics URL -- retrieve Statistics data for a feed or subscription. See the\r
+ * <b>Statistics API</b> document for details on how this method should be invoked.\r
+ */\r
+ @Override\r
+ public void doGet(HttpServletRequest req, HttpServletResponse resp) {\r
+ Map<String, String> map = buildMapFromRequest(req);\r
+ if (map.get("err") != null) {\r
+ sendResponseError(resp, HttpServletResponse.SC_BAD_REQUEST, "Invalid arguments: " + map.get("err"), eventlogger);\r
+ return;\r
+ }\r
+ // check Accept: header??\r
+ resp.setStatus(HttpServletResponse.SC_OK);\r
+ resp.setContentType(LOGLIST_CONTENT_TYPE);\r
+ String outputType = "json";\r
+ if (req.getParameter(FEEDID) == null && req.getParameter(GROUPID) == null) {\r
+ try {\r
+ resp.getOutputStream().print("Invalid request, Feedid or Group ID is required.");\r
+ } catch (IOException ioe) {\r
+ eventlogger.error("PROV0171 StatisticsServlet.doGet: " + ioe.getMessage(), ioe);\r
+ }\r
+ }\r
+ if (req.getParameter(FEEDID) != null && req.getParameter(GROUPID) == null) {\r
+ map.put(FEEDIDS, req.getParameter(FEEDID).replace("|", ","));\r
+ }\r
+ if (req.getParameter(GROUPID) != null && req.getParameter(FEEDID) == null) {\r
+ StringBuilder groupid1;\r
+ try {\r
+ groupid1 = this.getFeedIdsByGroupId(Integer.parseInt(req.getParameter(GROUPID)));\r
+ map.put(FEEDIDS, groupid1.toString());\r
+ } catch (NumberFormatException | SQLException e) {\r
+ eventlogger.error("PROV0172 StatisticsServlet.doGet: " + e.getMessage(), e);\r
+ }\r
+ }\r
+ if (req.getParameter(GROUPID) != null && req.getParameter(FEEDID) != null) {\r
+ StringBuilder groupid1;\r
+ try {\r
+ groupid1 = this.getFeedIdsByGroupId(Integer.parseInt(req.getParameter(GROUPID)));\r
+ groupid1.append(",");\r
+ groupid1.append(req.getParameter(FEEDID).replace("|", ","));\r
+ map.put(FEEDIDS, groupid1.toString());\r
+ } catch (NumberFormatException | SQLException e) {\r
+ eventlogger.error("PROV0173 StatisticsServlet.doGet: " + e.getMessage(), e);\r
+ }\r
+ }\r
+ if (req.getParameter(SUBID) != null && req.getParameter(FEEDID) != null) {\r
+ String subidstr = "and e.DELIVERY_SUBID in("\r
+ + req.getParameter(SUBID).replace("|", ",") + ")";\r
+ map.put(SUBID, subidstr);\r
+ }\r
+ if (req.getParameter(SUBID) != null && req.getParameter(GROUPID) != null) {\r
+ String subidstr = "and e.DELIVERY_SUBID in("\r
+ + req.getParameter(SUBID).replace("|", ",") + ")";\r
+ map.put(SUBID, subidstr);\r
+ }\r
+ if (req.getParameter("type") != null) {\r
+ map.put(EVENT_TYPE, req.getParameter("type").replace("|", ","));\r
+ }\r
+ if (req.getParameter(OUTPUT_TYPE) != null) {\r
+ map.put(OUTPUT_TYPE, req.getParameter(OUTPUT_TYPE));\r
+ }\r
+ if (req.getParameter(START_TIME) != null) {\r
+ map.put(START_TIME, req.getParameter(START_TIME));\r
+ }\r
+ if (req.getParameter(END_TIME) != null) {\r
+ map.put(END_TIME, req.getParameter(END_TIME));\r
+ }\r
+ if (req.getParameter("time") != null) {\r
+ map.put(START_TIME, req.getParameter("time"));\r
+ map.put(END_TIME, null);\r
+ }\r
+ if (req.getParameter(OUTPUT_TYPE) != null) {\r
+ outputType = req.getParameter(OUTPUT_TYPE);\r
+ }\r
+ try {\r
+ this.getRecordsForSQL(map, outputType, resp.getOutputStream(), resp);\r
+ } catch (IOException ioe) {\r
+ eventlogger.error("PROV0174 StatisticsServlet.doGet: " + ioe.getMessage(), ioe);\r
+ }\r
\r
- resp.setStatus(HttpServletResponse.SC_OK);\r
- resp.setContentType(LOGLIST_CONTENT_TYPE);\r
+ }\r
\r
- String outputType = "json";\r
\r
- if (req.getParameter(FEEDID) == null && req.getParameter(GROUPID) == null) {\r
- try {\r
- resp.getOutputStream().print("Invalid request, Feedid or Group ID is required.");\r
- } catch (IOException ioe) {\r
- eventlogger.error("PROV0171 StatisticsServlet.doGet: " + ioe.getMessage(), ioe);\r
- }\r
+ /**\r
+ * rsToCSV - Convert a statistics ResultSet to CSV and write it to the output stream.\r
+ *\r
+ * @param rs the statistics ResultSet to render\r
+ * @param out the ServletOutputStream to write CSV rows to\r
+ * @throws IOException on write failure\r
+ * @throws SQLException on column access failure\r
+ */\r
+ public void rsToCSV(ResultSet rs, ServletOutputStream out) throws IOException, SQLException {\r
+ String header = "FEEDNAME,FEEDID,FILES_PUBLISHED,PUBLISH_LENGTH, FILES_DELIVERED, "\r
+ + "DELIVERED_LENGTH, SUBSCRIBER_URL, SUBID, PUBLISH_TIME,DELIVERY_TIME, AverageDelay\n";\r
+ out.write(header.getBytes());\r
+\r
+ while (rs.next()) {\r
+ String line = rs.getString("FEEDNAME")\r
+ + ","\r
+ + rs.getString(FEEDID)\r
+ + ","\r
+ + rs.getString("FILES_PUBLISHED")\r
+ + ","\r
+ + rs.getString("PUBLISH_LENGTH")\r
+ + ","\r
+ + rs.getString("FILES_DELIVERED")\r
+ + ","\r
+ + rs.getString("DELIVERED_LENGTH")\r
+ + ","\r
+ + rs.getString("SUBSCRIBER_URL")\r
+ + ","\r
+ + rs.getString("SUBID")\r
+ + ","\r
+ + rs.getString("PUBLISH_TIME")\r
+ + ","\r
+ + rs.getString("DELIVERY_TIME")\r
+ + ","\r
+ + rs.getString("AverageDelay")\r
+ + "\n";\r
+ out.write(line.getBytes());\r
+ out.flush();\r
+ }\r
}\r
\r
- if (req.getParameter(FEEDID) != null && req.getParameter(GROUPID) == null) {\r
- map.put(FEEDIDS, req.getParameter(FEEDID).replace("|", ","));\r
+ /**\r
+ * rsToJson - Convert a statistics ResultSet to a JSON array and write it to the output stream.\r
+ *\r
+ * @param rs the statistics ResultSet to render\r
+ * @param out the ServletOutputStream to write the JSON array to\r
+ * @throws IOException on write failure\r
+ * @throws SQLException on column access failure\r
+ */\r
+ private void rsToJson(ResultSet rs, ServletOutputStream out) throws IOException, SQLException {\r
+ String[] fields = {"FEEDNAME", FEEDID, "FILES_PUBLISHED", "PUBLISH_LENGTH", "FILES_DELIVERED",\r
+ "DELIVERED_LENGTH", "SUBSCRIBER_URL", "SUBID", "PUBLISH_TIME", "DELIVERY_TIME",\r
+ "AverageDelay"};\r
+ StringBuilder line = new StringBuilder();\r
+ line.append("[\n");\r
+ boolean first = true;\r
+ while (rs.next()) {\r
+ LOGJSONObject j2 = new LOGJSONObject();\r
+ for (String key : fields) {\r
+ Object v = rs.getString(key);\r
+ if (v != null) {\r
+ j2.put(key.toLowerCase(), v);\r
+ } else {\r
+ j2.put(key.toLowerCase(), "");\r
+ }\r
+ }\r
+ // Separator BEFORE each element (except the first) so the array has no\r
+ // trailing comma, which would make the emitted JSON invalid.\r
+ if (!first) {\r
+ line.append(",\n");\r
+ }\r
+ line.append(j2.toString());\r
+ first = false;\r
+ }\r
+ line.append("]");\r
+ out.print(line.toString());\r
+ }\r
+\r
+ /**\r
+ * getFeedIdsByGroupId - Getting FEEDID's by GROUP ID.\r
+ *\r
+ * @throws SQLException Query SQLException.\r
+ */\r
+ private StringBuilder getFeedIdsByGroupId(int groupIds) throws SQLException {\r
+ DB db = null;\r
+ Connection conn = null;\r
+ ResultSet resultSet = null;\r
+ String sqlGoupid = null;\r
+ StringBuilder feedIds = new StringBuilder();\r
+ try {\r
+ db = new DB();\r
+ conn = db.getConnection();\r
+ sqlGoupid = " SELECT FEEDID from FEEDS WHERE GROUPID = ?";\r
+ try (PreparedStatement prepareStatement = conn.prepareStatement(sqlGoupid)) {\r
+ prepareStatement.setInt(1, groupIds);\r
+ resultSet = prepareStatement.executeQuery();\r
+ while (resultSet.next()) {\r
+ feedIds.append(resultSet.getInt(FEEDID));\r
+ feedIds.append(",");\r
+ }\r
+ // Strip the trailing comma only if we actually appended anything;\r
+ // an empty result would otherwise throw StringIndexOutOfBoundsException.\r
+ if (feedIds.length() > 0) {\r
+ feedIds.deleteCharAt(feedIds.length() - 1);\r
+ }\r
+ eventlogger.info("PROV0177 StatisticsServlet.getFeedIdsByGroupId: feedIds = " + feedIds.toString());\r
+ }\r
+ } catch (SQLException e) {\r
+ eventlogger.error("PROV0175 StatisticsServlet.getFeedIdsByGroupId: " + e.getMessage(), e);\r
+ } finally {\r
+ try {\r
+ if (resultSet != null) {\r
+ resultSet.close();\r
+ }\r
+ if (conn != null) {\r
+ db.release(conn);\r
+ }\r
+ } catch (Exception e) {\r
+ eventlogger.error("PROV0176 StatisticsServlet.getFeedIdsByGroupId: " + e.getMessage(), e);\r
+ }\r
+ }\r
+ return feedIds;\r
}\r
\r
- if (req.getParameter(GROUPID) != null && req.getParameter(FEEDID) == null) {\r
- StringBuffer groupid1 = new StringBuffer();\r
\r
- try {\r
- groupid1 = this.getFeedIdsByGroupId(Integer.parseInt(req.getParameter(GROUPID)));\r
- map.put(FEEDIDS, groupid1.toString());\r
- } catch (NumberFormatException | SQLException e) {\r
- eventlogger.error("PROV0172 StatisticsServlet.doGet: " + e.getMessage(), e);\r
- }\r
- }\r
- if (req.getParameter(GROUPID) != null && req.getParameter(FEEDID) != null) {\r
- StringBuffer groupid1 = new StringBuffer();\r
-\r
- try {\r
- groupid1 = this.getFeedIdsByGroupId(Integer.parseInt(req.getParameter(GROUPID)));\r
- groupid1.append(",");\r
- groupid1.append(req.getParameter(FEEDID).replace("|", ","));\r
- map.put(FEEDIDS, groupid1.toString());\r
- } catch (NumberFormatException | SQLException e) {\r
- eventlogger.error("PROV0173 StatisticsServlet.doGet: " + e.getMessage(), e);\r
- }\r
- }\r
+ /**\r
+ * queryGeneretor - Generating sql query\r
+ *\r
+ * @param map as key value pare of all user input fields\r
+ */\r
+ private String queryGeneretor(Map<String, String> map) throws ParseException {\r
\r
- if (req.getParameter(SUBID) != null && req.getParameter(FEEDID) != null) {\r
- StringBuffer subidstr = new StringBuffer();\r
- subidstr.append("and e.DELIVERY_SUBID in(");\r
+ String sql;\r
+ String eventType = null;\r
+ String feedids = null;\r
+ String startTime = null;\r
+ String endTime = null;\r
+ String subid = " ";\r
+ if (map.get(EVENT_TYPE) != null) {\r
+ eventType = map.get(EVENT_TYPE);\r
+ }\r
+ if (map.get(FEEDIDS) != null) {\r
+ feedids = map.get(FEEDIDS);\r
+ }\r
+ if (map.get(START_TIME) != null) {\r
+ startTime = map.get(START_TIME);\r
+ }\r
+ if (map.get(END_TIME) != null) {\r
+ endTime = map.get(END_TIME);\r
+ }\r
+ if ("all".equalsIgnoreCase(eventType)) {\r
+ eventType = "PUB','DEL, EXP, PBF";\r
+ }\r
+ if (map.get(SUBID) != null) {\r
+ subid = map.get(SUBID);\r
+ }\r
\r
- subidstr.append(req.getParameter(SUBID).replace("|", ","));\r
- subidstr.append(")");\r
- map.put(SUBID, subidstr.toString());\r
- }\r
- if (req.getParameter(SUBID) != null && req.getParameter(GROUPID) != null) {\r
- StringBuffer subidstr = new StringBuffer();\r
- subidstr.append("and e.DELIVERY_SUBID in(");\r
+ eventlogger.info("Generating sql query to get Statistics resultset. ");\r
\r
- subidstr.append(req.getParameter(SUBID).replace("|", ","));\r
- subidstr.append(")");\r
- map.put(SUBID, subidstr.toString());\r
- }\r
- if (req.getParameter("type") != null) {\r
- map.put(EVENT_TYPE, req.getParameter("type").replace("|", ","));\r
- }\r
- if (req.getParameter(OUTPUT_TYPE) != null) {\r
- map.put(OUTPUT_TYPE, req.getParameter(OUTPUT_TYPE));\r
- }\r
- if (req.getParameter(START_TIME) != null) {\r
- map.put(START_TIME, req.getParameter(START_TIME));\r
- }\r
- if (req.getParameter(END_TIME) != null) {\r
- map.put(END_TIME, req.getParameter(END_TIME));\r
- }\r
+ if (endTime == null && startTime == null) {\r
\r
- if (req.getParameter("time") != null) {\r
- map.put(START_TIME, req.getParameter("time"));\r
- map.put(END_TIME, null);\r
- }\r
+ sql = "SELECT (SELECT NAME FROM FEEDS AS f WHERE f.FEEDID in(" + feedids\r
+ + ") and f.FEEDID=e.FEEDID) AS FEEDNAME, e.FEEDID as FEEDID, (SELECT COUNT(*) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
+ + feedids\r
+ + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS FILES_PUBLISHED,(SELECT SUM(content_length) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
+ + feedids\r
+ + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS PUBLISH_LENGTH, COUNT(e.EVENT_TIME) as FILES_DELIVERED, sum(m.content_length) as DELIVERED_LENGTH,SUBSTRING_INDEX(e.REQURI,'/',+3) as SUBSCRIBER_URL, e.DELIVERY_SUBID as SUBID, e.EVENT_TIME AS PUBLISH_TIME, m.EVENT_TIME AS DELIVERY_TIME, AVG(e.EVENT_TIME - m.EVENT_TIME)/1000 as AverageDelay FROM LOG_RECORDS e JOIN LOG_RECORDS m ON m.PUBLISH_ID = e.PUBLISH_ID AND e.FEEDID IN ("\r
+ + feedids + ") " + subid + " AND m.STATUS=204 AND e.RESULT=204 group by SUBID";\r
\r
- if (req.getParameter(OUTPUT_TYPE) != null) {\r
- outputType = req.getParameter(OUTPUT_TYPE);\r
- }\r
- try {\r
- this.getRecordsForSQL(map, outputType, resp.getOutputStream(), resp);\r
- } catch (IOException ioe) {\r
- eventlogger.error("PROV0174 StatisticsServlet.doGet: " + ioe.getMessage(), ioe);\r
- }\r
+ return sql;\r
+ } else if (startTime != null && endTime == null) {\r
+\r
+ long inputTimeInMilli = 60000 * Long.parseLong(startTime);\r
+ Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));\r
+ long currentTimeInMilli = cal.getTimeInMillis();\r
+ long compareTime = currentTimeInMilli - inputTimeInMilli;\r
+\r
+ sql = "SELECT (SELECT NAME FROM FEEDS AS f WHERE f.FEEDID in(" + feedids\r
+ + ") and f.FEEDID=e.FEEDID) AS FEEDNAME, e.FEEDID as FEEDID, (SELECT COUNT(*) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
+ + feedids\r
+ + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS FILES_PUBLISHED,(SELECT SUM(content_length) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
+ + feedids\r
+ + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS PUBLISH_LENGTH, COUNT(e.EVENT_TIME) as FILES_DELIVERED, sum(m.content_length) as DELIVERED_LENGTH,SUBSTRING_INDEX(e.REQURI,'/',+3) as SUBSCRIBER_URL, e.DELIVERY_SUBID as SUBID, e.EVENT_TIME AS PUBLISH_TIME, m.EVENT_TIME AS DELIVERY_TIME, AVG(e.EVENT_TIME - m.EVENT_TIME)/1000 as AverageDelay FROM LOG_RECORDS e JOIN LOG_RECORDS m ON m.PUBLISH_ID = e.PUBLISH_ID AND e.FEEDID IN ("\r
+ + feedids + ") " + subid + " AND m.STATUS=204 AND e.RESULT=204 and e.event_time>="\r
+ + compareTime + " group by SUBID";\r
+\r
+ return sql;\r
\r
- }\r
-\r
-\r
- /**\r
- * rsToJson - Converting RS to JSON object\r
- *\r
- * @param out ServletOutputStream, rs as ResultSet\r
- * @throws IOException, SQLException\r
- */\r
- public void rsToCSV(ResultSet rs, ServletOutputStream out) throws IOException, SQLException {\r
- String header = "FEEDNAME,FEEDID,FILES_PUBLISHED,PUBLISH_LENGTH, FILES_DELIVERED, DELIVERED_LENGTH, SUBSCRIBER_URL, SUBID, PUBLISH_TIME,DELIVERY_TIME, AverageDelay\n";\r
-\r
- out.write(header.getBytes());\r
-\r
- while (rs.next()) {\r
- StringBuffer line = new StringBuffer();\r
- line.append(rs.getString("FEEDNAME"));\r
- line.append(",");\r
- line.append(rs.getString("FEEDID"));\r
- line.append(",");\r
- line.append(rs.getString("FILES_PUBLISHED"));\r
- line.append(",");\r
- line.append(rs.getString("PUBLISH_LENGTH"));\r
- line.append(",");\r
- line.append(rs.getString("FILES_DELIVERED"));\r
- line.append(",");\r
- line.append(rs.getString("DELIVERED_LENGTH"));\r
- line.append(",");\r
- line.append(rs.getString("SUBSCRIBER_URL"));\r
- line.append(",");\r
- line.append(rs.getString("SUBID"));\r
- line.append(",");\r
- line.append(rs.getString("PUBLISH_TIME"));\r
- line.append(",");\r
- line.append(rs.getString("DELIVERY_TIME"));\r
- line.append(",");\r
- line.append(rs.getString("AverageDelay"));\r
- line.append(",");\r
-\r
- line.append("\n");\r
- out.write(line.toString().getBytes());\r
- out.flush();\r
- }\r
- }\r
-\r
- /**\r
- * rsToJson - Converting RS to JSON object\r
- *\r
- * @param out ServletOutputStream, rs as ResultSet\r
- * @throws IOException, SQLException\r
- */\r
- public void rsToJson(ResultSet rs, ServletOutputStream out) throws IOException, SQLException {\r
-\r
- String[] fields = {"FEEDNAME", "FEEDID", "FILES_PUBLISHED", "PUBLISH_LENGTH", "FILES_DELIVERED",\r
- "DELIVERED_LENGTH", "SUBSCRIBER_URL", "SUBID", "PUBLISH_TIME", "DELIVERY_TIME",\r
- "AverageDelay"};\r
- StringBuffer line = new StringBuffer();\r
-\r
- line.append("[\n");\r
-\r
- while (rs.next()) {\r
- LOGJSONObject j2 = new LOGJSONObject();\r
- for (String key : fields) {\r
- Object v = rs.getString(key);\r
- if (v != null) {\r
- j2.put(key.toLowerCase(), v);\r
} else {\r
- j2.put(key.toLowerCase(), "");\r
+ SimpleDateFormat inFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");\r
+ Date startDate = inFormat.parse(startTime);\r
+ Date endDate = inFormat.parse(endTime);\r
+\r
+ long startInMillis = startDate.getTime();\r
+ long endInMillis = endDate.getTime();\r
+\r
+ sql = "SELECT (SELECT NAME FROM FEEDS AS f WHERE f.FEEDID in(" + feedids\r
+ + ") and f.FEEDID=e.FEEDID) AS FEEDNAME, e.FEEDID as FEEDID, (SELECT COUNT(*) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
+ + feedids\r
+ + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS FILES_PUBLISHED,(SELECT SUM(content_length) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
+ + feedids\r
+ + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS PUBLISH_LENGTH, COUNT(e.EVENT_TIME) as FILES_DELIVERED, sum(m.content_length) as DELIVERED_LENGTH,SUBSTRING_INDEX(e.REQURI,'/',+3) as SUBSCRIBER_URL, e.DELIVERY_SUBID as SUBID, e.EVENT_TIME AS PUBLISH_TIME, m.EVENT_TIME AS DELIVERY_TIME, AVG(e.EVENT_TIME - m.EVENT_TIME)/1000 as AverageDelay FROM LOG_RECORDS e JOIN LOG_RECORDS m ON m.PUBLISH_ID = e.PUBLISH_ID AND e.FEEDID IN ("\r
+ + feedids + ") " + subid + " AND m.STATUS=204 AND e.RESULT=204 and e.event_time between " + startInMillis\r
+ + " and " + endInMillis + " group by SUBID";\r
+\r
+\r
+ return sql;\r
}\r
- }\r
- line = line.append(j2.toString());\r
- line.append(",\n");\r
- }\r
- line.append("]");\r
- out.print(line.toString());\r
- }\r
-\r
- /**\r
- * getFeedIdsByGroupId - Getting FEEDID's by GROUP ID.\r
- *\r
- * @throws SQLException Query SQLException.\r
- */\r
- public StringBuffer getFeedIdsByGroupId(int groupIds) throws SQLException {\r
-\r
- DB db = null;\r
- Connection conn = null;\r
- ResultSet resultSet = null;\r
- String sqlGoupid = null;\r
- StringBuffer feedIds = new StringBuffer();\r
-\r
- try {\r
- db = new DB();\r
- conn = db.getConnection();\r
- sqlGoupid = " SELECT FEEDID from FEEDS WHERE GROUPID = ?";\r
- try(PreparedStatement prepareStatement = conn.prepareStatement(sqlGoupid)) {\r
- prepareStatement.setInt(1, groupIds);\r
- resultSet = prepareStatement.executeQuery();\r
- while (resultSet.next()) {\r
- feedIds.append(resultSet.getInt("FEEDID"));\r
- feedIds.append(",");\r
- }\r
- feedIds.deleteCharAt(feedIds.length() - 1);\r
- System.out.println("feedIds" + feedIds.toString());\r
- }\r
- } catch (SQLException e) {\r
- eventlogger.error("PROV0175 StatisticsServlet.getFeedIdsByGroupId: " + e.getMessage(), e);\r
- } finally {\r
- try {\r
- if (resultSet != null) {\r
- resultSet.close();\r
- resultSet = null;\r
- }\r
- if (conn != null) {\r
- db.release(conn);\r
- }\r
- } catch (Exception e) {\r
- eventlogger.error("PROV0176 StatisticsServlet.getFeedIdsByGroupId: " + e.getMessage(), e);\r
- }\r
- }\r
- return feedIds;\r
- }\r
-\r
-\r
- /**\r
- * queryGeneretor - Generating sql query\r
- *\r
- * @param map as key value pare of all user input fields\r
- */\r
- public String queryGeneretor(Map<String, String> map) throws ParseException {\r
-\r
- String sql = null;\r
- String eventType = null;\r
- String feedids = null;\r
- String start_time = null;\r
- String end_time = null;\r
- String subid = " ";\r
- if (map.get(EVENT_TYPE) != null) {\r
- eventType = map.get(EVENT_TYPE);\r
- }\r
- if (map.get(FEEDIDS) != null) {\r
- feedids = map.get(FEEDIDS);\r
- }\r
- if (map.get(START_TIME) != null) {\r
- start_time = map.get(START_TIME);\r
- }\r
- if (map.get(END_TIME) != null) {\r
- end_time = map.get(END_TIME);\r
- }\r
- if ("all".equalsIgnoreCase(eventType)) {\r
- eventType = "PUB','DEL, EXP, PBF";\r
- }\r
- if (map.get(SUBID) != null) {\r
- subid = map.get(SUBID);\r
}\r
\r
- eventlogger.info("Generating sql query to get Statistics resultset. ");\r
-\r
- if (end_time == null && start_time == null) {\r
-\r
- sql = "SELECT (SELECT NAME FROM FEEDS AS f WHERE f.FEEDID in(" + feedids\r
- + ") and f.FEEDID=e.FEEDID) AS FEEDNAME, e.FEEDID as FEEDID, (SELECT COUNT(*) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
- + feedids\r
- + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS FILES_PUBLISHED,(SELECT SUM(content_length) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
- + feedids\r
- + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS PUBLISH_LENGTH, COUNT(e.EVENT_TIME) as FILES_DELIVERED, sum(m.content_length) as DELIVERED_LENGTH,SUBSTRING_INDEX(e.REQURI,'/',+3) as SUBSCRIBER_URL, e.DELIVERY_SUBID as SUBID, e.EVENT_TIME AS PUBLISH_TIME, m.EVENT_TIME AS DELIVERY_TIME, AVG(e.EVENT_TIME - m.EVENT_TIME)/1000 as AverageDelay FROM LOG_RECORDS e JOIN LOG_RECORDS m ON m.PUBLISH_ID = e.PUBLISH_ID AND e.FEEDID IN ("\r
- + feedids + ") " + subid + " AND m.STATUS=204 AND e.RESULT=204 group by SUBID";\r
-\r
- return sql;\r
- } else if (start_time != null && end_time == null) {\r
-\r
- long inputTimeInMilli = 60000 * Long.parseLong(start_time);\r
- Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));\r
- long currentTimeInMilli = cal.getTimeInMillis();\r
- long compareTime = currentTimeInMilli - inputTimeInMilli;\r
-\r
- sql = "SELECT (SELECT NAME FROM FEEDS AS f WHERE f.FEEDID in(" + feedids\r
- + ") and f.FEEDID=e.FEEDID) AS FEEDNAME, e.FEEDID as FEEDID, (SELECT COUNT(*) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
- + feedids\r
- + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS FILES_PUBLISHED,(SELECT SUM(content_length) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
- + feedids\r
- + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS PUBLISH_LENGTH, COUNT(e.EVENT_TIME) as FILES_DELIVERED, sum(m.content_length) as DELIVERED_LENGTH,SUBSTRING_INDEX(e.REQURI,'/',+3) as SUBSCRIBER_URL, e.DELIVERY_SUBID as SUBID, e.EVENT_TIME AS PUBLISH_TIME, m.EVENT_TIME AS DELIVERY_TIME, AVG(e.EVENT_TIME - m.EVENT_TIME)/1000 as AverageDelay FROM LOG_RECORDS e JOIN LOG_RECORDS m ON m.PUBLISH_ID = e.PUBLISH_ID AND e.FEEDID IN ("\r
- + feedids + ") " + subid + " AND m.STATUS=204 AND e.RESULT=204 and e.event_time>="\r
- + compareTime + " group by SUBID";\r
-\r
- return sql;\r
-\r
- } else {\r
- SimpleDateFormat inFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");\r
- Date startDate = inFormat.parse(start_time);\r
- Date endDate = inFormat.parse(end_time);\r
-\r
- long startInMillis = startDate.getTime();\r
- long endInMillis = endDate.getTime();\r
-\r
- {\r
-\r
- sql = "SELECT (SELECT NAME FROM FEEDS AS f WHERE f.FEEDID in(" + feedids\r
- + ") and f.FEEDID=e.FEEDID) AS FEEDNAME, e.FEEDID as FEEDID, (SELECT COUNT(*) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
- + feedids\r
- + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS FILES_PUBLISHED,(SELECT SUM(content_length) FROM LOG_RECORDS AS c WHERE c.FEEDID in("\r
- + feedids\r
- + ") and c.FEEDID=e.FEEDID AND c.TYPE='PUB') AS PUBLISH_LENGTH, COUNT(e.EVENT_TIME) as FILES_DELIVERED, sum(m.content_length) as DELIVERED_LENGTH,SUBSTRING_INDEX(e.REQURI,'/',+3) as SUBSCRIBER_URL, e.DELIVERY_SUBID as SUBID, e.EVENT_TIME AS PUBLISH_TIME, m.EVENT_TIME AS DELIVERY_TIME, AVG(e.EVENT_TIME - m.EVENT_TIME)/1000 as AverageDelay FROM LOG_RECORDS e JOIN LOG_RECORDS m ON m.PUBLISH_ID = e.PUBLISH_ID AND e.FEEDID IN ("\r
- + feedids + ") " + subid\r
- + " AND m.STATUS=204 AND e.RESULT=204 and e.event_time between " + startInMillis\r
- + " and " + endInMillis + " group by SUBID";\r
-\r
- }\r
- return sql;\r
- }\r
- }\r
-\r
-\r
- /**\r
- * PUT a Statistics URL -- not supported.\r
- */\r
- @Override\r
- public void doPut(HttpServletRequest req, HttpServletResponse resp) {\r
- String message = "PUT not allowed for the StatisticsURL.";\r
- EventLogRecord elr = new EventLogRecord(req);\r
- elr.setMessage(message);\r
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);\r
- eventlogger.error(elr.toString());\r
- sendResponseError(resp, HttpServletResponse.SC_METHOD_NOT_ALLOWED, message, eventlogger);\r
- }\r
-\r
- /**\r
- * POST a Statistics URL -- not supported.\r
- */\r
- @Override\r
- public void doPost(HttpServletRequest req, HttpServletResponse resp) {\r
- String message = "POST not allowed for the StatisticsURL.";\r
- EventLogRecord elr = new EventLogRecord(req);\r
- elr.setMessage(message);\r
- elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);\r
- eventlogger.error(elr.toString());\r
- sendResponseError(resp, HttpServletResponse.SC_METHOD_NOT_ALLOWED, message, eventlogger);\r
- }\r
-\r
- private Map<String, String> buildMapFromRequest(HttpServletRequest req) {\r
- Map<String, String> map = new HashMap<>();\r
- String s = req.getParameter("type");\r
- if (s != null) {\r
- if ("pub".equals(s) || "del".equals(s) || "exp".equals(s)) {\r
- map.put("type", s);\r
- } else {\r
- map.put("err", "bad type");\r
- return map;\r
- }\r
- } else {\r
- map.put("type", "all");\r
- }\r
- map.put("publishSQL", "");\r
- map.put("statusSQL", "");\r
- map.put("resultSQL", "");\r
- map.put(REASON_SQL, "");\r
-\r
- s = req.getParameter("publishId");\r
- if (s != null) {\r
- if (s.indexOf("'") >= 0) {\r
- map.put("err", "bad publishId");\r
- return map;\r
- }\r
- map.put("publishSQL", " AND PUBLISH_ID = '" + s + "'");\r
- }\r
\r
- s = req.getParameter("statusCode");\r
- if (s != null) {\r
- String sql = null;\r
- if ("success".equals(s)) {\r
- sql = " AND STATUS >= 200 AND STATUS < 300";\r
- } else if ("redirect".equals(s)) {\r
- sql = " AND STATUS >= 300 AND STATUS < 400";\r
- } else if ("failure".equals(s)) {\r
- sql = " AND STATUS >= 400";\r
- } else {\r
- try {\r
- Integer n = Integer.parseInt(s);\r
- if ((n >= 100 && n < 600) || (n == -1)) {\r
- sql = " AND STATUS = " + n;\r
- }\r
- } catch (NumberFormatException e) {\r
+ /**\r
+ * PUT a Statistics URL -- not supported.\r
+ */\r
+ @Override\r
+ public void doPut(HttpServletRequest req, HttpServletResponse resp) {\r
+ String message = "PUT not allowed for the StatisticsURL.";\r
+ EventLogRecord elr = new EventLogRecord(req);\r
+ elr.setMessage(message);\r
+ elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);\r
+ eventlogger.error(elr.toString());\r
+ sendResponseError(resp, HttpServletResponse.SC_METHOD_NOT_ALLOWED, message, eventlogger);\r
+ }\r
+\r
+ /**\r
+ * POST a Statistics URL -- not supported.\r
+ */\r
+ @Override\r
+ public void doPost(HttpServletRequest req, HttpServletResponse resp) {\r
+ String message = "POST not allowed for the StatisticsURL.";\r
+ EventLogRecord elr = new EventLogRecord(req);\r
+ elr.setMessage(message);\r
+ elr.setResult(HttpServletResponse.SC_METHOD_NOT_ALLOWED);\r
+ eventlogger.error(elr.toString());\r
+ sendResponseError(resp, HttpServletResponse.SC_METHOD_NOT_ALLOWED, message, eventlogger);\r
+ }\r
+\r
+ private Map<String, String> buildMapFromRequest(HttpServletRequest req) {\r
+ Map<String, String> map = new HashMap<>();\r
+ String s = req.getParameter("type");\r
+ if (s != null) {\r
+ if ("pub".equals(s) || "del".equals(s) || "exp".equals(s)) {\r
+ map.put("type", s);\r
+ } else {\r
+ map.put("err", "bad type");\r
+ return map;\r
+ }\r
+ } else {\r
+ map.put("type", "all");\r
+ }\r
+ map.put("publishSQL", "");\r
+ map.put("statusSQL", "");\r
+ map.put("resultSQL", "");\r
+ map.put(REASON_SQL, "");\r
+\r
+ s = req.getParameter("publishId");\r
+ if (s != null) {\r
+ if (s.indexOf("'") >= 0) {\r
+ map.put("err", "bad publishId");\r
+ return map;\r
+ }\r
+ map.put("publishSQL", " AND PUBLISH_ID = '" + s + "'");\r
+ }\r
+\r
+ s = req.getParameter("statusCode");\r
+ if (s != null) {\r
+ String sql = null;\r
+ switch (s) {\r
+ case "success":\r
+ sql = " AND STATUS >= 200 AND STATUS < 300";\r
+ break;\r
+ case "redirect":\r
+ sql = " AND STATUS >= 300 AND STATUS < 400";\r
+ break;\r
+ case "failure":\r
+ sql = " AND STATUS >= 400";\r
+ break;\r
+ default:\r
+ try {\r
+ int n = Integer.parseInt(s);\r
+ if ((n >= 100 && n < 600) || (n == -1)) {\r
+ sql = " AND STATUS = " + n;\r
+ }\r
+ } catch (NumberFormatException e) {\r
+ eventlogger.error("Failed to parse input", e);\r
+ }\r
+ break;\r
+ }\r
+ if (sql == null) {\r
+ map.put("err", "bad statusCode");\r
+ return map;\r
+ }\r
+ map.put("statusSQL", sql);\r
+ map.put("resultSQL", sql.replace("STATUS", "RESULT"));\r
}\r
- }\r
- if (sql == null) {\r
- map.put("err", "bad statusCode");\r
- return map;\r
- }\r
- map.put("statusSQL", sql);\r
- map.put("resultSQL", sql.replaceAll("STATUS", "RESULT"));\r
- }\r
\r
- s = req.getParameter("expiryReason");\r
- if (s != null) {\r
- map.put("type", "exp");\r
- if ("notRetryable".equals(s)) {\r
- map.put(REASON_SQL, " AND REASON = 'notRetryable'");\r
- } else if ("retriesExhausted".equals(s)) {\r
- map.put(REASON_SQL, " AND REASON = 'retriesExhausted'");\r
- } else if ("diskFull".equals(s)) {\r
- map.put(REASON_SQL, " AND REASON = 'diskFull'");\r
- } else if ("other".equals("other")) {\r
- map.put(REASON_SQL, " AND REASON = 'other'");\r
- } else {\r
- map.put("err", "bad expiryReason");\r
+ s = req.getParameter("expiryReason");\r
+ if (s != null) {\r
+ map.put("type", "exp");\r
+ switch (s) {\r
+ case "notRetryable":\r
+ map.put(REASON_SQL, " AND REASON = 'notRetryable'");\r
+ break;\r
+ case "retriesExhausted":\r
+ map.put(REASON_SQL, " AND REASON = 'retriesExhausted'");\r
+ break;\r
+ case "diskFull":\r
+ map.put(REASON_SQL, " AND REASON = 'diskFull'");\r
+ break;\r
+ case "other":\r
+ map.put(REASON_SQL, " AND REASON = 'other'");\r
+ break;\r
+ default:\r
+ map.put("err", "bad expiryReason");\r
+ return map;\r
+ }\r
+ }\r
+\r
+ long stime = getTimeFromParam(req.getParameter("start"));\r
+ if (stime < 0) {\r
+ map.put("err", "bad start");\r
+ return map;\r
+ }\r
+ long etime = getTimeFromParam(req.getParameter("end"));\r
+ if (etime < 0) {\r
+ map.put("err", "bad end");\r
+ return map;\r
+ }\r
+ if (stime == 0 && etime == 0) {\r
+ etime = System.currentTimeMillis();\r
+ stime = etime - TWENTYFOUR_HOURS;\r
+ } else if (stime == 0) {\r
+ stime = etime - TWENTYFOUR_HOURS;\r
+ } else if (etime == 0) {\r
+ etime = stime + TWENTYFOUR_HOURS;\r
+ }\r
+ map.put("timeSQL", String.format(" AND EVENT_TIME >= %d AND EVENT_TIME <= %d", stime, etime));\r
return map;\r
- }\r
}\r
\r
- long stime = getTimeFromParam(req.getParameter("start"));\r
- if (stime < 0) {\r
- map.put("err", "bad start");\r
- return map;\r
- }\r
- long etime = getTimeFromParam(req.getParameter("end"));\r
- if (etime < 0) {\r
- map.put("err", "bad end");\r
- return map;\r
- }\r
- if (stime == 0 && etime == 0) {\r
- etime = System.currentTimeMillis();\r
- stime = etime - TWENTYFOUR_HOURS;\r
- } else if (stime == 0) {\r
- stime = etime - TWENTYFOUR_HOURS;\r
- } else if (etime == 0) {\r
- etime = stime + TWENTYFOUR_HOURS;\r
+ private long getTimeFromParam(final String s) {\r
+ if (s == null) {\r
+ return 0;\r
+ }\r
+ try {\r
+ // First, look for an RFC 3339 date\r
+ String fmt = (s.indexOf('.') > 0) ? FMT2 : FMT1;\r
+ SimpleDateFormat sdf = new SimpleDateFormat(fmt);\r
+ Date d = sdf.parse(s);\r
+ return d.getTime();\r
+ } catch (ParseException e) {\r
+ intlogger.error("Exception in getting Time :- " + e.getMessage(), e);\r
+ }\r
+ try {\r
+ // Also allow a long (in ms); useful for testing\r
+ return Long.parseLong(s);\r
+ } catch (NumberFormatException e) {\r
+ intlogger.error("Exception in getting Time :- " + e.getMessage(), e);\r
+ }\r
+ intlogger.info("Error parsing time=" + s);\r
+ return -1;\r
}\r
- map.put("timeSQL", String.format(" AND EVENT_TIME >= %d AND EVENT_TIME <= %d", stime, etime));\r
- return map;\r
- }\r
\r
- private long getTimeFromParam(final String s) {\r
- if (s == null) {\r
- return 0;\r
- }\r
- try {\r
- // First, look for an RFC 3339 date\r
- String fmt = (s.indexOf('.') > 0) ? FMT2 : FMT1;\r
- SimpleDateFormat sdf = new SimpleDateFormat(fmt);\r
- Date d = sdf.parse(s);\r
- return d.getTime();\r
- } catch (ParseException e) {\r
- }\r
- try {\r
- // Also allow a long (in ms); useful for testing\r
- return Long.parseLong(s);\r
- } catch (NumberFormatException e) {\r
- }\r
- intlogger.info("Error parsing time=" + s);\r
- return -1;\r
- }\r
-\r
- private void getRecordsForSQL(Map<String, String> map, String outputType, ServletOutputStream out, HttpServletResponse resp) {\r
- try {\r
-\r
- String filterQuery = this.queryGeneretor(map);\r
- eventlogger.debug("SQL Query for Statistics resultset. " + filterQuery);\r
- intlogger.debug(filterQuery);\r
- long start = System.currentTimeMillis();\r
- DB db = new DB();\r
- try (Connection conn = db.getConnection()) {\r
- try (ResultSet rs = conn.prepareStatement(filterQuery).executeQuery()) {\r
- if ("csv".equals(outputType)) {\r
- resp.setContentType("application/octet-stream");\r
- Date date = new Date();\r
- SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-YYYY HH:mm:ss");\r
- resp.setHeader("Content-Disposition",\r
- "attachment; filename=\"result:" + dateFormat.format(date) + ".csv\"");\r
- eventlogger.info("Generating CSV file from Statistics resultset");\r
-\r
- rsToCSV(rs, out);\r
- } else {\r
- eventlogger.info("Generating JSON for Statistics resultset");\r
- this.rsToJson(rs, out);\r
- }\r
- }\r
- } catch (SQLException e) {\r
- eventlogger.error("SQLException:" + e);\r
- }\r
- intlogger.debug("Time: " + (System.currentTimeMillis() - start) + " ms");\r
- } catch (IOException e) {\r
- eventlogger.error("IOException - Generating JSON/CSV:" + e);\r
- } catch (JSONException e) {\r
- eventlogger.error("JSONException - executing SQL query:" + e);\r
- } catch (ParseException e) {\r
- eventlogger.error("ParseException - executing SQL query:" + e);\r
+ private void getRecordsForSQL(Map<String, String> map, String outputType, ServletOutputStream out, HttpServletResponse resp) {\r
+ try {\r
+ String filterQuery = this.queryGeneretor(map);\r
+ eventlogger.debug("SQL Query for Statistics resultset. " + filterQuery);\r
+ intlogger.debug(filterQuery);\r
+ long start = System.currentTimeMillis();\r
+ DB db = new DB();\r
+ try (Connection conn = db.getConnection()) {\r
+ try (ResultSet rs = conn.prepareStatement(filterQuery).executeQuery()) {\r
+ if ("csv".equals(outputType)) {\r
+ resp.setContentType("application/octet-stream");\r
+ Date date = new Date();\r
+ SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss");\r
+ resp.setHeader("Content-Disposition",\r
+ "attachment; filename=\"result:" + dateFormat.format(date) + ".csv\"");\r
+ eventlogger.info("Generating CSV file from Statistics resultset");\r
+ rsToCSV(rs, out);\r
+ } else {\r
+ eventlogger.info("Generating JSON for Statistics resultset");\r
+ this.rsToJson(rs, out);\r
+ }\r
+ }\r
+ db.release(conn);\r
+ } catch (SQLException e) {\r
+ eventlogger.error("SQLException:" + e);\r
+ }\r
+ intlogger.debug("Time: " + (System.currentTimeMillis() - start) + " ms");\r
+ } catch (IOException e) {\r
+ eventlogger.error("IOException - Generating JSON/CSV:" + e);\r
+ } catch (JSONException e) {\r
+ eventlogger.error("JSONException - executing SQL query:" + e);\r
+ } catch (ParseException e) {\r
+ eventlogger.error("ParseException - executing SQL query:" + e);\r
+ }\r
}\r
- }\r
}\r
\r
\r
package org.onap.dmaap.datarouter.provisioning;\r
\r
+import static org.onap.dmaap.datarouter.provisioning.utils.HttpServletUtils.sendResponseError;\r
+\r
+import com.att.eelf.configuration.EELFLogger;\r
+import com.att.eelf.configuration.EELFManager;\r
import java.io.IOException;\r
import java.io.InvalidObjectException;\r
import java.net.HttpURLConnection;\r
import java.net.URL;\r
+import java.util.ArrayList;\r
import java.util.List;\r
-import java.util.Vector;\r
-\r
import javax.servlet.http.HttpServletRequest;\r
import javax.servlet.http.HttpServletResponse;\r
-\r
import org.json.JSONException;\r
import org.json.JSONObject;\r
import org.onap.dmaap.datarouter.authz.AuthorizationResponse;\r
import org.onap.dmaap.datarouter.provisioning.beans.Subscription;\r
import org.onap.dmaap.datarouter.provisioning.eelf.EelfMsgs;\r
\r
-import com.att.eelf.configuration.EELFLogger;\r
-import com.att.eelf.configuration.EELFManager;\r
-\r
-import static org.onap.dmaap.datarouter.provisioning.utils.HttpServletUtils.sendResponseError;\r
-\r
/**\r
* This servlet handles provisioning for the <subscriptionURL> which is generated by the provisioning server to\r
* handle the inspection, modification, and deletion of a particular subscription to a feed. It supports DELETE to\r
* A Thread class used to serially send reset notifications to all nodes in the DR network, when a POST is received\r
* for a subscription.\r
*/\r
- public class SubscriberNotifyThread extends Thread {\r
+ public static class SubscriberNotifyThread extends Thread {\r
\r
- public static final String URL_TEMPLATE = "http://%s/internal/resetSubscription/%d";\r
- private List<String> urls = new Vector<>();\r
+ static final String URL_TEMPLATE = "http://%s/internal/resetSubscription/%d";\r
+ private List<String> urls = new ArrayList<>();\r
\r
- public SubscriberNotifyThread() {\r
+ SubscriberNotifyThread() {\r
setName("SubscriberNotifyThread");\r
}\r
\r
- public void resetSubscription(int subid) {\r
+ void resetSubscription(int subid) {\r
for (String nodename : BaseServlet.getNodes()) {\r
String u = String.format(URL_TEMPLATE, nodename, subid);\r
urls.add(u);\r
\r
@Override\r
public void run() {\r
-\r
try {\r
while (!urls.isEmpty()) {\r
- String u = urls.remove(0);\r
- try {\r
- URL url = new URL(u);\r
- HttpURLConnection conn = (HttpURLConnection) url.openConnection();\r
- conn.connect();\r
- conn.getContentLength(); // Force the GET through\r
- conn.disconnect();\r
- } catch (IOException e) {\r
- intlogger.info("PROV0194 Error accessing URL: " + u + ": " + e.getMessage(), e);\r
- }\r
+ String url = urls.remove(0);\r
+ forceGetThrough(url);\r
}\r
} catch (Exception e) {\r
intlogger.warn("PROV0195 Caught exception in SubscriberNotifyThread: " + e.getMessage(), e);\r
}\r
}\r
+\r
+ private void forceGetThrough(String url) {\r
+ try {\r
+ URL urlObj = new URL(url);\r
+ HttpURLConnection conn = (HttpURLConnection) urlObj.openConnection();\r
+ conn.connect();\r
+ conn.getContentLength(); // Force the GET through\r
+ conn.disconnect();\r
+ } catch (IOException e) {\r
+ intlogger.info("PROV0194 Error accessing URL: " + url + ": " + e.getMessage(), e);\r
+ }\r
+ }\r
}\r
}\r
* @version $Id: DeliveryRecord.java,v 1.9 2014/03/12 19:45:41 eby Exp $\r
*/\r
public class DeliveryRecord extends BaseLogRecord {\r
+\r
+ private static final String STATUS_CODE = "statusCode";\r
+ private static final String DELIVERY_ID = "deliveryId";\r
private int subid;\r
private String fileid;\r
private int result;\r
*/\r
public DeliveryRecord(String[] pp) throws ParseException {\r
super(pp);\r
- String fileid = pp[5];\r
- if (fileid.lastIndexOf('/') >= 0) {\r
- fileid = fileid.substring(fileid.lastIndexOf('/') + 1);\r
+ String thisFileid = pp[5];\r
+ if (thisFileid.lastIndexOf('/') >= 0) {\r
+ thisFileid = thisFileid.substring(thisFileid.lastIndexOf('/') + 1);\r
}\r
this.subid = Integer.parseInt(pp[4]);\r
- this.fileid = fileid;\r
+ this.fileid = thisFileid;\r
this.result = Integer.parseInt(pp[10]);\r
this.user = pp[9];\r
if (this.user != null && this.user.length() > 50) {\r
public LOGJSONObject reOrderObject(LOGJSONObject jo) {\r
LinkedHashMap<String, Object> logrecordObj = new LinkedHashMap<>();\r
\r
- logrecordObj.put("statusCode", jo.get("statusCode"));\r
- logrecordObj.put("deliveryId", jo.get("deliveryId"));\r
+ logrecordObj.put(STATUS_CODE, jo.get(STATUS_CODE));\r
+ logrecordObj.put(DELIVERY_ID, jo.get(DELIVERY_ID));\r
logrecordObj.put("publishId", jo.get("publishId"));\r
logrecordObj.put("requestURI", jo.get("requestURI"));\r
- //logrecordObj.put("sourceIP", jo.get("sourceIP"));\r
logrecordObj.put("method", jo.get("method"));\r
logrecordObj.put("contentType", jo.get("contentType"));\r
- //logrecordObj.put("endpointId", jo.get("endpointId"));\r
logrecordObj.put("type", jo.get("type"));\r
logrecordObj.put("date", jo.get("date"));\r
logrecordObj.put("contentLength", jo.get("contentLength"));\r
public LOGJSONObject asJSONObject() {\r
LOGJSONObject jo = super.asJSONObject();\r
jo.put("type", "del");\r
- jo.put("deliveryId", user);\r
- jo.put("statusCode", result);\r
+ jo.put(DELIVERY_ID, user);\r
+ jo.put(STATUS_CODE, result);\r
\r
return this.reOrderObject(jo);\r
}\r
public EgressRoute(int subid, int nodeid) {\r
this.subid = subid;\r
this.nodeid = nodeid;\r
- // Note: unlike for Feeds, it subscriptions can be removed from the tables, so it is\r
- // possible that an orphan ERT entry can exist if a sub is removed.\r
- // if (Subscription.getSubscriptionById(subid) == null)\r
- // throw new IllegalArgumentException("No such subscription: "+subid);\r
}\r
\r
public EgressRoute(int subid, String node) {\r
ps.setInt(1, this.subid);\r
ps.setInt(2, this.nodeid);\r
ps.execute();\r
- ps.close();\r
rv = true;\r
} catch (SQLException e) {\r
intlogger.warn("PROV0005 doInsert: " + e.getMessage(), e);\r
* @version $Id: ExpiryRecord.java,v 1.4 2013/10/28 18:06:52 eby Exp $\r
*/\r
public class ExpiryRecord extends BaseLogRecord {\r
+\r
+ public static final String EXPIRY_REASON = "expiryReason";\r
+ public static final String ATTEMPTS = "attempts";\r
private int subid;\r
private String fileid;\r
- private int attempts;\r
+ private int deliveryAttempts;\r
private String reason;\r
\r
/**\r
*/\r
public ExpiryRecord(String[] pp) throws ParseException {\r
super(pp);\r
- String fileid = pp[5];\r
- if (fileid.lastIndexOf('/') >= 0) {\r
- fileid = fileid.substring(fileid.lastIndexOf('/') + 1);\r
+ String thisFileid = pp[5];\r
+ if (thisFileid.lastIndexOf('/') >= 0) {\r
+ thisFileid = thisFileid.substring(thisFileid.lastIndexOf('/') + 1);\r
}\r
this.subid = Integer.parseInt(pp[4]);\r
- this.fileid = fileid;\r
- this.attempts = Integer.parseInt(pp[10]);\r
+ this.fileid = thisFileid;\r
+ this.deliveryAttempts = Integer.parseInt(pp[10]);\r
this.reason = pp[9];\r
if (!reason.equals("notRetryable") && !reason.equals("retriesExhausted") && !reason.equals("diskFull")) {\r
this.reason = "other";\r
super(rs);\r
this.subid = rs.getInt("DELIVERY_SUBID");\r
this.fileid = rs.getString("DELIVERY_FILEID");\r
- this.attempts = rs.getInt("ATTEMPTS");\r
+ this.deliveryAttempts = rs.getInt("ATTEMPTS");\r
this.reason = rs.getString("REASON");\r
}\r
\r
this.fileid = fileid;\r
}\r
\r
- public int getAttempts() {\r
- return attempts;\r
+ public int getDeliveryAttempts() {\r
+ return deliveryAttempts;\r
}\r
\r
- public void setAttempts(int attempts) {\r
- this.attempts = attempts;\r
+ public void setDeliveryAttempts(int deliveryAttempts) {\r
+ this.deliveryAttempts = deliveryAttempts;\r
}\r
\r
public String getReason() {\r
public LOGJSONObject reOrderObject(LOGJSONObject jo) {\r
LinkedHashMap<String, Object> logrecordObj = new LinkedHashMap<>();\r
\r
- logrecordObj.put("expiryReason", jo.get("expiryReason"));\r
+ logrecordObj.put(EXPIRY_REASON, jo.get(EXPIRY_REASON));\r
logrecordObj.put("publishId", jo.get("publishId"));\r
- logrecordObj.put("attempts", jo.get("attempts"));\r
+ logrecordObj.put(ATTEMPTS, jo.get(ATTEMPTS));\r
logrecordObj.put("requestURI", jo.get("requestURI"));\r
logrecordObj.put("method", jo.get("method"));\r
logrecordObj.put("contentType", jo.get("contentType"));\r
public LOGJSONObject asJSONObject() {\r
LOGJSONObject jo = super.asJSONObject();\r
jo.put("type", "exp");\r
- jo.put("expiryReason", reason);\r
- jo.put("attempts", attempts);\r
+ jo.put(EXPIRY_REASON, reason);\r
+ jo.put(ATTEMPTS, deliveryAttempts);\r
\r
return reOrderObject(jo);\r
}\r
ps.setInt(13, getSubid());\r
ps.setString(14, getFileid());\r
ps.setNull(15, Types.INTEGER);\r
- ps.setInt(16, getAttempts());\r
+ ps.setInt(16, getDeliveryAttempts());\r
ps.setString(17, getReason());\r
ps.setNull(19, Types.BIGINT);\r
ps.setNull(20, Types.VARCHAR);\r
Feed feed = map.get(id);\r
if (feed != null) {\r
FeedEndpointID epi = new FeedEndpointID(rs);\r
- Collection<FeedEndpointID> ecoll = feed.getAuthorization().getEndpoint_ids();\r
+ Collection<FeedEndpointID> ecoll = feed.getAuthorization().getEndpointIDS();\r
ecoll.add(epi);\r
}\r
}\r
int id = rs.getInt("FEEDID");\r
Feed feed = map.get(id);\r
if (feed != null) {\r
- Collection<String> acoll = feed.getAuthorization().getEndpoint_addrs();\r
+ Collection<String> acoll = feed.getAuthorization().getEndpointAddrs();\r
acoll.add(rs.getString("ADDR"));\r
}\r
}\r
if (feed != null) {\r
sql = "select * from FEED_ENDPOINT_IDS where FEEDID = " + feed.feedid;\r
try (ResultSet rs = stmt.executeQuery(sql)) {\r
- Collection<FeedEndpointID> ecoll = feed.getAuthorization().getEndpoint_ids();\r
+ Collection<FeedEndpointID> ecoll = feed.getAuthorization().getEndpointIDS();\r
while (rs.next()) {\r
FeedEndpointID epi = new FeedEndpointID(rs);\r
ecoll.add(epi);\r
}\r
sql = "select * from FEED_ENDPOINT_ADDRS where FEEDID = " + feed.feedid;\r
try (ResultSet rs = stmt.executeQuery(sql)) {\r
- Collection<String> acoll = feed.getAuthorization().getEndpoint_addrs();\r
+ Collection<String> acoll = feed.getAuthorization().getEndpointAddrs();\r
while (rs.next()) {\r
acoll.add(rs.getString("ADDR"));\r
}\r
//Fortify scan fixes - Privacy Violation\r
throw new InvalidObjectException("password field is too long (" + fid.getPassword() + ")");\r
}\r
- this.authorization.getEndpoint_ids().add(fid);\r
+ this.authorization.getEndpointIDS().add(fid);\r
}\r
- if (this.authorization.getEndpoint_ids().isEmpty()) {\r
+ if (this.authorization.getEndpointIDS().isEmpty()) {\r
throw new InvalidObjectException("need to specify at least one endpoint_id");\r
}\r
endPointIds = jauth.getJSONArray("endpoint_addrs");\r
if (!JSONUtilities.validIPAddrOrSubnet(addr)) {\r
throw new InvalidObjectException("bad IP addr or subnet mask: " + addr);\r
}\r
- this.authorization.getEndpoint_addrs().add(addr);\r
+ this.authorization.getEndpointAddrs().add(addr);\r
}\r
\r
this.publisher = jo.optString("publisher", "");\r
FeedAuthorization auth = getAuthorization();\r
String sql = "insert into FEED_ENDPOINT_IDS values (?, ?, ?)";\r
try (PreparedStatement ps2 = conn.prepareStatement(sql)) {\r
- for (FeedEndpointID fid : auth.getEndpoint_ids()) {\r
+ for (FeedEndpointID fid : auth.getEndpointIDS()) {\r
ps2.setInt(1, feedid);\r
ps2.setString(2, fid.getId());\r
ps2.setString(3, fid.getPassword());\r
// Create FEED_ENDPOINT_ADDRS rows\r
sql = "insert into FEED_ENDPOINT_ADDRS values (?, ?)";\r
try (PreparedStatement ps2 = conn.prepareStatement(sql)) {\r
- for (String t : auth.getEndpoint_addrs()) {\r
+ for (String t : auth.getEndpointAddrs()) {\r
ps2.setInt(1, feedid);\r
ps2.setString(2, t);\r
ps2.executeUpdate();\r
Feed oldobj = getFeedById(feedid);\r
PreparedStatement ps = null;\r
try {\r
- Set<FeedEndpointID> newset = getAuthorization().getEndpoint_ids();\r
- Set<FeedEndpointID> oldset = oldobj.getAuthorization().getEndpoint_ids();\r
+ Set<FeedEndpointID> newset = getAuthorization().getEndpointIDS();\r
+ Set<FeedEndpointID> oldset = oldobj.getAuthorization().getEndpointIDS();\r
\r
// Insert new FEED_ENDPOINT_IDS rows\r
String sql = "insert into FEED_ENDPOINT_IDS values (?, ?, ?)";\r
ps.close();\r
\r
// Insert new FEED_ENDPOINT_ADDRS rows\r
- Set<String> newset2 = getAuthorization().getEndpoint_addrs();\r
- Set<String> oldset2 = oldobj.getAuthorization().getEndpoint_addrs();\r
+ Set<String> newset2 = getAuthorization().getEndpointAddrs();\r
+ Set<String> oldset2 = oldobj.getAuthorization().getEndpointAddrs();\r
sql = "insert into FEED_ENDPOINT_ADDRS values (?, ?)";\r
ps = conn.prepareStatement(sql);\r
for (String t : newset2) {\r
this.classification = classification;\r
}\r
\r
- public Set<FeedEndpointID> getEndpoint_ids() {\r
+ public Set<FeedEndpointID> getEndpointIDS() {\r
return endpointIds;\r
}\r
\r
- public void setEndpoint_ids(Set<FeedEndpointID> endpointIds) {\r
+ public void setEndpointIDS(Set<FeedEndpointID> endpointIds) {\r
this.endpointIds = endpointIds;\r
}\r
\r
- public Set<String> getEndpoint_addrs() {\r
+ public Set<String> getEndpointAddrs() {\r
return endpointAddrs;\r
}\r
\r
- public void setEndpoint_addrs(Set<String> endpointAddrs) {\r
+ public void setEndpointAddrs(Set<String> endpointAddrs) {\r
this.endpointAddrs = endpointAddrs;\r
}\r
\r
\r
package org.onap.dmaap.datarouter.provisioning.beans;\r
\r
-import java.io.InvalidObjectException;\r
import java.util.Objects;\r
-\r
import org.json.JSONObject;\r
\r
/**\r
*/\r
public static Group getGroupMatching(Group gup, int groupid) {\r
String sql = String.format(\r
- "select * from GROUPS where NAME = '%s' and GROUPID != %d ",\r
- gup.getName(),\r
- gup.getGroupid()\r
- );\r
+ "select * from GROUPS where NAME = '%s' and GROUPID != %d ", gup.getName(), gup.getGroupid());\r
List<Group> list = getGroupsForSQL(sql);\r
return !list.isEmpty() ? list.get(0) : null;\r
}\r
ps.setString(2, userid);\r
ps.setString(3, subnet);\r
ps.execute();\r
- ps.close();\r
// Delete the NodeSet\r
ps2.setInt(1, nodelist);\r
ps2.execute();\r
*/\r
\r
public class PublishRecord extends BaseLogRecord {\r
+\r
+ public static final String STATUS_CODE = "statusCode";\r
+ public static final String SOURCE_IP = "sourceIP";\r
+ public static final String ENDPOINT_ID = "endpointId";\r
+ public static final String FILE_NAME = "fileName";\r
private String feedFileid;\r
private String remoteAddr;\r
private String user;\r
LinkedHashMap<String, Object> logrecordObj = new LinkedHashMap<>();\r
\r
\r
- logrecordObj.put("statusCode", jo.get("statusCode"));\r
+ logrecordObj.put(STATUS_CODE, jo.get(STATUS_CODE));\r
logrecordObj.put("publishId", jo.get("publishId"));\r
logrecordObj.put("requestURI", jo.get("requestURI"));\r
- logrecordObj.put("sourceIP", jo.get("sourceIP"));\r
+ logrecordObj.put(SOURCE_IP, jo.get(SOURCE_IP));\r
logrecordObj.put("method", jo.get("method"));\r
logrecordObj.put("contentType", jo.get("contentType"));\r
- logrecordObj.put("endpointId", jo.get("endpointId"));\r
+ logrecordObj.put(ENDPOINT_ID, jo.get(ENDPOINT_ID));\r
logrecordObj.put("type", jo.get("type"));\r
logrecordObj.put("date", jo.get("date"));\r
logrecordObj.put("contentLength", jo.get("contentLength"));\r
- logrecordObj.put("fileName", jo.get("fileName"));\r
+ logrecordObj.put(FILE_NAME, jo.get(FILE_NAME));\r
\r
return new LOGJSONObject(logrecordObj);\r
}\r
jo.put("feedFileid", feedFileid);\r
jo.put("remoteAddr", remoteAddr);\r
jo.put("user", user);\r
- jo.put("sourceIP", remoteAddr);\r
- jo.put("endpointId", user);\r
- jo.put("statusCode", status);\r
- jo.put("fileName", fileName);\r
+ jo.put(SOURCE_IP, remoteAddr);\r
+ jo.put(ENDPOINT_ID, user);\r
+ jo.put(STATUS_CODE, status);\r
+ jo.put(FILE_NAME, fileName);\r
\r
return this.reOrderObject(jo);\r
}\r
\r
package org.onap.dmaap.datarouter.provisioning.beans;\r
\r
-import java.io.InvalidObjectException;\r
import java.util.Objects;\r
-\r
import org.json.JSONObject;\r
\r
/**\r
public class JSONUtilities {\r
\r
private static final EELFLogger intlogger = EELFManager.getInstance().getLogger("InternalLog");\r
+\r
+ private JSONUtilities() {\r
+ // utility class: private constructor prevents instantiation\r
+ }\r
/**\r
* Does the String <i>v</i> represent a valid Internet address (with or without a\r
* mask length appended).\r
import com.att.eelf.configuration.EELFLogger;\r
import com.att.eelf.configuration.EELFManager;\r
import java.io.File;\r
+import java.io.IOException;\r
+import java.nio.file.Files;\r
import java.util.Objects;\r
import java.util.Properties;\r
import java.util.TimerTask;\r
long exptime = System.currentTimeMillis() - interval;\r
for (File logfile : Objects.requireNonNull(dir.listFiles())) {\r
if (logfile.lastModified() < exptime) {\r
- logfile.delete();\r
+ try {\r
+ Files.delete(logfile.toPath());\r
+ } catch (IOException e) {\r
+ utilsLogger.error("Failed to delete file: " + logfile.getPath(), e);\r
+ }\r
}\r
}\r
}\r
\r
@Override\r
public int hashCode() {\r
- return new Long(start ^ nbits).hashCode();\r
+ return Long.hashCode(start ^ nbits);\r
}\r
\r
@Override\r
\r
package org.onap.dmaap.datarouter.provisioning.utils;\r
\r
+import com.att.eelf.configuration.EELFLogger;\r
+import com.att.eelf.configuration.EELFManager;\r
import java.io.IOException;\r
import java.io.InputStream;\r
import java.util.ArrayList;\r
import java.util.Map;\r
import java.util.Timer;\r
import java.util.TimerTask;\r
-import java.util.Vector;\r
-\r
import javax.servlet.Filter;\r
import javax.servlet.FilterChain;\r
import javax.servlet.FilterConfig;\r
import javax.servlet.ServletResponse;\r
import javax.servlet.http.HttpServletRequest;\r
import javax.servlet.http.HttpServletResponse;\r
-\r
-import com.att.eelf.configuration.EELFLogger;\r
-import com.att.eelf.configuration.EELFManager;\r
import org.eclipse.jetty.continuation.Continuation;\r
import org.eclipse.jetty.continuation.ContinuationSupport;\r
-import org.eclipse.jetty.server.*;\r
+import org.eclipse.jetty.server.HttpConnection;\r
+import org.eclipse.jetty.server.Request;\r
import org.onap.dmaap.datarouter.provisioning.beans.Parameters;\r
\r
/**\r
* @version $Id: ThrottleFilter.java,v 1.2 2014/03/12 19:45:41 eby Exp $\r
*/\r
public class ThrottleFilter extends TimerTask implements Filter {\r
- public static final int DEFAULT_N = 10;\r
- public static final int DEFAULT_M = 5;\r
- public static final String THROTTLE_MARKER = "org.onap.dmaap.datarouter.provisioning.THROTTLE_MARKER";\r
+ private static final int DEFAULT_N = 10;\r
+ private static final int DEFAULT_M = 5;\r
+ private static final String THROTTLE_MARKER = "org.onap.dmaap.datarouter.provisioning.THROTTLE_MARKER";\r
private static final String JETTY_REQUEST = "org.eclipse.jetty.server.Request";\r
private static final long ONE_MINUTE = 60000L;\r
private static final int ACTION_DROP = 0;\r
\r
// Configuration\r
private static boolean enabled = false; // enabled or not\r
- private static int n_requests = 0; // number of requests in M minutes\r
- private static int m_minutes = 0; // sampling period\r
+ private static int numRequests = 0; // number of requests in M minutes\r
+ private static int samplingPeriod = 0; // sampling period\r
private static int action = ACTION_DROP; // action to take (throttle or drop)\r
\r
private static EELFLogger logger = EELFManager.getInstance().getLogger("InternalLog");\r
try {\r
Class.forName(JETTY_REQUEST);\r
String v = p.getValue();\r
- if (v != null && !v.equals("off")) {\r
+ if (v != null && !"off".equals(v)) {\r
String[] pp = v.split(",");\r
if (pp != null) {\r
- n_requests = (pp.length > 0) ? getInt(pp[0], DEFAULT_N) : DEFAULT_N;\r
- m_minutes = (pp.length > 1) ? getInt(pp[1], DEFAULT_M) : DEFAULT_M;\r
- action = (pp.length > 2 && pp[2] != null && pp[2].equalsIgnoreCase("throttle")) ? ACTION_THROTTLE : ACTION_DROP;\r
+ numRequests = (pp.length > 0) ? getInt(pp[0], DEFAULT_N) : DEFAULT_N;\r
+ samplingPeriod = (pp.length > 1) ? getInt(pp[1], DEFAULT_M) : DEFAULT_M;\r
+ action = (pp.length > 2 && pp[2] != null && "throttle".equalsIgnoreCase(pp[2]))\r
+ ? ACTION_THROTTLE : ACTION_DROP;\r
enabled = true;\r
// ACTION_THROTTLE is not currently working, so is not supported\r
if (action == ACTION_THROTTLE) {\r
action = ACTION_DROP;\r
logger.info("Throttling is not currently supported; action changed to DROP");\r
}\r
- logger.info("ThrottleFilter is ENABLED for /publish requests; N=" + n_requests + ", M=" + m_minutes + ", Action=" + action);\r
+ logger.info("ThrottleFilter is ENABLED for /publish requests; N=" + numRequests + ", M=" + samplingPeriod\r
+ + ", Action=" + action);\r
return;\r
}\r
}\r
public void dropFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain)\r
throws IOException, ServletException {\r
int rate = getRequestRate(request);\r
- if (rate >= n_requests) {\r
+ if (rate >= numRequests) {\r
// drop request - only works under Jetty\r
- String m = String.format("Dropping connection: %s %d bad connections in %d minutes", getConnectionId(request), rate, m_minutes);\r
+ String m = String.format("Dropping connection: %s %d bad connections in %d minutes", getConnectionId(request), rate,\r
+ samplingPeriod);\r
logger.info(m);\r
- Request base_request = (request instanceof Request)\r
+ Request baseRequest = (request instanceof Request)\r
? (Request) request\r
: HttpConnection.getCurrentConnection().getHttpChannel().getRequest();\r
- base_request.getHttpChannel().getEndPoint().close();\r
+ baseRequest.getHttpChannel().getEndPoint().close();\r
} else {\r
chain.doFilter(request, response);\r
}\r
}\r
\r
- public void throttleFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain)\r
+ private void throttleFilter(HttpServletRequest request, HttpServletResponse response, FilterChain chain)\r
throws IOException, ServletException {\r
// throttle request\r
String id = getConnectionId(request);\r
int rate = getRequestRate(request);\r
Object results = request.getAttribute(THROTTLE_MARKER);\r
- if (rate >= n_requests && results == null) {\r
- String m = String.format("Throttling connection: %s %d bad connections in %d minutes", getConnectionId(request), rate, m_minutes);\r
+ if (rate >= numRequests && results == null) {\r
+ String m = String.format("Throttling connection: %s %d bad connections in %d minutes",\r
+ getConnectionId(request), rate, samplingPeriod);\r
logger.info(m);\r
Continuation continuation = ContinuationSupport.getContinuation(request);\r
continuation.suspend();\r
}\r
}\r
\r
- private Map<String, List<Continuation>> suspended_requests = new HashMap<>();\r
+ private Map<String, List<Continuation>> suspendedRequests = new HashMap<>();\r
\r
private void register(String id, Continuation continuation) {\r
- synchronized (suspended_requests) {\r
- List<Continuation> list = suspended_requests.get(id);\r
+ synchronized (suspendedRequests) {\r
+ List<Continuation> list = suspendedRequests.get(id);\r
if (list == null) {\r
list = new ArrayList<>();\r
- suspended_requests.put(id, list);\r
+ suspendedRequests.put(id, list);\r
}\r
list.add(continuation);\r
}\r
}\r
\r
private void resume(String id) {\r
- synchronized (suspended_requests) {\r
- List<Continuation> list = suspended_requests.get(id);\r
+ synchronized (suspendedRequests) {\r
+ List<Continuation> list = suspendedRequests.get(id);\r
if (list != null) {\r
// when the waited for event happens\r
Continuation continuation = list.remove(0);\r
*/\r
private int getRequestRate(HttpServletRequest request) {\r
String expecthdr = request.getHeader("Expect");\r
- if (expecthdr != null && expecthdr.equalsIgnoreCase("100-continue"))\r
+ if (expecthdr != null && "100-continue".equalsIgnoreCase(expecthdr))\r
return 0;\r
\r
String key = getConnectionId(request);\r
}\r
\r
public class Counter {\r
- private List<Long> times = new Vector<>(); // a record of request times\r
+ private List<Long> times = new ArrayList<>(); // a record of request times; NOTE(review): Vector was synchronized — confirm Counter is never shared between request threads and the TimerTask, or use Collections.synchronizedList\r
\r
public int prune() {\r
try {\r
- long n = System.currentTimeMillis() - (m_minutes * ONE_MINUTE);\r
+ long n = System.currentTimeMillis() - (samplingPeriod * ONE_MINUTE);\r
long t = times.get(0);\r
while (t < n) {\r
times.remove(0);\r
}
@Test
- public void Verify_NetworkRoute_Is_Added_Successfully() throws SQLException {
- Assert.assertEquals(1, EgressRoute.getAllEgressRoutes().size());
- egressRoute.doInsert(db.getConnection());
- Assert.assertEquals(2, EgressRoute.getAllEgressRoutes().size());
- egressRoute.doDelete(db.getConnection());
- }
-
- @Test
- public void Verify_NetworkRoute_Is_Removed_Successfully() throws SQLException {
+ public void Verify_EgressRoute_Is_Removed_Successfully() throws SQLException {
Assert.assertEquals(1, EgressRoute.getAllEgressRoutes().size());
EgressRoute egressRoute = new EgressRoute(1, 1);
egressRoute.doDelete(db.getConnection());
}
@Test
- public void Verify_NetworkRoute_Is_Updated_Successfully() throws SQLException {
+ public void Verify_EgressRoute_Is_Updated_Successfully() throws SQLException {
EgressRoute egressRoute = new EgressRoute(1, 1);
EgressRoute egressRoute1 = new EgressRoute(1, 1);
Assert.assertEquals(egressRoute.hashCode(), egressRoute1.hashCode());
Assert.assertEquals(2000, expiryRecord.getContentLength());
Assert.assertEquals(285, expiryRecord.getSubid());
Assert.assertEquals("file.txt", expiryRecord.getFileid());
- Assert.assertEquals(100, expiryRecord.getAttempts());
+ Assert.assertEquals(100, expiryRecord.getDeliveryAttempts());
Assert.assertEquals("other", expiryRecord.getReason());
}
expiryRecord.setMethod("PUT");
expiryRecord.setSubid(322);
expiryRecord.setFileid("file.txt");
- expiryRecord.setAttempts(125);
+ expiryRecord.setDeliveryAttempts(125);
expiryRecord.setReason("Out of memory");
LOGJSONObject expiryRecordJson = createBaseLogRecordJson();
setA.add(new FeedEndpointID("1", "Name"));
Set setB = new HashSet();
setB.add("172.0.0.1");
- fa.setEndpoint_ids(setA);
- fa.setEndpoint_addrs(setB);
+ fa.setEndpointIDS(setA);
+ fa.setEndpointAddrs(setB);
feed.setAuthorization(fa);
Assert.assertEquals(feed.doInsert(connection), false);
networkRoute = new NetworkRoute("node01.","node03.","node02.");
}
- @Test
- public void Verify_NetworkRoute_Is_Added_Successfully() throws SQLException {
- Assert.assertEquals(1, NetworkRoute.getAllNetworkRoutes().size());
- networkRoute.doInsert(db.getConnection());
- Assert.assertEquals(2, NetworkRoute.getAllNetworkRoutes().size());
- networkRoute.doDelete(db.getConnection());
- }
-
@Test
public void Verify_NetworkRoute_Is_Removed_Successfully() throws SQLException {
Assert.assertEquals(1, NetworkRoute.getAllNetworkRoutes().size());
List<Continuation> continuation_list = new ArrayList<>();
continuation_list.add(continuation);
suspended_requests.put("null/-1", continuation_list);
- FieldUtils.writeDeclaredField(throttlefilter, "suspended_requests", suspended_requests, true);
+ FieldUtils.writeDeclaredField(throttlefilter, "suspendedRequests", suspended_requests, true);
throttlefilter.doFilter(request, response, filterchain);
verify(continuation, times(1)).setAttribute(anyString(), any());
verify(continuation, times(1)).resume();