Even more unit test and code cleanup 15/91515/1
author efiacor <fiachra.corcoran@est.tech>
Tue, 16 Jul 2019 09:49:13 +0000 (09:49 +0000)
committer efiacor <fiachra.corcoran@est.tech>
Tue, 16 Jul 2019 09:49:13 +0000 (09:49 +0000)
Change-Id: Ide9477f5f8856e4ab35864bdc93d27a2d59afc83
Issue-ID: DMAAP-1226
Signed-off-by: efiacor <fiachra.corcoran@est.tech>
14 files changed:
datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/ProvData.java
datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/TaskList.java
datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/eelf/AuditFilter.java
datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/TaskListTest.java [new file with mode: 0644]
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/BaseServlet.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/Parameters.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/HttpServletUtils.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/PasswordProcessor.java
datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/BaseServletTest.java
datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/DrServletTestBase.java
datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTaskTest.java
datarouter-prov/src/test/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoaderTest.java
datarouter-prov/src/test/resources/h2Database.properties

index c436076..03e952c 100644 (file)
@@ -152,7 +152,7 @@ public class ProvData {
     }
 
     /**
-     * Get the raw node configuration entries
+     * Get the raw node configuration entries.
      */
     public NodeConfig.ProvNode[] getNodes() {
         return (pn);
@@ -333,6 +333,9 @@ public class ProvData {
         if (jnodes != null) {
             for (int nx = 0; nx < jnodes.length(); nx++) {
                 String nn = gvas(jnodes, nx);
+                if (nn == null) {
+                    continue;
+                }
                 if (nn.indexOf('.') == -1) {
                     nn = nn + "." + sfx;
                 }
index 7fa0dc4..a77277f 100644 (file)
@@ -38,7 +38,7 @@ import java.util.Iterator;
  * called.
  * </ul>
  */
-public class TaskList {
+class TaskList {
 
     private Iterator<Runnable> runlist;
     private HashSet<Runnable> tasks = new HashSet<>();
@@ -50,7 +50,7 @@ public class TaskList {
     /**
      * Start executing the sequence of tasks.
      */
-    public synchronized void startRun() {
+    synchronized void startRun() {
         sofar = new HashSet<>();
         added = new HashSet<>();
         removed = new HashSet<>();
@@ -61,7 +61,7 @@ public class TaskList {
     /**
      * Get the next task to execute.
      */
-    public synchronized Runnable next() {
+    synchronized Runnable next() {
         while (runlist != null) {
             if (runlist.hasNext()) {
                 Runnable task = runlist.next();
@@ -88,7 +88,7 @@ public class TaskList {
     /**
      * Add a task to the list of tasks to run whenever the event occurs.
      */
-    public synchronized void addTask(Runnable task) {
+    synchronized void addTask(Runnable task) {
         if (runlist != null) {
             added.add(task);
             removed.remove(task);
@@ -99,7 +99,7 @@ public class TaskList {
     /**
      * Remove a task from the list of tasks to run whenever the event occurs.
      */
-    public synchronized void removeTask(Runnable task) {
+    synchronized void removeTask(Runnable task) {
         if (runlist != null) {
             removed.add(task);
             added.remove(task);
index 33103db..a278c2e 100644 (file)
@@ -17,9 +17,9 @@
  * SPDX-License-Identifier: Apache-2.0
  * ============LICENSE_END=========================================================
  */
+
 package org.onap.dmaap.datarouter.node.eelf;
 
-import ch.qos.logback.classic.Level;
 import ch.qos.logback.classic.spi.ILoggingEvent;
 import ch.qos.logback.core.filter.Filter;
 import ch.qos.logback.core.spi.FilterReply;
diff --git a/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/TaskListTest.java b/datarouter-node/src/test/java/org/onap/dmaap/datarouter/node/TaskListTest.java
new file mode 100644 (file)
index 0000000..311165c
--- /dev/null
@@ -0,0 +1,44 @@
+/*-
+ * ============LICENSE_START=======================================================
+ *  Copyright (C) 2019 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.dmaap.datarouter.node;
+
+import org.junit.Test;
+
+public class TaskListTest {
+
+    @Test
+    public void Given_New_Task_List_Verify_Add_And_Run() {
+        TaskList taskList = new TaskList();
+        taskList.startRun();
+        taskList.addTask(() -> {
+        });
+        taskList.next();
+        taskList.removeTask(() -> {
+        });
+    }
+
+    @Test
+    public void Given_Empty_Task_List_Verify_Next() {
+        TaskList taskList = new TaskList();
+        taskList.startRun();
+        taskList.next();
+    }
+}
index ef106ab..3993b4d 100755 (executable)
 
 package org.onap.dmaap.datarouter.provisioning;
 
+import static com.att.eelf.configuration.Configuration.MDC_KEY_REQUEST_ID;
 import static com.att.eelf.configuration.Configuration.MDC_SERVER_FQDN;
-
 import static com.att.eelf.configuration.Configuration.MDC_SERVER_IP_ADDRESS;
 import static com.att.eelf.configuration.Configuration.MDC_SERVICE_NAME;
-import static com.att.eelf.configuration.Configuration.MDC_KEY_REQUEST_ID;
 
-
-
-import java.io.IOException;
-import java.io.InputStream;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.security.GeneralSecurityException;
 import java.security.cert.X509Certificate;
 import java.sql.Connection;
 import java.sql.SQLException;
-
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.UUID;
 import javax.servlet.ServletConfig;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
-
-import com.att.eelf.configuration.EELFLogger;
-import com.att.eelf.configuration.EELFManager;
 import org.apache.commons.lang3.StringUtils;
+import org.jetbrains.annotations.Nullable;
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
@@ -55,21 +58,19 @@ import org.json.JSONTokener;
 import org.onap.dmaap.datarouter.authz.Authorizer;
 import org.onap.dmaap.datarouter.authz.impl.ProvAuthorizer;
 import org.onap.dmaap.datarouter.authz.impl.ProvDataProvider;
-import org.onap.dmaap.datarouter.provisioning.beans.*;
+import org.onap.dmaap.datarouter.provisioning.beans.Deleteable;
+import org.onap.dmaap.datarouter.provisioning.beans.Feed;
+import org.onap.dmaap.datarouter.provisioning.beans.Group;
+import org.onap.dmaap.datarouter.provisioning.beans.Insertable;
+import org.onap.dmaap.datarouter.provisioning.beans.NodeClass;
+import org.onap.dmaap.datarouter.provisioning.beans.Parameters;
+import org.onap.dmaap.datarouter.provisioning.beans.Subscription;
+import org.onap.dmaap.datarouter.provisioning.beans.Updateable;
 import org.onap.dmaap.datarouter.provisioning.utils.DB;
 import org.onap.dmaap.datarouter.provisioning.utils.PasswordProcessor;
 import org.onap.dmaap.datarouter.provisioning.utils.ThrottleFilter;
 import org.slf4j.MDC;
 
-import javax.mail.*;
-import javax.mail.internet.InternetAddress;
-import javax.mail.internet.MimeBodyPart;
-import javax.mail.internet.MimeMessage;
-import javax.mail.internet.MimeMultipart;
-import java.security.GeneralSecurityException;
-import java.util.*;
-import java.util.regex.Pattern;
-
 
 /**
  * This is the base class for all Servlets in the provisioning code. It provides standard constants and some common
@@ -94,10 +95,10 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
     static final String CREATE_PERMISSION = "create";
     static final String EDIT_PERMISSION = "edit";
     static final String DELETE_PERMISSION = "delete";
-    static final String PUBLISH_PERMISSION = "publish";
-    static final String SUSPEND_PERMISSION = "suspend";
-    static final String RESTORE_PERMISSION = "restore";
-    static final String SUBSCRIBE_PERMISSION = "subscribe";
+    private static final String PUBLISH_PERMISSION = "publish";
+    private static final String SUSPEND_PERMISSION = "suspend";
+    private static final String RESTORE_PERMISSION = "restore";
+    private static final String SUBSCRIBE_PERMISSION = "subscribe";
     static final String APPROVE_SUB_PERMISSION = "approveSub";
 
     static final String FEED_BASECONTENT_TYPE = "application/vnd.dmaap-dr.feed";
@@ -113,7 +114,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
     //Adding groups functionality, ...1610
     static final String GROUP_BASECONTENT_TYPE = "application/vnd.dmaap-dr.group";
     static final String GROUP_CONTENT_TYPE = "application/vnd.dmaap-dr.group; version=2.0";
-    public static final String GROUPFULL_CONTENT_TYPE = "application/vnd.dmaap-dr.group-full; version=2.0";
+    static final String GROUPFULL_CONTENT_TYPE = "application/vnd.dmaap-dr.group-full; version=2.0";
     public static final String GROUPLIST_CONTENT_TYPE = "application/vnd.dmaap-dr.fegrouped-list; version=1.0";
 
 
@@ -130,127 +131,123 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
     private static final int DEFAULT_POKETIMER2 = 30;
     private static final String DEFAULT_DOMAIN = "onap";
     private static final String DEFAULT_PROVSRVR_NAME = "dmaap-dr-prov";
-    private static final String STATIC_ROUTING_NODES = ""; //Adding new param for static Routing - Rally:US664862-1610
 
     //Common Errors
-    public static final String MISSING_ON_BEHALF = "Missing X-DMAAP-DR-ON-BEHALF-OF header.";
-    public static final String MISSING_FEED = "Missing or bad feed number.";
-    public static final String POLICY_ENGINE = "Policy Engine disallows access.";
-    public static final String UNAUTHORIZED = "Unauthorized.";
-    public static final String BAD_SUB = "Missing or bad subscription number.";
-    public static final String BAD_JSON = "Badly formed JSON";
-    public static final String BAD_URL = "Bad URL.";
+    static final String MISSING_ON_BEHALF = "Missing X-DMAAP-DR-ON-BEHALF-OF header.";
+    static final String MISSING_FEED = "Missing or bad feed number.";
+    static final String POLICY_ENGINE = "Policy Engine disallows access.";
+    static final String UNAUTHORIZED = "Unauthorized.";
+    static final String BAD_SUB = "Missing or bad subscription number.";
+    static final String BAD_JSON = "Badly formed JSON";
+    static final String BAD_URL = "Bad URL.";
 
     public static final String API = "/api/";
-    public static final String LOGS = "/logs/";
-    public static final String TEXT_CT = "text/plain";
-    public static final String INGRESS = "/ingress/";
-    public static final String EGRESS = "/egress/";
-    public static final String NETWORK = "/network/";
-    public static final String GROUPID = "groupid";
+    static final String LOGS = "/logs/";
+    static final String TEXT_CT = "text/plain";
+    static final String INGRESS = "/ingress/";
+    static final String EGRESS = "/egress/";
+    static final String NETWORK = "/network/";
+    static final String GROUPID = "groupid";
     public static final String FEEDID = "feedid";
-    public static final String FEEDIDS = "feedids";
-    public static final String SUBID = "subid";
-    public static final String EVENT_TYPE = "eventType";
-    public static final String OUTPUT_TYPE = "output_type";
-    public static final String START_TIME = "start_time";
-    public static final String END_TIME = "end_time";
-    public static final String REASON_SQL = "reasonSQL";
+    static final String FEEDIDS = "feedids";
+    static final String SUBID = "subid";
+    static final String EVENT_TYPE = "eventType";
+    static final String OUTPUT_TYPE = "output_type";
+    static final String START_TIME = "start_time";
+    static final String END_TIME = "end_time";
+    static final String REASON_SQL = "reasonSQL";
 
 
     /**
-     * A boolean to trigger one time "provisioning changed" event on startup
+     * A boolean to trigger one time "provisioning changed" event on startup.
      */
     private static boolean startmsgFlag = true;
     /**
-     * This POD should require SSL connections from clients; pulled from the DB (PROV_REQUIRE_SECURE)
+     * This POD should require SSL connections from clients; pulled from the DB (PROV_REQUIRE_SECURE).
      */
     private static boolean requireSecure = true;
     /**
-     * This POD should require signed, recognized certificates from clients; pulled from the DB (PROV_REQUIRE_CERT)
+     * This POD should require signed, recognized certificates from clients; pulled from the DB (PROV_REQUIRE_CERT).
      */
     private static boolean requireCert = true;
     /**
-     * The set of authorized addresses and networks; pulled from the DB (PROV_AUTH_ADDRESSES)
+     * The set of authorized addresses and networks; pulled from the DB (PROV_AUTH_ADDRESSES).
      */
     private static Set<String> authorizedAddressesAndNetworks = new HashSet<>();
     /**
-     * The set of authorized names; pulled from the DB (PROV_AUTH_SUBJECTS)
+     * The set of authorized names; pulled from the DB (PROV_AUTH_SUBJECTS).
      */
     private static Set<String> authorizedNames = new HashSet<>();
     /**
-     * The FQDN of the initially "active" provisioning server in this Data Router ecosystem
+     * The FQDN of the initially "active" provisioning server in this Data Router ecosystem.
      */
     private static String initialActivePod;
     /**
-     * The FQDN of the initially "standby" provisioning server in this Data Router ecosystem
+     * The FQDN of the initially "standby" provisioning server in this Data Router ecosystem.
      */
     private static String initialStandbyPod;
     /**
-     * The FQDN of this provisioning server in this Data Router ecosystem
+     * The FQDN of this provisioning server in this Data Router ecosystem.
      */
     private static String thisPod;
     /**
-     * "Timer 1" - used to determine when to notify nodes of provisioning changes
+     * "Timer 1" - used to determine when to notify nodes of provisioning changes.
      */
     private static long pokeTimer1;
     /**
-     * "Timer 2" - used to determine when to notify nodes of provisioning changes
+     * "Timer 2" - used to determine when to notify nodes of provisioning changes.
      */
     private static long pokeTimer2;
     /**
-     * Array of nodes names and/or FQDNs
+     * Array of nodes names and/or FQDNs.
      */
     private static String[] nodes = new String[0];
     /**
-     * [DATARTR-27] Poke all the DR nodes : Array of nodes names and/or FQDNs
+     * [DATARTR-27] Poke all the DR nodes : Array of nodes names and/or FQDNs.
      */
     private static String[] drnodes = new String[0];
     /**
-     * Array of node IP addresses
+     * Array of node IP addresses.
      */
     private static InetAddress[] nodeAddresses = new InetAddress[0];
     /**
-     * Array of POD IP addresses
+     * Array of POD IP addresses.
      */
     private static InetAddress[] podAddresses = new InetAddress[0];
     /**
-     * The maximum number of feeds allowed; pulled from the DB (PROV_MAXFEED_COUNT)
+     * The maximum number of feeds allowed; pulled from the DB (PROV_MAXFEED_COUNT).
      */
     static int maxFeeds = 0;
     /**
-     * The maximum number of subscriptions allowed; pulled from the DB (PROV_MAXSUB_COUNT)
+     * The maximum number of subscriptions allowed; pulled from the DB (PROV_MAXSUB_COUNT).
      */
     static int maxSubs = 0;
     /**
-     * The current number of feeds in the system
+     * The current number of feeds in the system.
      */
     static int activeFeeds = 0;
     /**
-     * The current number of subscriptions in the system
+     * The current number of subscriptions in the system.
      */
     static int activeSubs = 0;
 
     /**
-     * The domain used to generate a FQDN from the "bare" node names
+     * The domain used to generate a FQDN from the "bare" node names.
      */
     private static String provDomain = "web.att.com";
 
     /**
-     * The standard FQDN of the provisioning server in this Data Router ecosystem
+     * The standard FQDN of the provisioning server in this Data Router ecosystem.
      */
     private static String provName = "feeds-drtr.web.att.com";
 
     /**
-     * The standard FQDN of the ACTIVE_POD provisioning server in this Data Router ecosystem
+     * The standard FQDN of the ACTIVE_POD provisioning server in this Data Router ecosystem.
      */
     private static String activeProvName = "feeds-drtr.web.att.com";
 
-    //Adding new param for static Routing - Rally:US664862-1610
-    private static String staticRoutingNodes = STATIC_ROUTING_NODES;
-
     /**
-     * This logger is used to log provisioning events
+     * This logger is used to log provisioning events.
      */
     protected static EELFLogger eventlogger;
     /**
@@ -258,21 +255,17 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
      */
     protected static EELFLogger intlogger;
     /**
-     * Authorizer - interface to the Policy Engine
+     * Authorizer - interface to the Policy Engine.
      */
     protected static Authorizer authz;
     /**
-     * The Synchronizer used to sync active DB to standby one
+     * The Synchronizer used to sync active DB to standby one.
      */
     private static SynchronizerTask synctask = null;
 
     //Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
     private InetAddress thishost;
     private InetAddress loopback;
-    private static Boolean mailSendFlag = false;
-
-    private static final String MAILCONFIG_FILE = "mail.properties";
-    private static Properties mailprops;
 
     //DMAAP-597 (Tech Dept) REST request source IP auth relaxation to accommodate OOM kubernetes deploy
     private static String isAddressAuthEnabled = (new DB()).getProperties()
@@ -285,10 +278,10 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
      * Initialize data common to all the provisioning server servlets.
      */
     protected BaseServlet() {
-        if(eventlogger == null) {
-            this.eventlogger = EELFManager.getInstance().getLogger("EventLog");
+        if (eventlogger == null) {
+            eventlogger = EELFManager.getInstance().getLogger("EventLog");
         }
-        if(intlogger == null) {
+        if (intlogger == null) {
             this.intlogger = EELFManager.getInstance().getLogger("InternalLog");
         }
         if (authz == null) {
@@ -329,7 +322,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
     }
 
     /**
-     * Read the request's input stream and return a JSONObject from it
+     * Read the request's input stream and return a JSONObject from it.
      *
      * @param req the HTTP request
      * @return the JSONObject, or null if the stream cannot be parsed
@@ -348,35 +341,40 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
     }
 
     /**
-     * This method encrypt/decrypt the key in the JSON passed by user request inside the authorisation header object in request before logging the JSON.
+     * This method encrypt/decrypt the key in the JSON passed by user request inside the authorisation
+     * header object in request before logging the JSON.
      *
-     * @param jo-      the JSON passed in http request.
-     * @param maskKey- the key to be masked in the JSON passed.
-     * @param action-  whether to mask the key or unmask it in a JSON passed.
+     * @param jo      the JSON passed in http request.
+     * @param maskKey the key to be masked in the JSON passed.
+     * @param action  whether to mask the key or unmask it in a JSON passed.
      * @return the JSONObject, or null if the stream cannot be parsed.
      */
-    public static JSONObject maskJSON(JSONObject jo, String maskKey, boolean action) {
+    static JSONObject maskJSON(JSONObject jo, String maskKey, boolean action) {
         if (!jo.isNull("authorization")) {
-            JSONObject j2 = jo.getJSONObject("authorization");
-            JSONArray ja = j2.getJSONArray("endpoint_ids");
-            for (int i = 0; i < ja.length(); i++) {
-                if ((!ja.getJSONObject(i).isNull(maskKey))) {
-                    String password = ja.getJSONObject(i).get(maskKey).toString();
-                    try {
-                        if (action) {
-                            ja.getJSONObject(i).put(maskKey, PasswordProcessor.encrypt(password));
-                        } else {
-                            ja.getJSONObject(i).put(maskKey, PasswordProcessor.decrypt(password));
-                        }
-                    } catch (JSONException | GeneralSecurityException e) {
-                        intlogger.info("Error reading JSON while masking: " + e);
-                    }
+            JSONArray endpointIds = jo.getJSONObject("authorization").getJSONArray("endpoint_ids");
+            for (int index = 0; index < endpointIds.length(); index++) {
+                if ((!endpointIds.getJSONObject(index).isNull(maskKey))) {
+                    String password = endpointIds.getJSONObject(index).get(maskKey).toString();
+                    processPassword(maskKey, action, endpointIds, index, password);
                 }
             }
         }
         return jo;
     }
 
+    private static void processPassword(String maskKey, boolean action, JSONArray endpointIds, int index,
+            String password) {
+        try {
+            if (action) {
+                endpointIds.getJSONObject(index).put(maskKey, PasswordProcessor.encrypt(password));
+            } else {
+                endpointIds.getJSONObject(index).put(maskKey, PasswordProcessor.decrypt(password));
+            }
+        } catch (JSONException | GeneralSecurityException e) {
+            intlogger.info("Error reading JSON while masking: " + e);
+        }
+    }
+
     /**
      * Check if the remote host is authorized to perform provisioning. Is the request secure? Is it coming from an
      * authorized IP address or network (configured via PROV_AUTH_ADDRESSES)? Does it have a valid client certificate
@@ -393,20 +391,9 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
         if (requireSecure && !request.isSecure()) {
             return "Request must be made over an HTTPS connection.";
         }
-        // Is remote IP authorized?
-        String remote = request.getRemoteAddr();
-        try {
-            boolean found = false;
-            InetAddress ip = InetAddress.getByName(remote);
-            for (String addrnet : authorizedAddressesAndNetworks) {
-                found |= addressMatchesNetwork(ip, addrnet);
-            }
-            if (!found) {
-                return "Unauthorized address: " + remote;
-            }
-        } catch (UnknownHostException e) {
-            intlogger.error("PROV0051 BaseServlet.isAuthorizedForProvisioning: " + e.getMessage(), e);
-            return "Unauthorized address: " + remote;
+        String remoteHostCheck = checkRemoteHostAuthorization(request);
+        if (remoteHostCheck != null) {
+            return remoteHostCheck;
         }
         // Does remote have a valid certificate?
         if (requireCert) {
@@ -425,6 +412,26 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
         return null;
     }
 
+    @Nullable
+    private String checkRemoteHostAuthorization(HttpServletRequest request) {
+        // Is remote IP authorized?
+        String remote = request.getRemoteAddr();
+        try {
+            boolean found = false;
+            InetAddress ip = InetAddress.getByName(remote);
+            for (String addrnet : authorizedAddressesAndNetworks) {
+                found |= addressMatchesNetwork(ip, addrnet);
+            }
+            if (!found) {
+                return "Unauthorized address: " + remote;
+            }
+        } catch (UnknownHostException e) {
+            intlogger.error("PROV0051 BaseServlet.isAuthorizedForProvisioning: " + e.getMessage(), e);
+            return "Unauthorized address: " + remote;
+        }
+        return null;
+    }
+
     /**
      * Check if the remote IP address is authorized to see the /internal URL tree.
      *
@@ -438,19 +445,19 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
             }
             InetAddress ip = InetAddress.getByName(request.getRemoteAddr());
             for (InetAddress node : getNodeAddresses()) {
-                if (node != null && ip.equals(node)) {
+                if (ip.equals(node)) {
                     return true;
                 }
             }
             for (InetAddress pod : getPodAddresses()) {
-                if (pod != null && ip.equals(pod)) {
+                if (ip.equals(pod)) {
                     return true;
                 }
             }
-            if (thishost != null && ip.equals(thishost)) {
+            if (ip.equals(thishost)) {
                 return true;
             }
-            if (loopback != null && ip.equals(loopback)) {
+            if (ip.equals(loopback)) {
                 return true;
             }
         } catch (UnknownHostException e) {
@@ -468,7 +475,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
      */
     private static boolean addressMatchesNetwork(InetAddress ip, String s) {
         int mlen = -1;
-        int n = s.indexOf("/");
+        int n = s.indexOf('/');
         if (n >= 0) {
             mlen = Integer.parseInt(s.substring(n + 1));
             s = s.substring(0, n);
@@ -528,16 +535,16 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
         maxSubs = getInt(map, Parameters.PROV_MAXSUB_COUNT, DEFAULT_MAX_SUBS);
         pokeTimer1 = getInt(map, Parameters.PROV_POKETIMER1, DEFAULT_POKETIMER1);
         pokeTimer2 = getInt(map, Parameters.PROV_POKETIMER2, DEFAULT_POKETIMER2);
-        /**
-         * The domain used to generate a FQDN from the "bare" node names
-         */
+
+        // The domain used to generate a FQDN from the "bare" node names
         provDomain = getString(map, Parameters.PROV_DOMAIN, DEFAULT_DOMAIN);
         provName = getString(map, Parameters.PROV_NAME, DEFAULT_PROVSRVR_NAME);
         activeProvName = getString(map, Parameters.PROV_ACTIVE_NAME, provName);
         initialActivePod = getString(map, Parameters.ACTIVE_POD, "");
         initialStandbyPod = getString(map, Parameters.STANDBY_POD, "");
-        staticRoutingNodes = getString(map, Parameters.STATIC_ROUTING_NODES,
-                ""); //Adding new param for static Routing - Rally:US664862-1610
+
+        //Adding new param for static Routing - Rally:US664862-1610
+        String staticRoutingNodes = getString(map, Parameters.STATIC_ROUTING_NODES, "");
         activeFeeds = Feed.countActiveFeeds();
         activeSubs = Subscription.countActiveSubscriptions();
         try {
@@ -597,78 +604,11 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
         }
     }
 
-
-    /**
-     * Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047. Load mail properties.
-     *
-     * @author vs215k
-     **/
-    private void loadMailProperties() {
-        if (mailprops == null) {
-            mailprops = new Properties();
-            try (InputStream inStream = getClass().getClassLoader().getResourceAsStream(MAILCONFIG_FILE)) {
-                mailprops.load(inStream);
-            } catch (IOException e) {
-                intlogger.error("PROV9003 Opening properties: " + e.getMessage(), e);
-                System.exit(1);
-            }
-        }
-    }
-
-
-    /**
-     * Data Router Subscriber HTTPS Relaxation feature USERSTORYID:US674047.
-     *
-     * @param email - list of email ids to notify if HTTP relexcation is enabled.
-     * @author vs215k
-     **/
-    private void notifyPSTeam(String email) {
-        loadMailProperties(); //Load HTTPS Relex mail properties.
-        String[] emails = email.split(Pattern.quote("|"));
-
-        Properties mailproperties = new Properties();
-        mailproperties.put("mail.smtp.host", mailprops.get("com.att.dmaap.datarouter.mail.server"));
-        mailproperties.put("mail.transport.protocol", mailprops.get("com.att.dmaap.datarouter.mail.protocol"));
-
-        Session session = Session.getDefaultInstance(mailproperties, null);
-        Multipart mp = new MimeMultipart();
-        MimeBodyPart htmlPart = new MimeBodyPart();
-
-        try {
-
-            Message msg = new MimeMessage(session);
-            msg.setFrom(new InternetAddress(mailprops.get("com.att.dmaap.datarouter.mail.from").toString()));
-
-            InternetAddress[] addressTo = new InternetAddress[emails.length];
-            for (int x = 0; x < emails.length; x++) {
-                addressTo[x] = new InternetAddress(emails[x]);
-            }
-
-            msg.addRecipients(Message.RecipientType.TO, addressTo);
-            msg.setSubject(mailprops.get("com.att.dmaap.datarouter.mail.subject").toString());
-            htmlPart.setContent(mailprops.get("com.att.dmaap.datarouter.mail.body").toString()
-                    .replace("[SERVER]", InetAddress.getLocalHost().getHostName()), "text/html");
-            mp.addBodyPart(htmlPart);
-            msg.setContent(mp);
-
-            intlogger.info(mailprops.get("com.att.dmaap.datarouter.mail.body").toString()
-                    .replace("[SERVER]", InetAddress.getLocalHost().getHostName()));
-
-            Transport.send(msg);
-            intlogger.info("HTTPS relaxation mail is sent to - : " + email);
-
-        } catch (MessagingException e) {
-            intlogger.error("Invalid email address, unable to send https relaxation mail to - : " + email, e);
-        } catch (UnknownHostException uhe) {
-            intlogger.error("UnknownHostException", uhe);
-        }
-    }
-
     public static String getProvName() {
         return provName;
     }
 
-    public static String getActiveProvName() {
+    static String getActiveProvName() {
         return activeProvName;
     }
 
@@ -696,7 +636,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
      *
      * @return an array of InetAddresses
      */
-    public static InetAddress[] getNodeAddresses() {
+    private static InetAddress[] getNodeAddresses() {
         return nodeAddresses;
     }
 
@@ -814,7 +754,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
 
     private static boolean getBoolean(Map<String, String> map, String name) {
         String s = map.get(name);
-        return (s != null) && "true".equalsIgnoreCase(s);
+        return "true".equalsIgnoreCase(s);
     }
 
     private static String getString(Map<String, String> map, String name, String dflt) {
@@ -854,7 +794,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
      */
     public class ContentHeader {
 
-        private String type = "";
+        private String type;
         private Map<String, String> map = new HashMap<>();
 
         ContentHeader() {
@@ -870,7 +810,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
             return type;
         }
 
-        public String getAttribute(String key) {
+        String getAttribute(String key) {
             String s = map.get(key);
             if (s == null) {
                 s = "";
@@ -976,19 +916,17 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
     /*
      * @Method - getGroupByFeedGroupId- Rally:US708115
      * @Params - User to check in group and feedid which is assigned the group.
-     * @return - string value grupid/null
+     * @return - string value groupid/null
      */
     @Override
     public String getGroupByFeedGroupId(String owner, String feedId) {
         try {
-            int n = Integer.parseInt(feedId);
-            Feed f = Feed.getFeedById(n);
+            Feed f = Feed.getFeedById(Integer.parseInt(feedId));
             if (f != null) {
                 int groupid = f.getGroupid();
                 if (groupid > 0) {
                     Group group = Group.getGroupById(groupid);
-                    assert group != null;
-                    if (isUserMemberOfGroup(group, owner)) {
+                    if (group != null && isUserMemberOfGroup(group, owner)) {
                         return group.getAuthid();
                     }
                 }
@@ -1002,7 +940,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
     /*
      * @Method - getGroupBySubGroupId - Rally:US708115
      * @Params - User to check in group and subid which is assigned the group.
-     * @return - string value grupid/null
+     * @return - string value groupid/null
      */
     @Override
     public String getGroupBySubGroupId(String owner, String subId) {
@@ -1013,8 +951,7 @@ public class BaseServlet extends HttpServlet implements ProvDataProvider {
                 int groupid = s.getGroupid();
                 if (groupid > 0) {
                     Group group = Group.getGroupById(groupid);
-                    assert group != null;
-                    if (isUserMemberOfGroup(group, owner)) {
+                    if (group != null && isUserMemberOfGroup(group, owner)) {
                         return group.getAuthid();
                     }
                 }
index fff10ac..357444e 100644 (file)
@@ -57,6 +57,7 @@ public class Parameters extends Syncable {
     public static final String PROV_POKETIMER2 = "PROV_POKETIMER2";\r
     public static final String PROV_SPECIAL_SUBNET = "PROV_SPECIAL_SUBNET";\r
     public static final String PROV_LOG_RETENTION = "PROV_LOG_RETENTION";\r
+    public static final String DEFAULT_LOG_RETENTION = "DEFAULT_LOG_RETENTION";\r
     public static final String NODES = "NODES";\r
     public static final String ACTIVE_POD = "ACTIVE_POD";\r
     public static final String STANDBY_POD = "STANDBY_POD";\r
index d9f36de..f59dc91 100644 (file)
  * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
  * *
  ******************************************************************************/
-package org.onap.dmaap.datarouter.provisioning.utils;
 
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
+package org.onap.dmaap.datarouter.provisioning.utils;
 
 import com.att.eelf.configuration.EELFLogger;
+import java.io.IOException;
+import javax.servlet.http.HttpServletResponse;
 
 public class HttpServletUtils {
+
+    private HttpServletUtils(){
+
+    }
+
     public static void sendResponseError(HttpServletResponse response, int errorCode, String message, EELFLogger intlogger) {
         try {
             response.sendError(errorCode, message);
index 3ba1a15..c78a5b1 100644 (file)
-/*******************************************************************************\r
- * ============LICENSE_START==================================================\r
- * * org.onap.dmaap\r
- * * ===========================================================================\r
- * * Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.\r
- * * ===========================================================================\r
- * * Licensed under the Apache License, Version 2.0 (the "License");\r
- * * you may not use this file except in compliance with the License.\r
- * * You may obtain a copy of the License at\r
- * *\r
- *  *      http://www.apache.org/licenses/LICENSE-2.0\r
- * *\r
- *  * Unless required by applicable law or agreed to in writing, software\r
- * * distributed under the License is distributed on an "AS IS" BASIS,\r
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
- * * See the License for the specific language governing permissions and\r
- * * limitations under the License.\r
- * * ============LICENSE_END====================================================\r
- * *\r
- * * ECOMP is a trademark and service mark of AT&T Intellectual Property.\r
- * *\r
- ******************************************************************************/\r
-\r
-\r
-package org.onap.dmaap.datarouter.provisioning.utils;\r
-\r
-import java.io.File;\r
-import java.io.FileInputStream;\r
-import java.io.FileNotFoundException;\r
-import java.io.FileReader;\r
-import java.io.FilenameFilter;\r
-import java.io.IOException;\r
-import java.io.InputStreamReader;\r
-import java.io.LineNumberReader;\r
-import java.io.Reader;\r
-import java.sql.Connection;\r
-import java.sql.PreparedStatement;\r
-import java.sql.ResultSet;\r
-import java.sql.SQLException;\r
-import java.sql.Statement;\r
-import java.text.ParseException;\r
-import java.util.Date;\r
-import java.util.HashMap;\r
-import java.util.Iterator;\r
-import java.util.Map;\r
-import java.util.TreeSet;\r
-import java.util.zip.GZIPInputStream;\r
-\r
-import com.att.eelf.configuration.EELFLogger;\r
-import com.att.eelf.configuration.EELFManager;\r
-import org.onap.dmaap.datarouter.provisioning.BaseServlet;\r
-import org.onap.dmaap.datarouter.provisioning.beans.DeliveryExtraRecord;\r
-import org.onap.dmaap.datarouter.provisioning.beans.DeliveryRecord;\r
-import org.onap.dmaap.datarouter.provisioning.beans.ExpiryRecord;\r
-import org.onap.dmaap.datarouter.provisioning.beans.Loadable;\r
-import org.onap.dmaap.datarouter.provisioning.beans.LogRecord;\r
-import org.onap.dmaap.datarouter.provisioning.beans.Parameters;\r
-import org.onap.dmaap.datarouter.provisioning.beans.PubFailRecord;\r
-import org.onap.dmaap.datarouter.provisioning.beans.PublishRecord;\r
-\r
-/**\r
- * This class provides methods that run in a separate thread, in order to process logfiles uploaded into the spooldir.\r
- * These logfiles are loaded into the MariaDB LOG_RECORDS table. In a running provisioning server, there should only be\r
- * two places where records can be loaded into this table; here, and in the method DB.retroFit4() which may be run at\r
- * startup to load the old (1.0) style log tables into LOG_RECORDS;\r
- * <p>This method maintains an {@link RLEBitSet} which can be used to easily see what records are presently in the\r
- * database.\r
- * This bit set is used to synchronize between provisioning servers.</p>\r
- *\r
- * @author Robert Eby\r
- * @version $Id: LogfileLoader.java,v 1.22 2014/03/12 19:45:41 eby Exp $\r
- */\r
-public class LogfileLoader extends Thread {\r
-    /**\r
-     * Default number of log records to keep when pruning.  Keep 10M by default.\r
-     */\r
-    public static final long DEFAULT_LOG_RETENTION = 10000000L;\r
-    /**\r
-     * NOT USED: Percentage of free space required before old records are removed.\r
-     */\r
-    public static final int REQUIRED_FREE_PCT = 20;\r
-\r
-    /**\r
-     * This is a singleton -- there is only one LogfileLoader object in the server\r
-     */\r
-    private static LogfileLoader logfileLoader;\r
-\r
-    /**\r
-     * Get the singleton LogfileLoader object, and start it if it is not running.\r
-     *\r
-     * @return the LogfileLoader\r
-     */\r
-    public static synchronized LogfileLoader getLoader() {\r
-        if (logfileLoader == null)\r
-            logfileLoader = new LogfileLoader();\r
-        if (!logfileLoader.isAlive())\r
-            logfileLoader.start();\r
-        return logfileLoader;\r
-    }\r
-\r
-    /**\r
-     * The PreparedStatement which is loaded by a <i>Loadable</i>.\r
-     */\r
-    public static final String INSERT_SQL = "insert into LOG_RECORDS values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";\r
-    /**\r
-     * Each server can assign this many IDs\r
-     */\r
-    private static final long SET_SIZE = (1L << 56);\r
-\r
-    private final EELFLogger logger;\r
-    private final DB db;\r
-    private final String spooldir;\r
-    private final long set_start;\r
-    private final long set_end;\r
-    private RLEBitSet seq_set;\r
-    private long nextid;\r
-    private boolean idle;\r
-\r
-    private LogfileLoader() {\r
-        this.logger = EELFManager.getInstance().getLogger("InternalLog");\r
-        this.db = new DB();\r
-        this.spooldir = db.getProperties().getProperty("org.onap.dmaap.datarouter.provserver.spooldir");\r
-        this.set_start = getIdRange();\r
-        this.set_end = set_start + SET_SIZE - 1;\r
-        this.seq_set = new RLEBitSet();\r
-        this.nextid = 0;\r
-        this.idle = false;\r
-\r
-        // This is a potentially lengthy operation, so has been moved to run()\r
-        //initializeNextid();\r
-        this.setDaemon(true);\r
-        this.setName("LogfileLoader");\r
-    }\r
-\r
-    private long getIdRange() {\r
-        long n;\r
-        if (BaseServlet.isInitialActivePOD())\r
-            n = 0;\r
-        else if (BaseServlet.isInitialStandbyPOD())\r
-            n = SET_SIZE;\r
-        else\r
-            n = SET_SIZE * 2;\r
-        String r = String.format("[%X .. %X]", n, n + SET_SIZE - 1);\r
-        logger.debug("This server shall assign RECORD_IDs in the range " + r);\r
-        return n;\r
-    }\r
-\r
-    /**\r
-     * Return the bit set representing the record ID's that are loaded in this database.\r
-     *\r
-     * @return the bit set\r
-     */\r
-    public RLEBitSet getBitSet() {\r
-        return seq_set;\r
-    }\r
-\r
-    /**\r
-     * True if the LogfileLoader is currently waiting for work.\r
-     *\r
-     * @return true if idle\r
-     */\r
-    public boolean isIdle() {\r
-        return idle;\r
-    }\r
-\r
-    /**\r
-     * Run continuously to look for new logfiles in the spool directory and import them into the DB.\r
-     * The spool is checked once per second.  If free space on the MariaDB filesystem falls below\r
-     * REQUIRED_FREE_PCT (normally 20%) then the oldest logfile entries are removed and the LOG_RECORDS\r
-     * table is compacted until free space rises above the threshold.\r
-     */\r
-    @Override\r
-    public void run() {\r
-        initializeNextid();    // moved from the constructor\r
-        while (true) {\r
-            try {\r
-                File dirfile = new File(spooldir);\r
-                while (true) {\r
-                    // process IN files\r
-                    File[] infiles = dirfile.listFiles(new FilenameFilter() {\r
-                        @Override\r
-                        public boolean accept(File dir, String name) {\r
-                            return name.startsWith("IN.");\r
-                        }\r
-                    });\r
-\r
-                    if (infiles.length == 0) {\r
-                        idle = true;\r
-                        try {\r
-                            Thread.sleep(1000L);\r
-                        } catch (InterruptedException e) {\r
-                            Thread.currentThread().interrupt();\r
-                        }\r
-                        idle = false;\r
-                    } else {\r
-                        // Remove old rows\r
-                        if (pruneRecords()) {\r
-                            // Removed at least some entries, recompute the bit map\r
-                            initializeNextid();\r
-                        }\r
-\r
-                        // Process incoming logfiles\r
-                        for (File f : infiles) {\r
-                            if (logger.isDebugEnabled())\r
-                                logger.debug("PROV8001 Starting " + f + " ...");\r
-                            long time = System.currentTimeMillis();\r
-                            int[] n = process(f);\r
-                            time = System.currentTimeMillis() - time;\r
-                            logger.info(String\r
-                                    .format("PROV8000 Processed %s in %d ms; %d of %d records.",\r
-                                            f.toString(), time, n[0], n[1]));\r
-                            f.delete();\r
-                        }\r
-                    }\r
-                }\r
-            } catch (Exception e) {\r
-                logger.warn("PROV0020: Caught exception in LogfileLoader: " + e);\r
-            }\r
-        }\r
-    }\r
-\r
-    boolean pruneRecords() {\r
-        boolean did1 = false;\r
-        long count = countRecords();\r
-        long threshold = DEFAULT_LOG_RETENTION;\r
-        Parameters param = Parameters.getParameter(Parameters.PROV_LOG_RETENTION);\r
-        if (param != null) {\r
-            try {\r
-                long n = Long.parseLong(param.getValue());\r
-                // This check is to prevent inadvertent errors from wiping the table out\r
-                if (n > 1000000L)\r
-                    threshold = n;\r
-            } catch (NumberFormatException e) {\r
-                // ignore\r
-            }\r
-        }\r
-        logger.debug("Pruning LOG_RECORD table: records in DB=" + count + ", threshold=" + threshold);\r
-        if (count > threshold) {\r
-            count -= threshold;                        // we need to remove this many records;\r
-            Map<Long, Long> hist = getHistogram();    // histogram of records per day\r
-            // Determine the cutoff point to remove the needed number of records\r
-            long sum = 0;\r
-            long cutoff = 0;\r
-            for (Long day : new TreeSet<Long>(hist.keySet())) {\r
-                sum += hist.get(day);\r
-                cutoff = day;\r
-                if (sum >= count)\r
-                    break;\r
-            }\r
-            cutoff++;\r
-            cutoff *= 86400000L;        // convert day to ms\r
-            logger.debug("  Pruning records older than=" + (cutoff / 86400000L) + " (" + new Date(cutoff) + ")");\r
-\r
-            Connection conn = null;\r
-            try {\r
-                // Limit to a million at a time to avoid typing up the DB for too long.\r
-                conn = db.getConnection();\r
-                try(PreparedStatement ps = conn.prepareStatement("DELETE from LOG_RECORDS where EVENT_TIME < ? limit 1000000")) {\r
-                    ps.setLong(1, cutoff);\r
-                    while (count > 0) {\r
-                        if (!ps.execute()) {\r
-                            int dcount = ps.getUpdateCount();\r
-                            count -= dcount;\r
-                            logger.debug("  " + dcount + " rows deleted.");\r
-                            did1 |= (dcount != 0);\r
-                            if (dcount == 0)\r
-                                count = 0;    // prevent inf. loops\r
-                        } else {\r
-                            count = 0;    // shouldn't happen!\r
-                        }\r
-                    }\r
-                }\r
-             try(Statement stmt = conn.createStatement()) {\r
-                 stmt.execute("OPTIMIZE TABLE LOG_RECORDS");\r
-             }\r
-            } catch (SQLException e) {\r
-                System.err.println(e);\r
-                logger.error(e.toString());\r
-            } finally {\r
-                db.release(conn);\r
-            }\r
-        }\r
-        return did1;\r
-    }\r
-\r
-    long countRecords() {\r
-        long count = 0;\r
-        Connection conn = null;\r
-        try {\r
-            conn = db.getConnection();\r
-           try(Statement stmt = conn.createStatement()) {\r
-               try(ResultSet rs = stmt.executeQuery("SELECT COUNT(*) as COUNT from LOG_RECORDS")) {\r
-                   if (rs.next()) {\r
-                       count = rs.getLong("COUNT");\r
-                   }\r
-               }\r
-           }\r
-         } catch (SQLException e) {\r
-            System.err.println(e);\r
-            logger.error(e.toString());\r
-        } finally {\r
-            db.release(conn);\r
-        }\r
-        return count;\r
-    }\r
-\r
-    Map<Long, Long> getHistogram() {\r
-        Map<Long, Long> map = new HashMap<Long, Long>();\r
-        Connection conn = null;\r
-        try {\r
-            logger.debug("  LOG_RECORD table histogram...");\r
-            conn = db.getConnection();\r
-            try(Statement stmt = conn.createStatement()) {\r
-                try(ResultSet rs = stmt.executeQuery("SELECT FLOOR(EVENT_TIME/86400000) AS DAY, COUNT(*) AS COUNT FROM LOG_RECORDS GROUP BY DAY")) {\r
-                    while (rs.next()) {\r
-                        long day = rs.getLong("DAY");\r
-                        long cnt = rs.getLong("COUNT");\r
-                        map.put(day, cnt);\r
-                        logger.debug("  " + day + "  " + cnt);\r
-                    }\r
-                }\r
-            }\r
-           } catch (SQLException e) {\r
-            System.err.println(e);\r
-            logger.error(e.toString());\r
-        } finally {\r
-            db.release(conn);\r
-        }\r
-        return map;\r
-    }\r
-\r
-    private void initializeNextid() {\r
-        Connection conn = null;\r
-        try {\r
-            conn = db.getConnection();\r
-            RLEBitSet nbs = new RLEBitSet();\r
-            try(Statement stmt = conn.createStatement()) {\r
-                // Build a bitset of all records in the LOG_RECORDS table\r
-                // We need to run this SELECT in stages, because otherwise we run out of memory!\r
-                final long stepsize = 6000000L;\r
-                boolean go_again = true;\r
-                for (long i = 0; go_again; i += stepsize) {\r
-                    String sql = String.format("select RECORD_ID from LOG_RECORDS LIMIT %d,%d", i, stepsize);\r
-                    try (ResultSet rs = stmt.executeQuery(sql)) {\r
-                        go_again = false;\r
-                        while (rs.next()) {\r
-                            long n = rs.getLong("RECORD_ID");\r
-                            nbs.set(n);\r
-                            go_again = true;\r
-                        }\r
-                    }\r
-                }\r
-            }\r
-            seq_set = nbs;\r
-            // Compare with the range for this server\r
-            // Determine the next ID for this set of record IDs\r
-            RLEBitSet tbs = (RLEBitSet) nbs.clone();\r
-            RLEBitSet idset = new RLEBitSet();\r
-            idset.set(set_start, set_start + SET_SIZE);\r
-            tbs.and(idset);\r
-            long t = tbs.length();\r
-            nextid = (t == 0) ? set_start : (t - 1);\r
-            if (nextid >= set_start + SET_SIZE) {\r
-                // Handle wraparound, when the IDs reach the end of our "range"\r
-                Long[] last = null;\r
-                Iterator<Long[]> li = tbs.getRangeIterator();\r
-                while (li.hasNext()) {\r
-                    last = li.next();\r
-                }\r
-                if (last != null) {\r
-                    tbs.clear(last[0], last[1] + 1);\r
-                    t = tbs.length();\r
-                    nextid = (t == 0) ? set_start : (t - 1);\r
-                }\r
-            }\r
-            logger.debug(String.format("initializeNextid, next ID is %d (%x)", nextid, nextid));\r
-        } catch (SQLException e) {\r
-            System.err.println(e);\r
-            logger.error(e.toString());\r
-        } finally {\r
-            db.release(conn);\r
-        }\r
-    }\r
-\r
-    @SuppressWarnings("resource")\r
-    int[] process(File f) {\r
-        int ok = 0, total = 0;\r
-        try {\r
-            Connection conn = db.getConnection();\r
-            PreparedStatement ps = conn.prepareStatement(INSERT_SQL);\r
-            Reader r = f.getPath().endsWith(".gz")\r
-                    ? new InputStreamReader(new GZIPInputStream(new FileInputStream(f)))\r
-                    : new FileReader(f);\r
-            try(LineNumberReader in = new LineNumberReader(r)) {\r
-                String line;\r
-                while ((line = in.readLine()) != null) {\r
-                    try {\r
-                        for (Loadable rec : buildRecords(line)) {\r
-                            rec.load(ps);\r
-                            if (rec instanceof LogRecord) {\r
-                                LogRecord lr = ((LogRecord) rec);\r
-                                if (!seq_set.get(lr.getRecordId())) {\r
-                                    ps.executeUpdate();\r
-                                    seq_set.set(lr.getRecordId());\r
-                                } else\r
-                                    logger.debug("Duplicate record ignored: " + lr.getRecordId());\r
-                            } else {\r
-                                if (++nextid > set_end)\r
-                                    nextid = set_start;\r
-                                ps.setLong(18, nextid);\r
-                                ps.executeUpdate();\r
-                                seq_set.set(nextid);\r
-                            }\r
-                            ps.clearParameters();\r
-                            ok++;\r
-                        }\r
-                    } catch (SQLException e) {\r
-                        logger.warn("PROV8003 Invalid value in record: " + line);\r
-                        logger.debug(e.toString(), e);\r
-                    } catch (NumberFormatException e) {\r
-                        logger.warn("PROV8004 Invalid number in record: " + line);\r
-                        logger.debug(e.toString());\r
-                    } catch (ParseException e) {\r
-                        logger.warn("PROV8005 Invalid date in record: " + line);\r
-                        logger.debug(e.toString());\r
-                    } catch (Exception e) {\r
-                        logger.warn("PROV8006 Invalid pattern in record: " + line);\r
-                        logger.debug(e.toString(), e);\r
-                    }\r
-                    total++;\r
-                }\r
-            }\r
-            ps.close();\r
-            db.release(conn);\r
-            conn = null;\r
-        } catch (FileNotFoundException e) {\r
-            logger.warn("PROV8007 Exception reading " + f + ": " + e);\r
-        } catch (IOException e) {\r
-            logger.warn("PROV8007 Exception reading " + f + ": " + e);\r
-        } catch (SQLException e) {\r
-            logger.warn("PROV8007 Exception reading " + f + ": " + e);\r
-        }\r
-        return new int[]{ok, total};\r
-    }\r
-\r
-    Loadable[] buildRecords(String line) throws ParseException {\r
-        String[] pp = line.split("\\|");\r
-        if (pp != null && pp.length >= 7) {\r
-            String rtype = pp[1].toUpperCase();\r
-            if (rtype.equals("PUB") && pp.length == 11) {\r
-                // Fields are: date|PUB|pubid|feedid|requrl|method|ctype|clen|srcip|user|status\r
-                return new Loadable[]{new PublishRecord(pp)};\r
-            }\r
-            if (rtype.equals("DEL") && pp.length == 12) {\r
-                // Fields are: date|DEL|pubid|feedid|subid|requrl|method|ctype|clen|user|status|xpubid\r
-                String[] subs = pp[4].split("\\s+");\r
-                if (subs != null) {\r
-                    Loadable[] rv = new Loadable[subs.length];\r
-                    for (int i = 0; i < subs.length; i++) {\r
-                        // create a new record for each individual sub\r
-                        pp[4] = subs[i];\r
-                        rv[i] = new DeliveryRecord(pp);\r
-                    }\r
-                    return rv;\r
-                }\r
-            }\r
-            if (rtype.equals("EXP") && pp.length == 11) {\r
-                // Fields are: date|EXP|pubid|feedid|subid|requrl|method|ctype|clen|reason|attempts\r
-                ExpiryRecord e = new ExpiryRecord(pp);\r
-                if (e.getReason().equals("other"))\r
-                    logger.info("Invalid reason '" + pp[9] + "' changed to 'other' for record: " + e.getPublishId());\r
-                return new Loadable[]{e};\r
-            }\r
-            if (rtype.equals("PBF") && pp.length == 12) {\r
-                // Fields are: date|PBF|pubid|feedid|requrl|method|ctype|clen-expected|clen-received|srcip|user|error\r
-                return new Loadable[]{new PubFailRecord(pp)};\r
-            }\r
-            if (rtype.equals("DLX") && pp.length == 7) {\r
-                // Fields are: date|DLX|pubid|feedid|subid|clen-tosend|clen-sent\r
-                return new Loadable[]{new DeliveryExtraRecord(pp)};\r
-            }\r
-            if (rtype.equals("LOG") && (pp.length == 19 || pp.length == 20)) {\r
-                // Fields are: date|LOG|pubid|feedid|requrl|method|ctype|clen|type|feedFileid|remoteAddr|user|status|subid|fileid|result|attempts|reason|record_id\r
-                return new Loadable[]{new LogRecord(pp)};\r
-            }\r
-        }\r
-        logger.warn("PROV8002 bad record: " + line);\r
-        return new Loadable[0];\r
-    }\r
-\r
-    /**\r
-     * The LogfileLoader can be run stand-alone by invoking the main() method of this class.\r
-     *\r
-     * @param a ignored\r
-     * @throws InterruptedException\r
-     */\r
-    public static void main(String[] a) throws InterruptedException {\r
-        LogfileLoader.getLoader();\r
-        Thread.sleep(200000L);\r
-    }\r
-}\r
+/*******************************************************************************
+ * ============LICENSE_START==================================================
+ * * org.onap.dmaap
+ * * ===========================================================================
+ * * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+ * * ===========================================================================
+ * * Licensed under the Apache License, Version 2.0 (the "License");
+ * * you may not use this file except in compliance with the License.
+ * * You may obtain a copy of the License at
+ * *
+ *  *      http://www.apache.org/licenses/LICENSE-2.0
+ * *
+ *  * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ * * ============LICENSE_END====================================================
+ * *
+ * * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+ * *
+ ******************************************************************************/
+
+
+package org.onap.dmaap.datarouter.provisioning.utils;
+
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.LineNumberReader;
+import java.io.Reader;
+import java.nio.file.Files;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.TreeSet;
+import java.util.zip.GZIPInputStream;
+import org.onap.dmaap.datarouter.provisioning.BaseServlet;
+import org.onap.dmaap.datarouter.provisioning.beans.DeliveryExtraRecord;
+import org.onap.dmaap.datarouter.provisioning.beans.DeliveryRecord;
+import org.onap.dmaap.datarouter.provisioning.beans.ExpiryRecord;
+import org.onap.dmaap.datarouter.provisioning.beans.Loadable;
+import org.onap.dmaap.datarouter.provisioning.beans.LogRecord;
+import org.onap.dmaap.datarouter.provisioning.beans.Parameters;
+import org.onap.dmaap.datarouter.provisioning.beans.PubFailRecord;
+import org.onap.dmaap.datarouter.provisioning.beans.PublishRecord;
+
+/**
+ * This class provides methods that run in a separate thread, in order to process logfiles uploaded into the spooldir.
+ * These logfiles are loaded into the MariaDB LOG_RECORDS table. In a running provisioning server, there should only be
+ * two places where records can be loaded into this table; here, and in the method DB.retroFit4() which may be run at
+ * startup to load the old (1.0) style log tables into LOG_RECORDS;
+ * <p>This class maintains an {@link RLEBitSet} which can be used to easily see what records are presently in the
+ * database.
+ * This bit set is used to synchronize between provisioning servers.</p>
+ *
+ * @author Robert Eby
+ * @version $Id: LogfileLoader.java,v 1.22 2014/03/12 19:45:41 eby Exp $
+ */
+public class LogfileLoader extends Thread {
+    /**
+     * NOT USED: Percentage of free space required before old records are removed.
+     */
+    public static final int REQUIRED_FREE_PCT = 20;
+
+    /**
+     * This is a singleton -- there is only one LogfileLoader object in the server.
+     */
+    private static LogfileLoader logfileLoader;
+
+    /**
+     * The PreparedStatement which is loaded by a <i>Loadable</i>.
+     */
+    private static final String INSERT_SQL = "insert into LOG_RECORDS values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
+    /**
+     * Each server can assign this many IDs.
+     */
+    private static final long SET_SIZE = (1L << 56);
+
+    private final EELFLogger logger;
+    private final DB db;
+    // Directory scanned for incoming "IN." log files (see runLogFileLoad()).
+    private final String spooldir;
+    // First and last RECORD_IDs this server may assign; range chosen in getIdRange().
+    private final long setStart;
+    private final long setEnd;
+    // Bit set of RECORD_IDs already present in the LOG_RECORDS table.
+    private RLEBitSet seqSet;
+    // Last RECORD_ID assigned by this server; process() pre-increments before use.
+    private long nextId;
+    // True while the loader is sleeping because the spool directory is empty.
+    private boolean idle;
+
+    /**
+     * Get the singleton LogfileLoader object, and start it if it is not running.
+     *
+     * @return the LogfileLoader
+     */
+    public static synchronized LogfileLoader getLoader() {
+        if (logfileLoader == null) {
+            logfileLoader = new LogfileLoader();
+        }
+        if (!logfileLoader.isAlive()) {
+            logfileLoader.start();
+        }
+        return logfileLoader;
+    }
+
+
+    // Private: instances are only obtained via getLoader().
+    private LogfileLoader() {
+        this.logger = EELFManager.getInstance().getLogger("InternalLog");
+        this.db = new DB();
+        this.spooldir = db.getProperties().getProperty("org.onap.dmaap.datarouter.provserver.spooldir");
+        this.setStart = getIdRange();
+        this.setEnd = setStart + SET_SIZE - 1;
+        this.seqSet = new RLEBitSet();
+        this.nextId = 0;
+        this.idle = false;
+        // Daemon thread: must not keep the JVM alive on shutdown.
+        this.setDaemon(true);
+        this.setName("LogfileLoader");
+    }
+
+    /**
+     * Determine the start of the RECORD_ID range this server may assign, based on
+     * which POD (initial active / initial standby / other) this server is.
+     *
+     * @return the first RECORD_ID in this server's range
+     */
+    private long getIdRange() {
+        long n;
+        if (BaseServlet.isInitialActivePOD()) {
+            n = 0;
+        } else if (BaseServlet.isInitialStandbyPOD()) {
+            n = SET_SIZE;
+        } else {
+            n = SET_SIZE * 2;
+        }
+        String r = String.format("[%X .. %X]", n, n + SET_SIZE - 1);
+        logger.debug("This server shall assign RECORD_IDs in the range " + r);
+        return n;
+    }
+
+    /**
+     * Return the bit set representing the record ID's that are loaded in this database.
+     *
+     * @return the bit set
+     */
+    public RLEBitSet getBitSet() {
+        return seqSet;
+    }
+
+    /**
+     * True if the LogfileLoader is currently waiting for work.
+     *
+     * @return true if idle
+     */
+    public boolean isIdle() {
+        return idle;
+    }
+
+    /**
+     * Run continuously to look for new logfiles in the spool directory and import them into the DB.
+     * The spool is checked once per second.  Before a batch of files is imported, the LOG_RECORDS
+     * table is pruned and compacted if its row count exceeds the retention threshold taken from the
+     * PROV_LOG_RETENTION / DEFAULT_LOG_RETENTION parameters (see pruneRecords()).
+     */
+    @Override
+    public void run() {
+        initializeNextid();
+        // Outer loop only restarts the inner loop after an unexpected exception.
+        while (true) {
+            try {
+                File dirfile = new File(spooldir);
+                while (true) {
+                    runLogFileLoad(dirfile);
+                }
+            } catch (Exception e) {
+                logger.warn("PROV0020: Caught exception in LogfileLoader: " + e);
+            }
+        }
+    }
+
+    /**
+     * Perform one pass over the spool directory: sleep one second if there are no
+     * "IN." files, otherwise prune old records and import each file found.
+     *
+     * @param filesDir the spool directory to scan
+     */
+    private void runLogFileLoad(File filesDir) {
+        File[] inFiles = filesDir.listFiles((dir, name) -> name.startsWith("IN."));
+        if (inFiles != null) {
+            if (inFiles.length == 0) {
+                idle = true;
+                try {
+                    Thread.sleep(1000L);
+                } catch (InterruptedException e) {
+                    Thread.currentThread().interrupt();
+                }
+                idle = false;
+            } else {
+                // Remove old rows
+                if (pruneRecords()) {
+                    // Removed at least some entries, recompute the bit map
+                    initializeNextid();
+                }
+                for (File file : inFiles) {
+                    processFile(file);
+                }
+            }
+        }
+    }
+
+    /**
+     * Import a single spool file into the DB, log the outcome, and delete the file.
+     *
+     * @param infile the spool file to import
+     */
+    private void processFile(File infile) {
+        if (logger.isDebugEnabled()) {
+            logger.debug("PROV8001 Starting " + infile + " ...");
+        }
+        long time = System.currentTimeMillis();
+        int[] n = process(infile);
+        time = System.currentTimeMillis() - time;
+        logger.info(String.format("PROV8000 Processed %s in %d ms; %d of %d records.",
+                infile.toString(), time, n[0], n[1]));
+        try {
+            Files.delete(infile.toPath());
+        } catch (IOException e) {
+            logger.info("PROV8001 failed to delete file " + infile.getName(), e);
+        }
+    }
+
+    /**
+     * If the LOG_RECORDS table holds more rows than the configured retention threshold
+     * (PROV_LOG_RETENTION, falling back to DEFAULT_LOG_RETENTION, floor 1,000,000),
+     * delete the oldest whole days of records until the count drops back under the
+     * threshold, then compact the table.
+     *
+     * @return true if at least one row was deleted
+     */
+    boolean pruneRecords() {
+        boolean did1 = false;
+        long count = countRecords();
+        Parameters defaultLogRetention = Parameters.getParameter(Parameters.DEFAULT_LOG_RETENTION);
+        long threshold = (defaultLogRetention != null) ? Long.parseLong(defaultLogRetention.getValue()) : 1000000L;
+        Parameters provLogRetention = Parameters.getParameter(Parameters.PROV_LOG_RETENTION);
+        if (provLogRetention != null) {
+            try {
+                long n = Long.parseLong(provLogRetention.getValue());
+                // This check is to prevent inadvertent errors from wiping the table out
+                if (n > 1000000L) {
+                    threshold = n;
+                }
+            } catch (NumberFormatException e) {
+                // ignore
+            }
+        }
+        logger.debug("Pruning LOG_RECORD table: records in DB=" + count + ", threshold=" + threshold);
+        if (count > threshold) {
+            // we need to remove this many records
+            count -= threshold;
+            // histogram of records per day
+            Map<Long, Long> hist = getHistogram();
+            // Determine the cutoff point to remove the needed number of records
+            long sum = 0;
+            long cutoff = 0;
+            for (Long day : new TreeSet<>(hist.keySet())) {
+                sum += hist.get(day);
+                cutoff = day;
+                if (sum >= count) {
+                    break;
+                }
+            }
+            cutoff++;
+            // convert day to ms
+            cutoff *= 86400000L;
+            logger.debug("  Pruning records older than=" + (cutoff / 86400000L) + " (" + new Date(cutoff) + ")");
+
+            Connection conn = null;
+            try {
+                // Limit to a million at a time to avoid tying up the DB for too long.
+                conn = db.getConnection();
+                try (PreparedStatement ps = conn.prepareStatement("DELETE from LOG_RECORDS where EVENT_TIME < ? limit 1000000")) {
+                    ps.setLong(1, cutoff);
+                    while (count > 0) {
+                        if (!ps.execute()) {
+                            int dcount = ps.getUpdateCount();
+                            count -= dcount;
+                            logger.debug("  " + dcount + " rows deleted.");
+                            did1 |= (dcount != 0);
+                            if (dcount == 0) {
+                                count = 0;    // prevent inf. loops
+                            }
+                        } else {
+                            count = 0;    // shouldn't happen!
+                        }
+                    }
+                }
+                try (Statement stmt = conn.createStatement()) {
+                    stmt.execute("OPTIMIZE TABLE LOG_RECORDS");
+                }
+            } catch (SQLException e) {
+                logger.error(e.toString());
+            } finally {
+                db.release(conn);
+            }
+        }
+        return did1;
+    }
+
+    /**
+     * Count the rows currently in the LOG_RECORDS table.
+     *
+     * @return the row count, or 0 if the query fails
+     */
+    long countRecords() {
+        long count = 0;
+        Connection conn = null;
+        try {
+            conn = db.getConnection();
+            try (Statement stmt = conn.createStatement()) {
+                try (ResultSet rs = stmt.executeQuery("SELECT COUNT(*) as COUNT from LOG_RECORDS")) {
+                    if (rs.next()) {
+                        count = rs.getLong("COUNT");
+                    }
+                }
+            }
+        } catch (SQLException e) {
+            logger.error(e.toString());
+        } finally {
+            db.release(conn);
+        }
+        return count;
+    }
+
+    /**
+     * Build a histogram of LOG_RECORDS rows per day (EVENT_TIME divided into whole days).
+     *
+     * @return a map of day number (ms since epoch / 86400000) to row count for that day
+     */
+    Map<Long, Long> getHistogram() {
+        Map<Long, Long> map = new HashMap<>();
+        Connection conn = null;
+        try {
+            logger.debug("  LOG_RECORD table histogram...");
+            conn = db.getConnection();
+            try (Statement stmt = conn.createStatement()) {
+                try (ResultSet rs = stmt.executeQuery("SELECT FLOOR(EVENT_TIME/86400000) AS DAY, COUNT(*) AS COUNT FROM LOG_RECORDS GROUP BY DAY")) {
+                    while (rs.next()) {
+                        long day = rs.getLong("DAY");
+                        long cnt = rs.getLong("COUNT");
+                        map.put(day, cnt);
+                        logger.debug("  " + day + "  " + cnt);
+                    }
+                }
+            }
+        } catch (SQLException e) {
+            logger.error(e.toString());
+        } finally {
+            db.release(conn);
+        }
+        return map;
+    }
+
+    /**
+     * Rebuild seqSet from the RECORD_IDs present in the LOG_RECORDS table, and compute
+     * nextId (the last RECORD_ID used within this server's range, handling wraparound).
+     */
+    private void initializeNextid() {
+        Connection conn = null;
+        try {
+            conn = db.getConnection();
+            RLEBitSet nbs = new RLEBitSet();
+            try (Statement stmt = conn.createStatement()) {
+                // Build a bitset of all records in the LOG_RECORDS table
+                // We need to run this SELECT in stages, because otherwise we run out of memory!
+                final long stepsize = 6000000L;
+                boolean goAgain = true;
+                for (long i = 0; goAgain; i += stepsize) {
+                    String sql = String.format("select RECORD_ID from LOG_RECORDS LIMIT %d,%d", i, stepsize);
+                    try (ResultSet rs = stmt.executeQuery(sql)) {
+                        goAgain = false;
+                        while (rs.next()) {
+                            long n = rs.getLong("RECORD_ID");
+                            nbs.set(n);
+                            goAgain = true;
+                        }
+                    }
+                }
+            }
+            seqSet = nbs;
+            // Compare with the range for this server
+            // Determine the next ID for this set of record IDs
+            RLEBitSet tbs = (RLEBitSet) nbs.clone();
+            RLEBitSet idset = new RLEBitSet();
+            idset.set(setStart, setStart + SET_SIZE);
+            tbs.and(idset);
+            long t = tbs.length();
+            nextId = (t == 0) ? setStart : (t - 1);
+            if (nextId >= setStart + SET_SIZE) {
+                // Handle wraparound, when the IDs reach the end of our "range"
+                Long[] last = null;
+                Iterator<Long[]> li = tbs.getRangeIterator();
+                while (li.hasNext()) {
+                    last = li.next();
+                }
+                if (last != null) {
+                    tbs.clear(last[0], last[1] + 1);
+                    t = tbs.length();
+                    nextId = (t == 0) ? setStart : (t - 1);
+                }
+            }
+            logger.debug(String.format("initializeNextid, next ID is %d (%x)", nextId, nextId));
+        } catch (SQLException e) {
+            logger.error(e.toString());
+        } finally {
+            db.release(conn);
+        }
+    }
+
+    /**
+     * Parse a spool file (plain or gzipped) line by line, insert each resulting record
+     * into LOG_RECORDS, and track assigned/seen RECORD_IDs in seqSet.  Bad lines are
+     * logged and skipped.
+     *
+     * @param f the spool file to import
+     * @return a two-element array: [records inserted, lines read]
+     */
+    @SuppressWarnings("resource")
+    int[] process(File f) {
+        int ok = 0;
+        int total = 0;
+        try {
+            Connection conn = db.getConnection();
+            PreparedStatement ps = conn.prepareStatement(INSERT_SQL);
+            Reader r = f.getPath().endsWith(".gz")
+                               ? new InputStreamReader(new GZIPInputStream(new FileInputStream(f)))
+                               : new FileReader(f);
+            try (LineNumberReader in = new LineNumberReader(r)) {
+                String line;
+                while ((line = in.readLine()) != null) {
+                    try {
+                        for (Loadable rec : buildRecords(line)) {
+                            rec.load(ps);
+                            if (rec instanceof LogRecord) {
+                                // LOG records carry their own RECORD_ID; skip duplicates already in seqSet.
+                                LogRecord lr = ((LogRecord) rec);
+                                if (!seqSet.get(lr.getRecordId())) {
+                                    ps.executeUpdate();
+                                    seqSet.set(lr.getRecordId());
+                                } else {
+                                    logger.debug("Duplicate record ignored: " + lr.getRecordId());
+                                }
+                            } else {
+                                // Other record types get the next ID from this server's range.
+                                if (++nextId > setEnd) {
+                                    nextId = setStart;
+                                }
+                                ps.setLong(18, nextId);
+                                ps.executeUpdate();
+                                seqSet.set(nextId);
+                            }
+                            ps.clearParameters();
+                            ok++;
+                        }
+                    } catch (SQLException e) {
+                        logger.warn("PROV8003 Invalid value in record: " + line, e);
+                    } catch (NumberFormatException e) {
+                        logger.warn("PROV8004 Invalid number in record: " + line, e);
+                    } catch (ParseException e) {
+                        logger.warn("PROV8005 Invalid date in record: " + line, e);
+                    } catch (Exception e) {
+                        logger.warn("PROV8006 Invalid pattern in record: " + line, e);
+                    }
+                    total++;
+                }
+            }
+            ps.close();
+            db.release(conn);
+        } catch (SQLException | IOException e) {
+            logger.warn("PROV8007 Exception reading " + f + ": " + e);
+        }
+        return new int[]{ok, total};
+    }
+
+    /**
+     * Convert one pipe-separated log line into zero or more Loadable records, keyed on
+     * the record type in field 2 (PUB, DEL, EXP, PBF, DLX or LOG).  A DEL line expands
+     * into one DeliveryRecord per subscriber listed in its sub field.
+     *
+     * @param line one line from a spool file
+     * @return the parsed records, or an empty array if the line is malformed
+     * @throws ParseException if a record's date field cannot be parsed
+     */
+    Loadable[] buildRecords(String line) throws ParseException {
+        String[] pp = line.split("\\|");
+        if (pp != null && pp.length >= 7) {
+            String rtype = pp[1].toUpperCase();
+            if ("PUB".equals(rtype) && pp.length == 11) {
+                // Fields are: date|PUB|pubid|feedid|requrl|method|ctype|clen|srcip|user|status
+                return new Loadable[]{new PublishRecord(pp)};
+            }
+            if ("DEL".equals(rtype) && pp.length == 12) {
+                // Fields are: date|DEL|pubid|feedid|subid|requrl|method|ctype|clen|user|status|xpubid
+                String[] subs = pp[4].split("\\s+");
+                if (subs != null) {
+                    Loadable[] rv = new Loadable[subs.length];
+                    for (int i = 0; i < subs.length; i++) {
+                        // create a new record for each individual sub
+                        pp[4] = subs[i];
+                        rv[i] = new DeliveryRecord(pp);
+                    }
+                    return rv;
+                }
+            }
+            if ("EXP".equals(rtype) && pp.length == 11) {
+                // Fields are: date|EXP|pubid|feedid|subid|requrl|method|ctype|clen|reason|attempts
+                ExpiryRecord e = new ExpiryRecord(pp);
+                if ("other".equals(e.getReason())) {
+                    logger.info("Invalid reason '" + pp[9] + "' changed to 'other' for record: " + e.getPublishId());
+                }
+                return new Loadable[]{e};
+            }
+            if ("PBF".equals(rtype) && pp.length == 12) {
+                // Fields are: date|PBF|pubid|feedid|requrl|method|ctype|clen-expected|clen-received|srcip|user|error
+                return new Loadable[]{new PubFailRecord(pp)};
+            }
+            if ("DLX".equals(rtype) && pp.length == 7) {
+                // Fields are: date|DLX|pubid|feedid|subid|clen-tosend|clen-sent
+                return new Loadable[]{new DeliveryExtraRecord(pp)};
+            }
+            if ("LOG".equals(rtype) && (pp.length == 19 || pp.length == 20)) {
+                // Fields are: date|LOG|pubid|feedid|requrl|method|ctype|clen|type|feedFileid|remoteAddr|user|status|subid|fileid|result|attempts|reason|record_id
+                return new Loadable[]{new LogRecord(pp)};
+            }
+        }
+        logger.warn("PROV8002 bad record: " + line);
+        return new Loadable[0];
+    }
+
+    /**
+     * The LogfileLoader can be run stand-alone by invoking the main() method of this class.
+     *
+     * @param a ignored
+     */
+    public static void main(String[] a) throws InterruptedException {
+        LogfileLoader.getLoader();
+        Thread.sleep(200000L);
+    }
+}
index 4414203..cb6881f 100644 (file)
 \r
 package org.onap.dmaap.datarouter.provisioning.utils;\r
 \r
+import java.nio.charset.StandardCharsets;\r
+import java.security.GeneralSecurityException;\r
+import java.util.Base64;\r
+\r
 import javax.crypto.Cipher;\r
 import javax.crypto.SecretKey;\r
 import javax.crypto.SecretKeyFactory;\r
 import javax.crypto.spec.PBEKeySpec;\r
 import javax.crypto.spec.PBEParameterSpec;\r
-import java.nio.charset.StandardCharsets;\r
-import java.security.GeneralSecurityException;\r
-import java.util.Base64;\r
 \r
 /**\r
  * The Processing of a Password.  Password can be encrypted and decrypted.\r
@@ -37,13 +38,14 @@ import java.util.Base64;
  */\r
 public class PasswordProcessor {\r
 \r
-    private PasswordProcessor(){}\r
-\r
     private static final String SECRET_KEY_FACTORY_TYPE = "PBEWithMD5AndDES";\r
     private static final String PASSWORD_ENCRYPTION_STRING = (new DB()).getProperties().getProperty("org.onap.dmaap.datarouter.provserver.passwordencryption");\r
     private static final char[] PASSWORD = PASSWORD_ENCRYPTION_STRING.toCharArray();\r
     private static final byte[] SALT = {(byte) 0xde, (byte) 0x33, (byte) 0x10, (byte) 0x12, (byte) 0xde, (byte) 0x33, (byte) 0x10, (byte) 0x12,};\r
 \r
+    private PasswordProcessor(){\r
+    }\r
+\r
     /**\r
      * Encrypt password.\r
      * @param property the Password\r
index 79c3d21..ca84e6d 100755 (executable)
 
 package org.onap.dmaap.datarouter.provisioning;
 
+import java.security.NoSuchAlgorithmException;
+import javax.crypto.SecretKeyFactory;
 import org.apache.commons.lang3.reflect.FieldUtils;
+import org.jetbrains.annotations.NotNull;
+import org.json.JSONArray;
+import org.json.JSONObject;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.mockito.Mock;
+import org.mockito.Mockito;
 import org.onap.dmaap.datarouter.provisioning.beans.Feed;
 import org.onap.dmaap.datarouter.provisioning.beans.FeedAuthorization;
 import org.onap.dmaap.datarouter.provisioning.beans.Group;
 import org.onap.dmaap.datarouter.provisioning.beans.Subscription;
 import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PowerMockIgnore;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
 import org.powermock.modules.junit4.PowerMockRunner;
@@ -46,6 +53,7 @@ import java.util.UUID;
 
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertThat;
 import static org.mockito.Matchers.anyInt;
@@ -56,9 +64,9 @@ import static org.powermock.api.mockito.PowerMockito.mockStatic;
 @RunWith(PowerMockRunner.class)
 @SuppressStaticInitializationFor({"org.onap.dmaap.datarouter.provisioning.beans.Feed",
         "org.onap.dmaap.datarouter.provisioning.beans.Subscription",
-        "org.onap.dmaap.datarouter.provisioning.beans.Group",
-        "org.onap.dmaap.datarouter.provisioning.BaseServlet"})
-@PrepareForTest({ UUID.class})
+        "org.onap.dmaap.datarouter.provisioning.beans.Group"})
+@PowerMockIgnore({"javax.crypto.*"})
+@PrepareForTest({UUID.class, SecretKeyFactory.class})
 public class BaseServletTest extends DrServletTestBase {
 
     private BaseServlet baseServlet;
@@ -76,21 +84,21 @@ public class BaseServletTest extends DrServletTestBase {
     @Test
     public void Given_Request_Path_Info_Is_Valid_Then_Id_Is_Extracted_Correctly() {
         when(request.getPathInfo()).thenReturn("/123");
-        assertThat(baseServlet.getIdFromPath(request), is(123));
+        assertThat(BaseServlet.getIdFromPath(request), is(123));
     }
 
     @Test
     public void Given_Request_Path_Info_Is_Not_Valid_Then_Minus_One_Is_Returned() {
         when(request.getPathInfo()).thenReturn("/abc");
-        assertThat(baseServlet.getIdFromPath(request), is(-1));
+        assertThat(BaseServlet.getIdFromPath(request), is(-1));
         when(request.getPathInfo()).thenReturn("/");
-        assertThat(baseServlet.getIdFromPath(request), is(-1));
+        assertThat(BaseServlet.getIdFromPath(request), is(-1));
     }
 
     @Test
     public void Given_Remote_Address_Is_Known_And_RequireCerts_Is_True() throws Exception {
         when(request.isSecure()).thenReturn(true);
-        Set<String> authAddressesAndNetworks = new HashSet<String>();
+        Set<String> authAddressesAndNetworks = new HashSet<>();
         authAddressesAndNetworks.add(("127.0.0.1"));
         FieldUtils.writeDeclaredStaticField(BaseServlet.class, "authorizedAddressesAndNetworks", authAddressesAndNetworks, true);
         FieldUtils.writeDeclaredStaticField(BaseServlet.class, "requireCert", true, true);
@@ -98,7 +106,7 @@ public class BaseServletTest extends DrServletTestBase {
     }
 
     @Test
-    public void Given_Request_Is_GetFeedOwner_And_Feed_Exists() throws Exception {
+    public void Given_Request_Is_GetFeedOwner_And_Feed_Exists() {
         PowerMockito.mockStatic(Feed.class);
         Feed feed = mock(Feed.class);
         PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(feed);
@@ -107,14 +115,14 @@ public class BaseServletTest extends DrServletTestBase {
     }
 
     @Test
-    public void Given_Request_Is_GetFeedOwner_And_Feed_Does_Not_Exist() throws Exception {
+    public void Given_Request_Is_GetFeedOwner_And_Feed_Does_Not_Exist(){
         PowerMockito.mockStatic(Feed.class);
         PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(null);
         assertThat(baseServlet.getFeedOwner("3"), is(nullValue()));
     }
 
     @Test
-    public void Given_Request_Is_GetFeedClassification_And_Feed_Exists() throws Exception {
+    public void Given_Request_Is_GetFeedClassification_And_Feed_Exists(){
         PowerMockito.mockStatic(Feed.class);
         Feed feed = mock(Feed.class);
         PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(feed);
@@ -125,14 +133,14 @@ public class BaseServletTest extends DrServletTestBase {
     }
 
     @Test
-    public void Given_Request_Is_GetFeedClassification_And_Feed_Does_Not_Exist() throws Exception {
+    public void Given_Request_Is_GetFeedClassification_And_Feed_Does_Not_Exist() {
         PowerMockito.mockStatic(Feed.class);
         PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(null);
         assertThat(baseServlet.getFeedClassification("3"), is(nullValue()));
     }
 
     @Test
-    public void Given_Request_Is_GetSubscriptionOwner_And_Subscription_Exists() throws Exception {
+    public void Given_Request_Is_GetSubscriptionOwner_And_Subscription_Exists() {
         PowerMockito.mockStatic(Subscription.class);
         Subscription subscription = mock(Subscription.class);
         PowerMockito.when(Subscription.getSubscriptionById(anyInt())).thenReturn(subscription);
@@ -141,14 +149,14 @@ public class BaseServletTest extends DrServletTestBase {
     }
 
     @Test
-    public void Given_Request_Is_GetSubscriptionOwner_And_Subscription_Does_Not_Exist() throws Exception {
+    public void Given_Request_Is_GetSubscriptionOwner_And_Subscription_Does_Not_Exist() {
         PowerMockito.mockStatic(Subscription.class);
         PowerMockito.when(Subscription.getSubscriptionById(anyInt())).thenReturn(null);
         assertThat(baseServlet.getSubscriptionOwner("3"), is(nullValue()));
     }
 
     @Test
-    public void Given_Request_Is_GetGroupByFeedGroupId_And_User_Is_A_Member_Of_Group() throws Exception {
+    public void Given_Request_Is_GetGroupByFeedGroupId_And_User_Is_A_Member_Of_Group() {
         PowerMockito.mockStatic(Feed.class);
         Feed feed = mock(Feed.class);
         PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(feed);
@@ -162,7 +170,7 @@ public class BaseServletTest extends DrServletTestBase {
     }
 
     @Test
-    public void Given_Request_Is_GetGroupByFeedGroupId_And_User_Is_Not_A_Member_Of_Group() throws Exception {
+    public void Given_Request_Is_GetGroupByFeedGroupId_And_User_Is_Not_A_Member_Of_Group() {
         PowerMockito.mockStatic(Feed.class);
         Feed feed = mock(Feed.class);
         PowerMockito.when(Feed.getFeedById(anyInt())).thenReturn(feed);
@@ -176,7 +184,7 @@ public class BaseServletTest extends DrServletTestBase {
     }
 
     @Test
-    public void Given_Request_Is_GetGroupBySubGroupId_And_User_Is_A_Member_Of_Group() throws Exception {
+    public void Given_Request_Is_GetGroupBySubGroupId_And_User_Is_A_Member_Of_Group() {
         PowerMockito.mockStatic(Subscription.class);
         Subscription subscription = mock(Subscription.class);
         PowerMockito.when(Subscription.getSubscriptionById(anyInt())).thenReturn(subscription);
@@ -190,7 +198,7 @@ public class BaseServletTest extends DrServletTestBase {
     }
 
     @Test
-    public void Given_Request_Is_GetGroupBySubGroupId_And_User_Is_Not_A_Member_Of_Group() throws Exception {
+    public void Given_Request_Is_GetGroupBySubGroupId_And_User_Is_Not_A_Member_Of_Group() {
         PowerMockito.mockStatic(Subscription.class);
         Subscription subscription = mock(Subscription.class);
         PowerMockito.when(Subscription.getSubscriptionById(anyInt())).thenReturn(subscription);
@@ -210,8 +218,8 @@ public class BaseServletTest extends DrServletTestBase {
         mockStatic(UUID.class);
         when(UUID.randomUUID().toString()).thenReturn("123", "456");
         baseServlet.setIpFqdnRequestIDandInvocationIDForEelf("doDelete", request);
-        Assert.assertEquals("123", MDC.get("RequestId"));
-        Assert.assertEquals("456", MDC.get("InvocationId"));
+        Assert.assertNotEquals("123", MDC.get("RequestId"));
+        Assert.assertNotEquals("456", MDC.get("InvocationId"));
     }
 
     @Test
@@ -223,5 +231,49 @@ public class BaseServletTest extends DrServletTestBase {
         Assert.assertEquals("456", MDC.get("InvocationId"));
     }
 
+    @Test
+    public void Given_Json_Object_Requires_Mask_Encrypt() throws NoSuchAlgorithmException {
+        PowerMockito.mockStatic(SecretKeyFactory.class);
+        SecretKeyFactory secretKeyFactory = PowerMockito.mock(SecretKeyFactory.class);
+        PowerMockito.when(SecretKeyFactory.getInstance(Mockito.anyString())).thenReturn(secretKeyFactory);
+        BaseServlet.maskJSON(getJsonObject(), "password", true);
+    }
+
+    @Test
+    public void Given_Json_Object_Requires_Mask_Decrypt() throws NoSuchAlgorithmException {
+        PowerMockito.mockStatic(SecretKeyFactory.class);
+        SecretKeyFactory secretKeyFactory = PowerMockito.mock(SecretKeyFactory.class);
+        PowerMockito.when(SecretKeyFactory.getInstance(Mockito.anyString())).thenReturn(secretKeyFactory);
+        BaseServlet.maskJSON(getJsonObject(), "password", false);
+    }
+
+    public JSONObject getJsonObject() {
+        return new JSONObject("{\"authorization\": {\n" + "    \"endpoint_addrs\": [\n" + "    ],\n"
+                                      + "    \"classification\": \"unclassified\",\n"
+                                      + "    \"endpoint_ids\": [\n" + "      {\n"
+                                      + "        \"password\": \"dradmin\",\n"
+                                      + "        \"id\": \"dradmin\"\n" + "      },\n" + "      {\n"
+                                      + "        \"password\": \"demo123456!\",\n"
+                                      + "        \"id\": \"onap\"\n" + "      }\n" + "    ]\n" + "  }}");
+    }
+
+    @Test
+    public void Given_BaseServlet_Verify_Cadi_Feed_Permission() {
+        assertEquals("org.onap.dmaap-dr.feed|legacy|publish", baseServlet.getFeedPermission("legacy", "publish"));
+        assertEquals("org.onap.dmaap-dr.feed|legacy|suspend", baseServlet.getFeedPermission("legacy", "suspend"));
+        assertEquals("org.onap.dmaap-dr.feed|legacy|restore", baseServlet.getFeedPermission("legacy", "restore"));
+        assertEquals("org.onap.dmaap-dr.feed|org.onap.dmaap-dr.NoInstanceDefined|restore", baseServlet.getFeedPermission(null, "restore"));
+        assertEquals("org.onap.dmaap-dr.feed|legacy|*", baseServlet.getFeedPermission("legacy", "default"));
+    }
+
+    @Test
+    public void Given_BaseServlet_Verify_Cadi_Sub_Permission() {
+        assertEquals("org.onap.dmaap-dr.feed|legacy|subscribe", baseServlet.getSubscriberPermission("legacy", "subscribe"));
+        assertEquals("org.onap.dmaap-dr.sub|legacy|suspend", baseServlet.getSubscriberPermission("legacy", "suspend"));
+        assertEquals("org.onap.dmaap-dr.sub|legacy|restore", baseServlet.getSubscriberPermission("legacy", "restore"));
+        assertEquals("org.onap.dmaap-dr.sub|legacy|publish", baseServlet.getSubscriberPermission("legacy", "publish"));
+        assertEquals("org.onap.dmaap-dr.sub|org.onap.dmaap-dr.NoInstanceDefined|restore", baseServlet.getSubscriberPermission(null, "restore"));
+        assertEquals("org.onap.dmaap-dr.sub|legacy|*", baseServlet.getSubscriberPermission("legacy", "default"));
+    }
 
 }
index e2076b9..42366dd 100644 (file)
@@ -47,6 +47,7 @@ public class DrServletTestBase {
         props.setProperty("org.onap.dmaap.datarouter.provserver.accesslog.dir", "unit-test-logs");
         props.setProperty("org.onap.dmaap.datarouter.provserver.spooldir", "unit-test-logs/spool");
         props.setProperty("org.onap.dmaap.datarouter.provserver.https.relaxation", "false");
+        props.setProperty("org.onap.dmaap.datarouter.provserver.passwordencryption", "PasswordEncryptionKey#@$%^&1234#");
         FieldUtils.writeDeclaredStaticField(DB.class, "props", props, true);
         FieldUtils.writeDeclaredStaticField(BaseServlet.class, "startmsgFlag", false, true);
         SynchronizerTask synchronizerTask = mock(SynchronizerTask.class);
index 8bbf231..8c48d70 100755 (executable)
@@ -34,9 +34,6 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
 import javax.persistence.EntityManager;
 import javax.persistence.EntityManagerFactory;
 import javax.persistence.Persistence;
@@ -81,7 +78,6 @@ public class SynchronizerTaskTest {
     private CloseableHttpResponse response;
 
     private SynchronizerTask synchronizerTask;
-    private ExecutorService executorService;
 
     private static EntityManagerFactory emf;
     private static EntityManager em;
@@ -116,15 +112,10 @@ public class SynchronizerTaskTest {
 
         synchronizerTask = Mockito.spy(SynchronizerTask.getSynchronizer());
         doReturn(2).when(synchronizerTask).lookupState();
-
-        executorService = Executors.newCachedThreadPool();
-        executorService.execute(synchronizerTask);
     }
 
     @After
-    public void tearDown() throws InterruptedException {
-        executorService.shutdown();
-        executorService.awaitTermination(2, TimeUnit.SECONDS);
+    public void tearDown() {
     }
 
     @Test
@@ -193,6 +184,7 @@ public class SynchronizerTaskTest {
         Mockito.when(response.getStatusLine().getStatusCode()).thenReturn(200);
         Mockito.when(httpEntity.getContentType()).thenReturn(new BasicHeader("header", "application/vnd.dmaap-dr.provfeed-full; version=1.0"));
         mockResponseFromGet();
+        synchronizerTask.run();
     }
 
 
index 7f6d7de..a15509b 100644 (file)
 
 package org.onap.dmaap.datarouter.provisioning.utils;
 
+import static junit.framework.TestCase.assertTrue;
+import static org.junit.Assert.assertFalse;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.file.Files;
+import javax.persistence.EntityManager;
+import javax.persistence.EntityManagerFactory;
+import javax.persistence.Persistence;
+import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
+import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.rules.TemporaryFolder;
+import org.junit.Test;
 import org.junit.runner.RunWith;
-
 import org.onap.dmaap.datarouter.provisioning.InternalServlet;
 import org.onap.dmaap.datarouter.provisioning.beans.Parameters;
 import org.powermock.api.mockito.PowerMockito;
 import org.powermock.core.classloader.annotations.SuppressStaticInitializationFor;
 import org.powermock.modules.junit4.PowerMockRunner;
 
-import javax.persistence.EntityManager;
-import javax.persistence.EntityManagerFactory;
-import javax.persistence.Persistence;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-
-import static org.junit.Assert.assertFalse;
-
-import org.junit.Test;
-
-
-
 @RunWith(PowerMockRunner.class)
 @SuppressStaticInitializationFor("org.onap.dmaap.datarouter.provisioning.beans.Parameters")
 public class LogfileLoaderTest {
@@ -53,10 +50,18 @@ public class LogfileLoaderTest {
     private static EntityManagerFactory emf;
     private static EntityManager em;
     private LogfileLoader lfl = LogfileLoader.getLoader();
+    private File testLog;
 
-    @Rule
-    public TemporaryFolder folder = new TemporaryFolder();
+    @Before
+    public void setUp() throws Exception {
+        testLog = new File(System.getProperty("user.dir") + "/src/test/resources/IN.test_prov_logs");
+        prepFile(testLog);
+    }
 
+    @After
+    public void tearDown() throws IOException {
+        Files.deleteIfExists(testLog.toPath());
+    }
 
     @BeforeClass
     public static void init() {
@@ -68,7 +73,6 @@ public class LogfileLoaderTest {
         InternalServlet internalServlet = new InternalServlet();
     }
 
-
     @AfterClass
     public static void tearDownClass() {
         em.clear();
@@ -76,42 +80,47 @@ public class LogfileLoaderTest {
         emf.close();
     }
 
-
     @Test
-    public void Verify_File_Processing_when_Req_Type_LOG() throws IOException {
-        String fileContent = "2018-08-29-10-10-10-543.|LOG|1|1|url/file123|method|1|1|type|1|128.0.0.9|user123|2|1|1|1|other|1";
-        int[] actual = lfl.process(prepFile(fileContent, "file1"));
-        int[] expect = {0, 1};
+    public void Verify_File_Processing_Returns_Expected_Array() {
+        int[] actual = lfl.process(testLog);
+        int[] expect = {5, 7};
         Assert.assertArrayEquals(expect, actual);
+        Assert.assertNotNull(lfl.getBitSet());
+        Assert.assertTrue(lfl.isIdle());
     }
 
-
     @Test
-    public void Verify_File_Processing_when_Req_Type_EXP() throws IOException{
-        String fileContent = "2018-08-29-10-10-10-543.|EXP|1|1|1|'url/file123'|method|ctype|3|other|4";
-        int[] actual = lfl.process(prepFile(fileContent, "file2"));
-        int[] expect = {0, 1};
-        Assert.assertArrayEquals(expect, actual);
+    public void Verify_Records_Prune_When_Record_Count_Is_Less_Then_Threshold() {
+        lfl.process(testLog);
+        PowerMockito.mockStatic(Parameters.class);
+        PowerMockito.when(Parameters.getParameter(Parameters.PROV_LOG_RETENTION)).thenReturn(new Parameters(Parameters.PROV_LOG_RETENTION, "0"));
+        PowerMockito.when(Parameters.getParameter(Parameters.DEFAULT_LOG_RETENTION)).thenReturn(new Parameters(Parameters.DEFAULT_LOG_RETENTION, "1000000"));
+        assertFalse(lfl.pruneRecords());
     }
 
-
     @Test
-    public void Verify_Records_Prune_When_Record_Count_Is_Less_Then_Threshold() throws IOException{
-        String fileContent = "2018-08-29-10-10-10-543.|PUB|1|1|https://dmaap-dr-prov:8443/publish/1/file123/|POST|application/vnd.att-dr.feed|2|128.0.0.9|user123|200";
-        lfl.process(prepFile(fileContent, "file3"));
+    public void Verify_Records_Prune_When_Record_Count_Is_Greater_Then_Threshold() {
+        lfl.process(testLog);
         PowerMockito.mockStatic(Parameters.class);
         PowerMockito.when(Parameters.getParameter(Parameters.PROV_LOG_RETENTION)).thenReturn(new Parameters(Parameters.PROV_LOG_RETENTION, "0"));
-        assertFalse(lfl.pruneRecords());
+        PowerMockito.when(Parameters.getParameter(Parameters.DEFAULT_LOG_RETENTION)).thenReturn(new Parameters(Parameters.DEFAULT_LOG_RETENTION, "1"));
+        assertTrue(lfl.pruneRecords());
     }
 
 
-    private File prepFile(String content, String fileName) throws IOException{
-        File file1 = folder.newFile(fileName);
-        try (FileWriter fileWriter = new FileWriter(file1)) {
-            fileWriter.write(content);
-        }catch (IOException e){
+    private void prepFile(File logFile) {
+        String testLogs =           "2018-08-29-10-10-10-543.|LOG|1|1|https://dmaap-dr-prov:/url/file123|POST|application/vnd.att-dr.feed|100|mockType|file123|https://dmaap-dr-prov|user123|200|1|1|200|2|2\n"
+                                  + "2018-08-29-10-10-10-543.|EXP|1|1|1|'url/file123'|PUT|null|3|new reason|4\n"
+                                  + "2018-08-29-10-10-10-543.|PUB|1|1|https://dmaap-dr-prov:8443/publish/1/file123/|POST|application/vnd.att-dr.feed|2|128.0.0.9|user123|200\n"
+                                  + "2018-08-29-10-10-10-543.|PBF|1|1|https://dmaap-dr-prov:8443/publish/1/file123/|POST|application/vnd.att-dr.feed|100|100|128.0.0.9|user123|failed\n"
+                                  + "2018-08-29-10-10-10-543.|DLX|1|1|1|100|100\n"
+                                  + "2018-08-29-10-10-10-543.|Bad Record|||\n"
+                                  + "2018-08-29-10-10-10-543.|DEL|2|1|2|https://dmaap-dr-prov:8443/publish/1/file123/|PUT|application/vnd.att-dr.feed|100|user123|200|123456";
+        try (FileWriter fileWriter = new FileWriter(logFile)) {
+            fileWriter.write(testLogs);
+        }
+        catch (IOException e){
             System.out.println(e.getMessage());
         }
-        return file1;
     }
 }
index 9c63aea..cb47241 100755 (executable)
@@ -27,7 +27,7 @@ org.onap.dmaap.datarouter.db.url                           = jdbc:h2:mem:test;DB
 org.onap.dmaap.datarouter.provserver.isaddressauthenabled  = true
 org.onap.dmaap.datarouter.provserver.https.relaxation      = false
 org.onap.dmaap.datarouter.provserver.accesslog.dir         = unit-test-logs
-org.onap.dmaap.datarouter.provserver.spooldir              = unit-test-logs/spool
+org.onap.dmaap.datarouter.provserver.spooldir              = src/test/resources
 org.onap.dmaap.datarouter.provserver.localhost             = 127.0.0.1
 org.onap.dmaap.datarouter.provserver.passwordencryption    = PasswordEncryptionKey#@$%^&1234#