X-Git-Url: https://gerrit.onap.org/r/gitweb?p=dmaap%2Fdatarouter.git;a=blobdiff_plain;f=datarouter-node%2Fsrc%2Fmain%2Fjava%2Forg%2Fonap%2Fdmaap%2Fdatarouter%2Fnode%2FNodeConfig.java;h=d455f2d90ebb6fbeb09e39f06ef977009d6278fb;hp=265aafd34a1cb243009580f36ab1271548a6fa03;hb=8d97ad4cf9b468d22e435537a8a9f2946205f35a;hpb=fd418f91c04aa27ec647f8228c880ad0540719b6

diff --git a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
index 265aafd3..d455f2d9 100644
--- a/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
+++ b/datarouter-node/src/main/java/org/onap/dmaap/datarouter/node/NodeConfig.java
@@ -24,22 +24,29 @@
 package org.onap.dmaap.datarouter.node;
 
-import java.util.*;
-import java.io.*;
+import com.att.eelf.configuration.EELFLogger;
+import com.att.eelf.configuration.EELFManager;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Hashtable;
+import java.util.Vector;
 
 /**
  * Processed configuration for this node.
  * <p>
- * The NodeConfig represents a processed configuration from the Data Router
- * provisioning server. Each time configuration data is received from the
- * provisioning server, a new NodeConfig is created and the previous one
+ * The NodeConfig represents a processed configuration from the Data Router provisioning server. Each time
+ * configuration data is received from the provisioning server, a new NodeConfig is created and the previous one
  * discarded.
  */
 public class NodeConfig {
+    private static EELFLogger logger = EELFManager.getInstance().getLogger(NodeConfig.class);
 
     /**
      * Raw configuration entry for a data router node
      */
     public static class ProvNode {
+
         private String cname;
 
         /**
@@ -63,14 +70,15 @@ public class NodeConfig {
      * Raw configuration entry for a provisioning parameter
      */
     public static class ProvParam {
+
         private String name;
         private String value;
 
         /**
          * Construct a provisioning parameter configuration entry.
          *
-         * @param name The name of the parameter.
-         * @param value The value of the parameter.
+         * @param name  The name of the parameter.
+         * @param value The value of the parameter.
          */
         public ProvParam(String name, String value) {
             this.name = name;
@@ -96,21 +104,46 @@ public class NodeConfig {
      * Raw configuration entry for a data feed.
      */
     public static class ProvFeed {
+
         private String id;
         private String logdata;
         private String status;
+        private String createdDate;
+        /*
+         * AAF changes: TDP EPIC US# 307413
+         * Passing aafInstance from to identify legacy/AAF feeds
+         */
+        private String aafInstance;
 
         /**
          * Construct a feed configuration entry.
          *
-         * @param id The feed ID of the entry.
+         * @param id      The feed ID of the entry.
          * @param logdata String for log entries about the entry.
-         * @param status The reason why this feed cannot be used (Feed has been deleted, Feed has been suspended) or null if it is valid.
+         * @param status  The reason why this feed cannot be used (Feed has been deleted, Feed has been suspended) or
+         *                null if it is valid.
         */
-        public ProvFeed(String id, String logdata, String status) {
+        public ProvFeed(String id, String logdata, String status, String createdDate, String aafInstance) {
             this.id = id;
             this.logdata = logdata;
             this.status = status;
+            this.createdDate = createdDate;
+            this.aafInstance = aafInstance;
+        }
+
+        /**
+         * Get the created date of the data feed.
+         */
+        public String getCreatedDate()
+        {
+            return(createdDate);
+        }
+
+        /**
+         * Get the aafInstance of the data feed.
+         */
+        public String getAafInstance() {
+            return aafInstance;
         }
 
         /**
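Note: the expanded ProvFeed entry above now carries a created date and an AAF instance alongside the feed ID, log data and status. A minimal usage sketch with purely illustrative values; only the five-argument constructor and the two new getters come from the class above:

    // Illustrative values only; the signature and getters are those of ProvFeed above.
    NodeConfig.ProvFeed feed = new NodeConfig.ProvFeed(
            "10",                   // feed ID
            "feed10-loginfo",       // log data
            null,                   // status: null means the feed is usable
            "2019-01-01T12:00:00Z", // createdDate (new)
            "legacy");              // aafInstance (new); distinguishes legacy from AAF feeds
    String created = feed.getCreatedDate();
    String aafInstance = feed.getAafInstance();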
@@ -139,6 +172,7 @@ public class NodeConfig {
      * Raw configuration entry for a feed user.
      */
     public static class ProvFeedUser {
+
         private String feedid;
         private String user;
         private String credentials;
@@ -146,8 +180,8 @@ public class NodeConfig {
         /**
          * Construct a feed user configuration entry
          *
-         * @param feedid The feed id.
-         * @param user The user that will publish to the feed.
+         * @param feedid      The feed id.
+         * @param user        The user that will publish to the feed.
          * @param credentials The Authorization header the user will use to publish.
          */
         public ProvFeedUser(String feedid, String user, String credentials) {
@@ -182,6 +216,7 @@ public class NodeConfig {
      * Raw configuration entry for a feed subnet
      */
     public static class ProvFeedSubnet {
+
         private String feedid;
         private String cidr;
 
@@ -189,7 +224,7 @@ public class NodeConfig {
          * Construct a feed subnet configuration entry
          *
          * @param feedid The feed ID
-         * @param cidr The CIDR allowed to publish to the feed.
+         * @param cidr   The CIDR allowed to publish to the feed.
         */
         public ProvFeedSubnet(String feedid, String cidr) {
             this.feedid = feedid;
@@ -215,6 +250,7 @@ public class NodeConfig {
      * Raw configuration entry for a subscription
      */
     public static class ProvSubscription {
+
         private String subid;
         private String feedid;
         private String url;
@@ -222,19 +258,26 @@ public class NodeConfig {
         private String credentials;
         private boolean metaonly;
         private boolean use100;
+        private boolean privilegedSubscriber;
+        private boolean followRedirect;
+        private boolean decompress;
 
         /**
          * Construct a subscription configuration entry
          *
-         * @param subid The subscription ID
-         * @param feedid The feed ID
-         * @param url The base delivery URL (not including the fileid)
-         * @param authuser The user in the credentials used to deliver
-         * @param credentials The credentials used to authenticate to the delivery URL exactly as they go in the Authorization header.
-         * @param metaonly Is this a meta data only subscription?
-         * @param use100 Should we send Expect: 100-continue?
-         */
-        public ProvSubscription(String subid, String feedid, String url, String authuser, String credentials, boolean metaonly, boolean use100) {
+         * @param subid                The subscription ID
+         * @param feedid               The feed ID
+         * @param url                  The base delivery URL (not including the fileid)
+         * @param authuser             The user in the credentials used to deliver
+         * @param credentials          The credentials used to authenticate to the delivery URL exactly as they go in the
+         *                             Authorization header.
+         * @param metaonly             Is this a meta data only subscription?
+         * @param use100               Should we send Expect: 100-continue?
+         * @param privilegedSubscriber Can we wait to receive a delete file call before deleting file
+         * @param followRedirect       Is follow redirect of destination enabled?
+         * @param decompress           To see if they want their information compressed or decompressed
+         */
+        public ProvSubscription(String subid, String feedid, String url, String authuser, String credentials, boolean metaonly, boolean use100, boolean privilegedSubscriber, boolean followRedirect, boolean decompress) {
             this.subid = subid;
             this.feedid = feedid;
             this.url = url;
@@ -242,6 +285,9 @@ public class NodeConfig {
             this.credentials = credentials;
             this.metaonly = metaonly;
             this.use100 = use100;
+            this.privilegedSubscriber = privilegedSubscriber;
+            this.followRedirect = followRedirect;
+            this.decompress = decompress;
         }
 
         /**
@@ -292,12 +338,35 @@ public class NodeConfig {
         public boolean isUsing100() {
             return (use100);
         }
+
+        /**
+         * Can we wait to receive a delete file call before deleting file
+         */
+        public boolean isPrivilegedSubscriber() {
+            return (privilegedSubscriber);
+        }
+
+        /**
+         * Should i decompress the file before sending it on
+         */
+        public boolean isDecompress() {
+            return (decompress);
+        }
+
+        /**
+         * New field is added - FOLLOW_REDIRECTS feature iTrack:DATARTR-17 - 1706
+         * Get the followRedirect of this destination
+         */
+        boolean getFollowRedirect() {
+            return(followRedirect);
+        }
     }
 
     /**
      * Raw configuration entry for controlled ingress to the data router node
      */
     public static class ProvForceIngress {
+
         private String feedid;
         private String subnet;
         private String user;
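Note: a subscription entry now carries three extra flags. A hedged construction sketch with made-up values, following the ten-argument constructor and the new getters above:

    // Parameter order follows the ProvSubscription constructor above; the values are placeholders.
    NodeConfig.ProvSubscription sub = new NodeConfig.ProvSubscription(
            "20101",                                     // subid
            "10",                                        // feedid
            "https://subscriber.example.org:8443/recv",  // base delivery URL (no fileid)
            "subuser",                                   // authuser
            "Basic c3VidXNlcjpwYXNzd29yZA==",            // credentials as sent in the Authorization header
            false,                                       // metaonly
            true,                                        // use100: send Expect: 100-continue
            false,                                       // privilegedSubscriber: wait for a delete file call
            false,                                       // followRedirect
            true);                                       // decompress before delivery
    boolean holdsFileForDelete = sub.isPrivilegedSubscriber();
    boolean decompressOnDelivery = sub.isDecompress();
    // getFollowRedirect() is package-private, so only code in the same package can read that flag.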
@@ -307,15 +376,21 @@ public class NodeConfig {
          * Construct a forced ingress configuration entry
          *
          * @param feedid The feed ID that this entry applies to
-         * @param subnet The CIDR for which publisher IP addresses this entry applies to or "" if it applies to all publisher IP addresses
-         * @param user The publishing user this entry applies to or "" if it applies to all publishing users.
-         * @param nodes The array of FQDNs of the data router nodes to redirect publication attempts to.
+         * @param subnet The CIDR for which publisher IP addresses this entry applies to or "" if it applies to all
+         *               publisher IP addresses
+         * @param user   The publishing user this entry applies to or "" if it applies to all publishing users.
+         * @param nodes  The array of FQDNs of the data router nodes to redirect publication attempts to.
         */
         public ProvForceIngress(String feedid, String subnet, String user, String[] nodes) {
             this.feedid = feedid;
             this.subnet = subnet;
             this.user = user;
-            this.nodes = nodes;
+            //Sonar fix
+            if(nodes == null) {
+                this.nodes = new String[0];
+            } else {
+                this.nodes = Arrays.copyOf(nodes, nodes.length);
+            }
         }
 
         /**
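Note: the "Sonar fix" above stops ProvForceIngress from aliasing the caller's array: a null argument becomes an empty array and anything else is copied. The same idiom in isolation; the class and field names here are illustrative, not part of the change:

    import java.util.Arrays;

    class RedirectTargets {
        private final String[] nodes;

        RedirectTargets(String[] nodes) {
            // Keep an empty array for null input, otherwise a copy, so later changes to the
            // caller's array cannot alter this object's state.
            this.nodes = (nodes == null) ? new String[0] : Arrays.copyOf(nodes, nodes.length);
        }
    }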
@@ -351,6 +426,7 @@ public class NodeConfig {
      * Raw configuration entry for controlled egress from the data router
      */
     public static class ProvForceEgress {
+
         private String subid;
         private String node;
 
@@ -358,7 +434,7 @@ public class NodeConfig {
          * Construct a forced egress configuration entry
          *
          * @param subid The subscription ID the subscription with forced egress
-         * @param node The node handling deliveries for this subscription
+         * @param node  The node handling deliveries for this subscription
         */
         public ProvForceEgress(String subid, String node) {
             this.subid = subid;
@@ -384,6 +460,7 @@ public class NodeConfig {
      * Raw configuration entry for routing within the data router network
      */
     public static class ProvHop {
+
         private String from;
         private String to;
         private String via;
@@ -399,8 +476,8 @@ public class NodeConfig {
          * Construct a hop entry
          *
          * @param from The FQDN of the node with the data to be delivered
-         * @param to The FQDN of the node that will deliver to the subscriber
-         * @param via The FQDN of the node where the from node should send the data
+         * @param to   The FQDN of the node that will deliver to the subscriber
+         * @param via  The FQDN of the node where the from node should send the data
         */
         public ProvHop(String from, String to, String via) {
             this.from = from;
@@ -431,25 +508,30 @@ public class NodeConfig {
     }
 
     private static class Redirection {
+
         SubnetMatcher snm;
         String user;
         String[] nodes;
     }
 
     private static class Feed {
+
         String loginfo;
         String status;
         SubnetMatcher[] subnets;
         Hashtable<String, String> authusers = new Hashtable<String, String>();
         Redirection[] redirections;
         Target[] targets;
+        String createdDate;
+        String aafInstance;
     }
 
-    private Hashtable<String, String> params = new Hashtable<String, String>();
-    private Hashtable<String, Feed> feeds = new Hashtable<String, Feed>();
-    private Hashtable<String, DestInfo> nodeinfo = new Hashtable<String, DestInfo>();
-    private Hashtable<String, DestInfo> subinfo = new Hashtable<String, DestInfo>();
-    private Hashtable<String, IsFrom> nodes = new Hashtable<String, IsFrom>();
+    private Hashtable<String, String> params = new Hashtable<>();
+    private Hashtable<String, Feed> feeds = new Hashtable<>();
+    private Hashtable<String, DestInfo> nodeinfo = new Hashtable<>();
+    private Hashtable<String, DestInfo> subinfo = new Hashtable<>();
+    private Hashtable<String, IsFrom> nodes = new Hashtable<>();
+    private Hashtable<String, ProvSubscription> provSubscriptions = new Hashtable<>();
     private String myname;
    private String myauth;
     private DestInfo[] alldests;
@@ -458,10 +540,10 @@ public class NodeConfig {
     /**
      * Process the raw provisioning data to configure this node
      *
-     * @param pd The parsed provisioning data
-     * @param myname My name as seen by external systems
-     * @param spooldir The directory where temporary files live
-     * @param port The port number for URLs
+     * @param pd          The parsed provisioning data
+     * @param myname      My name as seen by external systems
+     * @param spooldir    The directory where temporary files live
+     * @param port        The port number for URLs
      * @param nodeauthkey The keying string used to generate node authentication credentials
      */
     public NodeConfig(ProvData pd, String myname, String spooldir, int port, String nodeauthkey) {
@@ -469,26 +551,29 @@ public class NodeConfig {
         for (ProvParam p : pd.getParams()) {
             params.put(p.getName(), p.getValue());
         }
-        Vector<DestInfo> div = new Vector<DestInfo>();
+        Vector<DestInfo> destInfos = new Vector<>();
         myauth = NodeUtils.getNodeAuthHdr(myname, nodeauthkey);
         for (ProvNode pn : pd.getNodes()) {
-            String cn = pn.getCName();
-            if (nodeinfo.get(cn) != null) {
+            String cName = pn.getCName();
+            if (nodeinfo.get(cName) != null) {
                 continue;
             }
-            String auth = NodeUtils.getNodeAuthHdr(cn, nodeauthkey);
-            DestInfo di = new DestInfo("n:" + cn, spooldir + "/n/" + cn, null, "n2n-" + cn, "https://" + cn + ":" + port + "/internal/publish", cn, myauth, false, true);
+            String auth = NodeUtils.getNodeAuthHdr(cName, nodeauthkey);
+            DestInfo di = new DestInfo.DestInfoBuilder().setName("n:" + cName).setSpool(spooldir + "/n/" + cName).setSubid(null)
+                .setLogdata("n2n-" + cName).setUrl("https://" + cName + ":" + port + "/internal/publish")
+                .setAuthuser(cName).setAuthentication(myauth).setMetaonly(false).setUse100(true)
+                .setPrivilegedSubscriber(false).setFollowRedirects(false).setDecompress(false).createDestInfo();
             (new File(di.getSpool())).mkdirs();
-            div.add(di);
-            nodeinfo.put(cn, di);
-            nodes.put(auth, new IsFrom(cn));
+            destInfos.add(di);
+            nodeinfo.put(cName, di);
+            nodes.put(auth, new IsFrom(cName));
         }
-        PathFinder pf = new PathFinder(myname, nodeinfo.keySet().toArray(new String[nodeinfo.size()]), pd.getHops());
-        Hashtable<String, Vector<Redirection>> rdtab = new Hashtable<String, Vector<Redirection>>();
+        PathFinder pf = new PathFinder(myname, nodeinfo.keySet().toArray(new String[0]), pd.getHops());
+        Hashtable<String, Vector<Redirection>> rdtab = new Hashtable<>();
         for (ProvForceIngress pfi : pd.getForceIngress()) {
             Vector<Redirection> v = rdtab.get(pfi.getFeedId());
             if (v == null) {
-                v = new Vector<Redirection>();
+                v = new Vector<>();
                 rdtab.put(pfi.getFeedId(), v);
             }
             Redirection r = new Redirection();
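Note: the node loop above replaces the removed positional DestInfo constructor with a DestInfo.DestInfoBuilder chain. A sketch of that call shape; the setter names are the ones visible in the hunk above, while cName, spooldir, port and myauth stand for the values computed in the surrounding loop:

    DestInfo nodeDest = new DestInfo.DestInfoBuilder()
            .setName("n:" + cName)
            .setSpool(spooldir + "/n/" + cName)
            .setSubid(null)                       // node-to-node destinations have no subscription ID
            .setLogdata("n2n-" + cName)
            .setUrl("https://" + cName + ":" + port + "/internal/publish")
            .setAuthuser(cName)
            .setAuthentication(myauth)            // this node's Authorization header
            .setMetaonly(false)
            .setUse100(true)
            .setPrivilegedSubscriber(false)
            .setFollowRedirects(false)
            .setDecompress(false)
            .createDestInfo();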
@@ -499,70 +584,73 @@ public class NodeConfig {
             r.nodes = pfi.getNodes();
             v.add(r);
         }
-        Hashtable<String, Hashtable<String, String>> pfutab = new Hashtable<String, Hashtable<String, String>>();
+        Hashtable<String, Hashtable<String, String>> pfutab = new Hashtable<>();
         for (ProvFeedUser pfu : pd.getFeedUsers()) {
             Hashtable<String, String> t = pfutab.get(pfu.getFeedId());
             if (t == null) {
-                t = new Hashtable<String, String>();
+                t = new Hashtable<>();
                 pfutab.put(pfu.getFeedId(), t);
             }
             t.put(pfu.getCredentials(), pfu.getUser());
         }
-        Hashtable<String, String> egrtab = new Hashtable<String, String>();
+        Hashtable<String, String> egrtab = new Hashtable<>();
         for (ProvForceEgress pfe : pd.getForceEgress()) {
             if (pfe.getNode().equals(myname) || nodeinfo.get(pfe.getNode()) == null) {
                 continue;
             }
             egrtab.put(pfe.getSubId(), pfe.getNode());
         }
-        Hashtable<String, Vector<SubnetMatcher>> pfstab = new Hashtable<String, Vector<SubnetMatcher>>();
+        Hashtable<String, Vector<SubnetMatcher>> pfstab = new Hashtable<>();
         for (ProvFeedSubnet pfs : pd.getFeedSubnets()) {
             Vector<SubnetMatcher> v = pfstab.get(pfs.getFeedId());
             if (v == null) {
-                v = new Vector<SubnetMatcher>();
+                v = new Vector<>();
                 pfstab.put(pfs.getFeedId(), v);
             }
             v.add(new SubnetMatcher(pfs.getCidr()));
         }
-        Hashtable<String, StringBuffer> ttab = new Hashtable<String, StringBuffer>();
-        HashSet<String> allfeeds = new HashSet<String>();
+        Hashtable<String, StringBuffer> feedTargets = new Hashtable<>();
+        HashSet<String> allfeeds = new HashSet<>();
         for (ProvFeed pfx : pd.getFeeds()) {
             if (pfx.getStatus() == null) {
                 allfeeds.add(pfx.getId());
             }
         }
-        for (ProvSubscription ps : pd.getSubscriptions()) {
-            String sid = ps.getSubId();
-            String fid = ps.getFeedId();
-            if (!allfeeds.contains(fid)) {
+        for (ProvSubscription provSubscription : pd.getSubscriptions()) {
+            String subId = provSubscription.getSubId();
+            String feedId = provSubscription.getFeedId();
+            if (!allfeeds.contains(feedId)) {
                 continue;
             }
-            if (subinfo.get(sid) != null) {
+            if (subinfo.get(subId) != null) {
                 continue;
             }
             int sididx = 999;
             try {
-                sididx = Integer.parseInt(sid);
+                sididx = Integer.parseInt(subId);
                 sididx -= sididx % 100;
             } catch (Exception e) {
+                logger.error("NODE0517 Exception NodeConfig: "+e);
             }
-            String siddir = sididx + "/" + sid;
-            DestInfo di = new DestInfo("s:" + sid, spooldir + "/s/" + siddir, sid, fid, ps.getURL(), ps.getAuthUser(), ps.getCredentials(), ps.isMetaDataOnly(), ps.isUsing100());
-            (new File(di.getSpool())).mkdirs();
-            div.add(di);
-            subinfo.put(sid, di);
-            String egr = egrtab.get(sid);
+            String subscriptionDirectory = sididx + "/" + subId;
+            DestInfo destinationInfo = new DestInfo("s:" + subId,
+                spooldir + "/s/" + subscriptionDirectory, provSubscription);
+            (new File(destinationInfo.getSpool())).mkdirs();
+            destInfos.add(destinationInfo);
+            provSubscriptions.put(subId, provSubscription);
+            subinfo.put(subId, destinationInfo);
+            String egr = egrtab.get(subId);
             if (egr != null) {
-                sid = pf.getPath(egr) + sid;
+                subId = pf.getPath(egr) + subId;
             }
-            StringBuffer sb = ttab.get(fid);
+            StringBuffer sb = feedTargets.get(feedId);
             if (sb == null) {
                 sb = new StringBuffer();
-                ttab.put(fid, sb);
+                feedTargets.put(feedId, sb);
             }
-            sb.append(' ').append(sid);
+            sb.append(' ').append(subId);
         }
-        alldests = div.toArray(new DestInfo[div.size()]);
+        alldests = destInfos.toArray(new DestInfo[0]);
         for (ProvFeed pfx : pd.getFeeds()) {
             String fid = pfx.getId();
             Feed f = feeds.get(fid);
@@ -571,13 +659,19 @@ public class NodeConfig {
             }
             f = new Feed();
             feeds.put(fid, f);
+            f.createdDate = pfx.getCreatedDate();
             f.loginfo = pfx.getLogData();
             f.status = pfx.getStatus();
+            /*
+             * AAF changes: TDP EPIC US# 307413
+             * Passing aafInstance from ProvFeed to identify legacy/AAF feeds
+             */
+            f.aafInstance = pfx.getAafInstance();
             Vector<SubnetMatcher> v1 = pfstab.get(fid);
             if (v1 == null) {
                 f.subnets = new SubnetMatcher[0];
             } else {
-                f.subnets = v1.toArray(new SubnetMatcher[v1.size()]);
+                f.subnets = v1.toArray(new SubnetMatcher[0]);
             }
             Hashtable<String, String> h1 = pfutab.get(fid);
             if (h1 == null) {
@@ -588,9 +682,9 @@ public class NodeConfig {
             if (v2 == null) {
                 f.redirections = new Redirection[0];
             } else {
-                f.redirections = v2.toArray(new Redirection[v2.size()]);
+                f.redirections = v2.toArray(new Redirection[0]);
             }
-            StringBuffer sb = ttab.get(fid);
+            StringBuffer sb = feedTargets.get(fid);
             if (sb == null) {
                 f.targets = new Target[0];
             } else {
@@ -646,14 +740,14 @@ public class NodeConfig {
                 }
             }
         }
-        return (tv.toArray(new Target[tv.size()]));
+        return (tv.toArray(new Target[0]));
     }
 
     /**
      * Check whether this is a valid node-to-node transfer
      *
      * @param credentials Credentials offered by the supposed node
-     * @param ip IP address the request came from
+     * @param ip          IP address the request came from
      */
     public boolean isAnotherNode(String credentials, String ip) {
         IsFrom n = nodes.get(credentials);
@@ -663,9 +757,9 @@ public class NodeConfig {
     /**
      * Check whether publication is allowed.
      *
-     * @param feedid The ID of the feed being requested.
+     * @param feedid      The ID of the feed being requested.
      * @param credentials The offered credentials
-     * @param ip The requesting IP address
+     * @param ip          The requesting IP address
      */
     public String isPublishPermitted(String feedid, String credentials, String ip) {
         Feed f = feeds.get(feedid);
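Note: both isPublishPermitted variants follow the same convention, visible in the method bodies shown below: a null return means the publish is allowed, and any non-null string is the reason it is not. A hedged caller-side sketch; nodeConfig, feedId, authHeader and remoteAddr are assumed to come from the servlet handling the request, which is not part of this diff:

    String reason = nodeConfig.isPublishPermitted(feedId, authHeader, remoteAddr);
    if (reason != null) {
        logger.info("Publish to feed " + feedId + " rejected: " + reason);
        // the caller would normally answer the publisher with an error response here
    }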
@@ -692,6 +786,42 @@ public class NodeConfig {
         return ("Publisher not permitted for this feed");
     }
 
+    /**
+     * Check whether delete file is allowed.
+     *
+     * @param subId The ID of the subscription being requested.
+     */
+    public boolean isDeletePermitted(String subId) {
+        ProvSubscription provSubscription = provSubscriptions.get(subId);
+        return provSubscription.isPrivilegedSubscriber();
+    }
+
+    /**
+     * Check whether publication is allowed for AAF Feed.
+     * @param feedid The ID of the feed being requested.
+     * @param ip The requesting IP address
+     */
+    public String isPublishPermitted(String feedid, String ip) {
+        Feed f = feeds.get(feedid);
+        String nf = "Feed does not exist";
+        if (f != null) {
+            nf = f.status;
+        }
+        if (nf != null) {
+            return(nf);
+        }
+        if (f.subnets.length == 0) {
+            return(null);
+        }
+        byte[] addr = NodeUtils.getInetAddress(ip);
+        for (SubnetMatcher snm: f.subnets) {
+            if (snm.matches(addr)) {
+                return(null);
+            }
+        }
+        return("Publisher not permitted for this feed");
+    }
+
     /**
      * Get authenticated user
      */
@@ -699,6 +829,16 @@ public class NodeConfig {
         return (feeds.get(feedid).authusers.get(credentials));
     }
 
+    /**
+     * AAF changes: TDP EPIC US# 307413
+     * Check AAF_instance for feed ID
+     * @param feedid The ID of the feed specified
+     */
+    public String getAafInstance(String feedid) {
+        Feed f = feeds.get(feedid);
+        return f.aafInstance;
+    }
+
     /**
      * Check if the request should be redirected to a different ingress node
     */
@@ -759,6 +899,16 @@ public class NodeConfig {
         return (f.targets);
     }
 
+    /**
+     * Get the creation date for a feed
+     * @param feedid The feed ID
+     * @return the timestamp of creation date of feed id passed
+     */
+    public String getCreatedDate(String feedid) {
+        Feed f = feeds.get(feedid);
+        return(f.createdDate);
+    }
+
     /**
      * Get the feed ID for a subscription
      *
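Note: taken together, the new accessors let a caller branch on whether a feed is AAF-protected before choosing a permission check. One plausible way to combine them; the dispatch shown is an assumption for illustration only, since the real decision logic lives in the servlet code and not in this file:

    String aafInstance = nodeConfig.getAafInstance(feedId);          // e.g. "legacy" or an AAF instance name
    String denial = "legacy".equalsIgnoreCase(aafInstance)
            ? nodeConfig.isPublishPermitted(feedId, credentials, remoteAddr)  // legacy feeds: credential + subnet check
            : nodeConfig.isPublishPermitted(feedId, remoteAddr);              // AAF feeds: subnet check only here
    if (denial == null) {
        logger.info("Feed " + feedId + " (created " + nodeConfig.getCreatedDate(feedId) + ") accepted the publish");
    }
    if (nodeConfig.isDeletePermitted(subId)) {
        // a privileged subscriber is expected to send a delete file call before the node removes the file
    }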