/*
 * ============LICENSE_START=======================================================
 * ================================================================================
 * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 */
21 package org.openecomp.aai.dbgen;
23 import java.io.BufferedReader;
24 import java.io.BufferedWriter;
26 import java.io.FileReader;
27 import java.io.FileWriter;
28 import java.io.IOException;
29 import java.util.ArrayList;
30 import java.util.Collection;
31 import java.util.HashMap;
32 import java.util.Iterator;
33 import java.util.LinkedHashSet;
34 import java.util.List;
36 import java.util.Map.Entry;
37 import java.util.Properties;
39 import java.util.UUID;
41 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
42 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
43 import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
44 import org.apache.tinkerpop.gremlin.structure.Direction;
45 import org.apache.tinkerpop.gremlin.structure.Edge;
46 import org.apache.tinkerpop.gremlin.structure.Graph;
47 import org.apache.tinkerpop.gremlin.structure.Property;
48 import org.apache.tinkerpop.gremlin.structure.Vertex;
49 import org.apache.tinkerpop.gremlin.structure.VertexProperty;
50 import org.openecomp.aai.db.props.AAIProperties;
51 import org.openecomp.aai.dbmap.AAIGraph;
52 import org.openecomp.aai.exceptions.AAIException;
53 import org.openecomp.aai.introspection.Introspector;
54 import org.openecomp.aai.introspection.Loader;
55 import org.openecomp.aai.introspection.LoaderFactory;
56 import org.openecomp.aai.introspection.ModelType;
57 import org.openecomp.aai.introspection.exceptions.AAIUnknownObjectException;
58 import org.openecomp.aai.logging.ErrorLogHelper;
59 import org.openecomp.aai.serialization.db.EdgeProperties;
60 import org.openecomp.aai.serialization.db.EdgeProperty;
61 import org.openecomp.aai.util.AAIConfig;
62 import org.openecomp.aai.util.AAIConstants;
63 import org.openecomp.aai.util.FormatDate;
65 import com.att.eelf.configuration.Configuration;
66 import com.att.eelf.configuration.EELFLogger;
67 import com.att.eelf.configuration.EELFManager;
68 import com.thinkaurelius.titan.core.TitanFactory;
69 import com.thinkaurelius.titan.core.TitanGraph;
72 public class DataGrooming {
74 private static EELFLogger LOGGER;
75 private static final String FROMAPPID = "AAI-DB";
76 private static final String TRANSID = UUID.randomUUID().toString();
77 private static int dupeGrpsDeleted = 0;
82 * @param args the arguments
84 public static void main(String[] args) {
86 // Set the logging file properties to be used by EELFManager
87 Properties props = System.getProperties();
88 props.setProperty(Configuration.PROPERTY_LOGGING_FILE_NAME, AAIConstants.AAI_DATA_GROOMING_LOGBACK_PROPS);
89 props.setProperty(Configuration.PROPERTY_LOGGING_FILE_PATH, AAIConstants.AAI_HOME_ETC_APP_PROPERTIES);
90 LOGGER = EELFManager.getInstance().getLogger(DataGrooming.class);
91 String ver = "version"; // Placeholder
92 Boolean doAutoFix = false;
93 Boolean edgesOnlyFlag = false;
94 Boolean dontFixOrphansFlag = false;
95 Boolean skipHostCheck = false;
96 Boolean singleCommits = false;
97 Boolean dupeCheckOff = false;
98 Boolean dupeFixOn = false;
99 Boolean ghost2CheckOff = false;
100 Boolean ghost2FixOn = false;
101 Boolean neverUseCache = false;
103 int maxRecordsToFix = AAIConstants.AAI_GROOMING_DEFAULT_MAX_FIX;
104 int sleepMinutes = AAIConstants.AAI_GROOMING_DEFAULT_SLEEP_MINUTES;
106 String maxFixStr = AAIConfig.get("aai.grooming.default.max.fix");
107 if( maxFixStr != null && !maxFixStr.equals("") ){
108 maxRecordsToFix = Integer.parseInt(maxFixStr);
110 String sleepStr = AAIConfig.get("aai.grooming.default.sleep.minutes");
111 if( sleepStr != null && !sleepStr.equals("") ){
112 sleepMinutes = Integer.parseInt(sleepStr);
115 catch ( Exception e ){
116 // Don't worry, we'll just use the defaults that we got from AAIConstants
117 LOGGER.warn("WARNING - could not pick up aai.grooming values from aaiconfig.properties file. ");
120 String prevFileName = "";
122 FormatDate fd = new FormatDate("yyyyMMddHHmm", "GMT");
123 String dteStr = fd.getDateTime();
124 String groomOutFileName = "dataGrooming." + dteStr + ".out";
126 if (args.length > 0) {
127 // They passed some arguments in that will affect processing
128 for (int i = 0; i < args.length; i++) {
129 String thisArg = args[i];
130 if (thisArg.equals("-edgesOnly")) {
131 edgesOnlyFlag = true;
132 } else if (thisArg.equals("-autoFix")) {
134 } else if (thisArg.equals("-skipHostCheck")) {
135 skipHostCheck = true;
136 } else if (thisArg.equals("-dontFixOrphans")) {
137 dontFixOrphansFlag = true;
138 } else if (thisArg.equals("-singleCommits")) {
139 singleCommits = true;
140 } else if (thisArg.equals("-dupeCheckOff")) {
142 } else if (thisArg.equals("-dupeFixOn")) {
144 } else if (thisArg.equals("-ghost2CheckOff")) {
145 ghost2CheckOff = true;
146 } else if (thisArg.equals("-neverUseCache")) {
147 neverUseCache = true;
148 } else if (thisArg.equals("-ghost2FixOn")) {
150 } else if (thisArg.equals("-maxFix")) {
152 if (i >= args.length) {
153 LOGGER.error(" No value passed with -maxFix option. ");
156 String nextArg = args[i];
158 maxRecordsToFix = Integer.parseInt(nextArg);
159 } catch (Exception e) {
160 LOGGER.error("Bad value passed with -maxFix option: ["
164 } else if (thisArg.equals("-sleepMinutes")) {
166 if (i >= args.length) {
167 LOGGER.error("No value passed with -sleepMinutes option.");
170 String nextArg = args[i];
172 sleepMinutes = Integer.parseInt(nextArg);
173 } catch (Exception e) {
174 LOGGER.error("Bad value passed with -sleepMinutes option: ["
178 } else if (thisArg.equals("-f")) {
180 if (i >= args.length) {
181 LOGGER.error(" No value passed with -f option. ");
184 prevFileName = args[i];
186 LOGGER.error(" Unrecognized argument passed to DataGrooming: ["
188 LOGGER.error(" Valid values are: -f -autoFix -maxFix -edgesOnly -dupeFixOn -donFixOrphans -sleepMinutes -neverUseCache");
196 LoaderFactory.createLoaderForVersion(ModelType.MOXY, AAIProperties.LATEST);
199 catch (Exception ex){
200 LOGGER.error("ERROR - Could not create loader", ex);
206 if (!prevFileName.equals("")) {
207 // They are trying to fix some data based on a data in a
209 LOGGER.info(" Call doTheGrooming() with a previous fileName ["
210 + prevFileName + "] for cleanup. ");
211 Boolean finalShutdownFlag = true;
212 Boolean cacheDbOkFlag = false;
213 doTheGrooming(prevFileName, edgesOnlyFlag, dontFixOrphansFlag,
214 maxRecordsToFix, groomOutFileName, ver, singleCommits,
215 dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
216 finalShutdownFlag, cacheDbOkFlag);
217 } else if (doAutoFix) {
218 // They want us to run the processing twice -- first to look for
219 // delete candidates, then after
220 // napping for a while, run it again and delete any candidates
221 // that were found by the first run.
222 // Note: we will produce a separate output file for each of the
224 LOGGER.info(" Doing an auto-fix call to Grooming. ");
225 LOGGER.info(" First, Call doTheGrooming() to look at what's out there. ");
226 Boolean finalShutdownFlag = false;
227 Boolean cacheDbOkFlag = true;
228 int fixCandCount = doTheGrooming("", edgesOnlyFlag,
229 dontFixOrphansFlag, maxRecordsToFix, groomOutFileName,
230 ver, singleCommits, dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
231 finalShutdownFlag, cacheDbOkFlag);
232 if (fixCandCount == 0) {
233 LOGGER.info(" No fix-Candidates were found by the first pass, so no second/fix-pass is needed. ");
235 // We'll sleep a little and then run a fix-pass based on the
236 // first-run's output file.
238 LOGGER.info("About to sleep for " + sleepMinutes
240 int sleepMsec = sleepMinutes * 60 * 1000;
241 Thread.sleep(sleepMsec);
242 } catch (InterruptedException ie) {
243 LOGGER.info("\n >>> Sleep Thread has been Interrupted <<< ");
247 dteStr = fd.getDateTime();
248 String secondGroomOutFileName = "dataGrooming." + dteStr
250 LOGGER.info(" Now, call doTheGrooming() a second time and pass in the name of the file "
251 + "generated by the first pass for fixing: ["
252 + groomOutFileName + "]");
253 finalShutdownFlag = true;
254 cacheDbOkFlag = false;
255 doTheGrooming(groomOutFileName, edgesOnlyFlag,
256 dontFixOrphansFlag, maxRecordsToFix,
257 secondGroomOutFileName, ver, singleCommits,
258 dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
259 finalShutdownFlag, cacheDbOkFlag);
262 // Do the grooming - plain vanilla (no fix-it-file, no
264 Boolean finalShutdownFlag = true;
265 LOGGER.info(" Call doTheGrooming() ");
266 Boolean cacheDbOkFlag = true;
268 // They have forbidden us from using a cached db connection.
269 cacheDbOkFlag = false;
271 doTheGrooming("", edgesOnlyFlag, dontFixOrphansFlag,
272 maxRecordsToFix, groomOutFileName, ver, singleCommits,
273 dupeCheckOff, dupeFixOn, ghost2CheckOff, ghost2FixOn,
274 finalShutdownFlag, cacheDbOkFlag);
276 } catch (Exception ex) {
277 LOGGER.error("Exception while grooming data", ex);
280 LOGGER.info(" Done! ");
288 * @param fileNameForFixing the file name for fixing
289 * @param edgesOnlyFlag the edges only flag
290 * @param dontFixOrphansFlag the dont fix orphans flag
291 * @param maxRecordsToFix the max records to fix
292 * @param groomOutFileName the groom out file name
293 * @param version the version
294 * @param singleCommits the single commits
295 * @param dupeCheckOff the dupe check off
296 * @param dupeFixOn the dupe fix on
297 * @param ghost2CheckOff the ghost 2 check off
298 * @param ghost2FixOn the ghost 2 fix on
299 * @param finalShutdownFlag the final shutdown flag
300 * @param cacheDbOkFlag the cacheDbOk flag
303 private static int doTheGrooming(String fileNameForFixing,
304 Boolean edgesOnlyFlag, Boolean dontFixOrphansFlag,
305 int maxRecordsToFix, String groomOutFileName, String version,
306 Boolean singleCommits,
307 Boolean dupeCheckOff, Boolean dupeFixOn,
308 Boolean ghost2CheckOff, Boolean ghost2FixOn,
309 Boolean finalShutdownFlag, Boolean cacheDbOkFlag) {
311 LOGGER.debug(" Entering doTheGrooming \n");
313 int cleanupCandidateCount = 0;
314 BufferedWriter bw = null;
315 TitanGraph graph = null;
316 TitanGraph graph2 = null;
318 boolean executeFinalCommit = false;
319 Set<String> deleteCandidateList = new LinkedHashSet<>();
320 Set<String> processedVertices = new LinkedHashSet<>();
325 String targetDir = AAIConstants.AAI_HOME + AAIConstants.AAI_FILESEP
326 + "logs" + AAIConstants.AAI_FILESEP + "data"
327 + AAIConstants.AAI_FILESEP + "dataGrooming";
329 // Make sure the target directory exists
330 new File(targetDir).mkdirs();
332 if (!fileNameForFixing.equals("")) {
333 deleteCandidateList = getDeleteList(targetDir,
334 fileNameForFixing, edgesOnlyFlag, dontFixOrphansFlag,
338 if (deleteCandidateList.size() > maxRecordsToFix) {
339 LOGGER.warn(" >> WARNING >> Delete candidate list size ("
340 + deleteCandidateList.size()
341 + ") is too big. The maxFix we are using is: "
343 + ". No candidates will be deleted. ");
344 // Clear out the list so it won't be processed below.
345 deleteCandidateList = new LinkedHashSet<>();
348 String fullOutputFileName = targetDir + AAIConstants.AAI_FILESEP
350 File groomOutFile = new File(fullOutputFileName);
352 groomOutFile.createNewFile();
353 } catch (IOException e) {
354 String emsg = " Problem creating output file ["
355 + fullOutputFileName + "], exception=" + e.getMessage();
356 throw new AAIException("AAI_6124", emsg);
359 LOGGER.info(" Will write to " + fullOutputFileName );
360 FileWriter fw = new FileWriter(groomOutFile.getAbsoluteFile());
361 bw = new BufferedWriter(fw);
362 ErrorLogHelper.loadProperties();
364 LOGGER.info(" ---- NOTE --- about to open graph (takes a little while)--------\n");
367 // Since we're just reading (not deleting/fixing anything), we can use
368 // a cached connection to the DB
369 graph = TitanFactory.open(AAIConstants.CACHED_DB_CONFIG);
372 graph = TitanFactory.open(AAIConstants.REALTIME_DB_CONFIG);
375 String emsg = "null graph object in DataGrooming\n";
376 throw new AAIException("AAI_6101", emsg);
379 LOGGER.debug(" Got the graph object. ");
381 g = graph.newTransaction();
383 String emsg = "null graphTransaction object in DataGrooming\n";
384 throw new AAIException("AAI_6101", emsg);
386 GraphTraversalSource source1 = g.traversal();
388 ArrayList<String> errArr = new ArrayList<>();
389 int totalNodeCount = 0;
390 HashMap<String, String> misMatchedHash = new HashMap<String, String>();
391 HashMap<String, Vertex> orphanNodeHash = new HashMap<String, Vertex>();
392 HashMap<String, Vertex> missingDepNodeHash = new HashMap<String, Vertex>();
393 HashMap<String, Edge> oneArmedEdgeHash = new HashMap<String, Edge>();
394 HashMap<String, String> emptyVertexHash = new HashMap<String, String>();
395 HashMap<String, Vertex> ghostNodeHash = new HashMap<String, Vertex>();
396 ArrayList<String> dupeGroups = new ArrayList<>();
398 Loader loader = LoaderFactory.createLoaderForVersion(ModelType.MOXY, AAIProperties.LATEST);
400 Set<Entry<String, Introspector>> entrySet = loader.getAllObjects().entrySet();
403 LOGGER.info(" Starting DataGrooming Processing ");
406 LOGGER.info(" NOTE >> Skipping Node processing as requested. Will only process Edges. << ");
409 for (Entry<String, Introspector> entry : entrySet) {
410 String nType = entry.getKey();
412 int thisNtDeleteCount = 0;
413 LOGGER.debug(" > Look at : [" + nType + "] ...");
414 ntList = ntList + "," + nType;
416 // Get a collection of the names of the key properties for this nodeType to use later
417 // Determine what the key fields are for this nodeType
418 Collection <String> keyProps = entry.getValue().getKeys();
420 // Get the types of nodes that this nodetype depends on for uniqueness (if any)
421 Collection <String> depNodeTypes = loader.introspectorFromName(nType).getDependentOn();
423 // Loop through all the nodes of this Node type
424 int lastShownForNt = 0;
425 ArrayList <Vertex> tmpList = new ArrayList <> ();
426 Iterator <Vertex> iterv = source1.V().has("aai-node-type",nType);
427 while (iterv.hasNext()) {
428 // We put the nodes into an ArrayList because the graph.query iterator can time out
429 tmpList.add(iterv.next());
432 Iterator <Vertex> iter = tmpList.iterator();
433 while (iter.hasNext()) {
436 if( thisNtCount == lastShownForNt + 250 ){
437 lastShownForNt = thisNtCount;
438 LOGGER.debug("count for " + nType + " so far = " + thisNtCount );
440 Vertex thisVtx = iter.next();
441 String thisVid = thisVtx.id().toString();
442 if (processedVertices.contains(thisVid)) {
443 LOGGER.debug("skipping already processed vertex: " + thisVid);
447 List <Vertex> secondGetList = new ArrayList <> ();
448 // -----------------------------------------------------------------------
449 // For each vertex of this nodeType, we want to:
450 // a) make sure that it can be retrieved using it's AAI defined key
451 // b) make sure that it is not a duplicate
452 // -----------------------------------------------------------------------
454 // For this instance of this nodeType, get the key properties
455 HashMap<String, Object> propHashWithKeys = new HashMap<>();
456 Iterator<String> keyPropI = keyProps.iterator();
457 while (keyPropI.hasNext()) {
458 String propName = keyPropI.next();
460 //delete an already deleted vertex
461 Object obj = thisVtx.<Object>property(propName).orElse(null);
463 propVal = obj.toString();
465 propHashWithKeys.put(propName, propVal);
468 // If this node is dependent on another for uniqueness, then do the query from that parent node
469 // Note - all of our nodes that are dependent on others for uniqueness are
470 // "children" of that node.
471 boolean depNodeOk = true;
472 if( depNodeTypes.isEmpty() ){
473 // This kind of node is not dependent on any other.
474 // Make sure we can get it back using it's key properties and that we only get one.
475 secondGetList = getNodeJustUsingKeyParams( TRANSID, FROMAPPID, source1, nType,
476 propHashWithKeys, version );
479 // This kind of node is dependent on another for uniqueness.
480 // Start at it's parent (the dependent vertex) and make sure we can get it
481 // back using it's key properties and that we only get one.
482 Iterator <Vertex> vertI2 = source1.V(thisVtx).union(__.inE().has(EdgeProperties.out(EdgeProperty.IS_PARENT), true).outV(), __.outE().has(EdgeProperties.in(EdgeProperty.IS_PARENT)).inV());
483 Vertex parentVtx = null;
485 while( vertI2 != null && vertI2.hasNext() ){
486 parentVtx = vertI2.next();
492 //List<Vertex> vertI2 = g.traversal().V(thisVtx).union(__.outE().has("isParent-REV",true).outV(),__.inE().has("isParent",true).inV()).toList();
493 //if( vertI2.isEmpty()){
495 // It's Missing it's dependent/parent node
497 boolean zeroEdges = false;
499 Iterator<Edge> tmpEdgeIter = thisVtx.edges(Direction.BOTH);
501 while( tmpEdgeIter.hasNext() ){
505 if( edgeCount == 0 ){
508 } catch (Exception ex) {
509 LOGGER.warn("WARNING from inside the for-each-vid-loop orphan-edges-check ", ex);
512 if (deleteCandidateList.contains(thisVid)) {
513 boolean okFlag = true;
515 processedVertices.add(thisVtx.id().toString());
519 } catch (Exception e) {
521 LOGGER.error("ERROR trying to delete missing-dep-node VID = " + thisVid, e);
524 LOGGER.info(" DELETED missing-dep-node VID = " + thisVid);
527 // We count nodes missing their depNodes two ways - the first if it has
528 // at least some edges, and the second if it has zero edges. Either
529 // way, they are effectively orphaned.
530 // NOTE - Only nodes that have dependent nodes are ever considered "orphaned".
532 missingDepNodeHash.put(thisVid, thisVtx);
535 orphanNodeHash.put(thisVid, thisVtx);
539 else if ( pCount > 1 ){
540 // Not sure how this could happen? Should we do something here?
544 // We found the parent - so use it to do the second-look.
545 // NOTE --- We're just going to do the same check from the other direction - because
546 // there could be duplicates or the pointer going the other way could be broken
547 ArrayList <Vertex> tmpListSec = new ArrayList <> ();
549 tmpListSec = getConnectedChildrenOfOneType( source1, parentVtx, nType ) ;
550 Iterator<Vertex> vIter = tmpListSec.iterator();
551 while (vIter.hasNext()) {
552 Vertex tmpV = vIter.next();
553 if( vertexHasTheseKeys(tmpV, propHashWithKeys) ){
554 secondGetList.add(tmpV);
560 if( depNodeOk && (secondGetList == null || secondGetList.size() == 0) ){
561 // We could not get the node back using it's own key info.
562 // So, it's a PHANTOM
563 if (deleteCandidateList.contains(thisVid)) {
564 boolean okFlag = true;
569 } catch (Exception e) {
571 LOGGER.error("ERROR trying to delete phantom VID = " + thisVid, e);
574 LOGGER.info(" DELETED VID = " + thisVid);
577 ghostNodeHash.put(thisVid, thisVtx);
580 else if( (secondGetList.size() > 1) && depNodeOk && !dupeCheckOff ){
581 // Found some DUPLICATES - need to process them
582 LOGGER.info(" - now check Dupes for this guy - ");
583 List<String> tmpDupeGroups = checkAndProcessDupes(
584 TRANSID, FROMAPPID, g, source1, version,
585 nType, secondGetList, dupeFixOn,
586 deleteCandidateList, singleCommits, dupeGroups, loader);
587 Iterator<String> dIter = tmpDupeGroups.iterator();
588 while (dIter.hasNext()) {
589 // Add in any newly found dupes to our running list
590 String tmpGrp = dIter.next();
591 LOGGER.info("Found set of dupes: [" + tmpGrp + "]");
592 dupeGroups.add(tmpGrp);
596 catch (AAIException e1) {
597 LOGGER.warn(" For nodeType = " + nType + " Caught exception", e1);
598 errArr.add(e1.getErrorObject().toString());
600 catch (Exception e2) {
601 LOGGER.warn(" For nodeType = " + nType
602 + " Caught exception", e2);
603 errArr.add(e2.getMessage());
605 }// try block to enclose looping of a single vertex
606 catch (Exception exx) {
607 LOGGER.warn("WARNING from inside the while-verts-loop ", exx);
610 } // while loop for each record of a nodeType
612 if ( (thisNtDeleteCount > 0) && singleCommits ) {
613 // NOTE - the singleCommits option is not used in normal processing
615 g = AAIGraph.getInstance().getGraph().newTransaction();
618 thisNtDeleteCount = 0;
619 LOGGER.info( " Processed " + thisNtCount + " records for [" + nType + "], " + totalNodeCount + " total overall. " );
621 }// While-loop for each node type
622 }// end of check to make sure we weren't only supposed to do edges
625 // --------------------------------------------------------------------------------------
626 // Now, we're going to look for one-armed-edges. Ie. an edge that
628 // been deleted (because a vertex on one side was deleted) but
629 // somehow was not deleted.
630 // So the one end of it points to a vertexId -- but that vertex is
632 // --------------------------------------------------------------------------------------
634 // To do some strange checking - we need a second graph object
635 LOGGER.debug(" ---- DEBUG --- about to open a SECOND graph (takes a little while)--------\n");
636 // Note - graph2 just reads - but we want it to use a fresh connection to
637 // the database, so we are NOT using the CACHED DB CONFIG here.
638 graph2 = TitanFactory.open(AAIConstants.REALTIME_DB_CONFIG);
639 if (graph2 == null) {
640 String emsg = "null graph2 object in DataGrooming\n";
641 throw new AAIException("AAI_6101", emsg);
643 LOGGER.debug("Got the graph2 object... \n");
645 g2 = graph2.newTransaction();
647 String emsg = "null graphTransaction2 object in DataGrooming\n";
648 throw new AAIException("AAI_6101", emsg);
651 ArrayList<Vertex> vertList = new ArrayList<>();
652 Iterator<Vertex> vItor3 = g.traversal().V();
653 // Gotta hold these in a List - or else HBase times out as you cycle
655 while (vItor3.hasNext()) {
656 Vertex v = vItor3.next();
661 Iterator<Vertex> vItor2 = vertList.iterator();
662 LOGGER.info(" Checking for bad edges --- ");
664 while (vItor2.hasNext()) {
669 } catch (Exception vex) {
670 LOGGER.warn(">>> WARNING trying to get next vertex on the vItor2 ");
675 String thisVertId = "";
677 thisVertId = v.id().toString();
678 } catch (Exception ev) {
679 LOGGER.warn("WARNING when doing getId() on a vertex from our vertex list. ");
682 if (ghostNodeHash.containsKey(thisVertId)) {
683 // This is a phantom node, so don't try to use it
684 LOGGER.info(" >> Skipping edge check for edges from vertexId = "
686 + ", since that guy is a Phantom Node");
689 if (counter == lastShown + 250) {
691 LOGGER.info("... Checking edges for vertex # "
694 Iterator<Edge> eItor = v.edges(Direction.BOTH);
695 while (eItor.hasNext()) {
701 } catch (Exception iex) {
702 LOGGER.warn(">>> WARNING trying to get next edge on the eItor ", iex);
708 } catch (Exception err) {
709 LOGGER.warn(">>> WARNING trying to get edge's In-vertex ", err);
713 Vertex ghost2 = null;
715 Boolean keysMissing = true;
716 Boolean cantGetUsingVid = false;
719 Object ob = vIn.<Object>property("aai-node-type").orElse(null);
721 vNtI = ob.toString();
722 keysMissing = anyKeyFieldsMissing(vNtI, vIn, loader);
727 vIdI = ob.toString();
728 vIdLong = Long.parseLong(vIdI);
731 if( ! ghost2CheckOff ){
732 Vertex connectedVert = g2.traversal().V(vIdLong).next();
733 if( connectedVert == null ) {
734 LOGGER.warn( "GHOST2 -- got NULL when doing getVertex for vid = " + vIdLong);
735 cantGetUsingVid = true;
737 // If we can NOT get this ghost with the SECOND graph-object,
738 // it is still a ghost since even though we can get data about it using the FIRST graph
741 ghost2 = g.traversal().V(vIdLong).next();
743 catch( Exception ex){
744 LOGGER.warn( "GHOST2 -- Could not get the ghost info for a bad edge for vtxId = " + vIdLong, ex);
746 if( ghost2 != null ){
747 ghostNodeHash.put(vIdI, ghost2);
750 }// end of the ghost2 checking
752 catch (Exception err) {
753 LOGGER.warn(">>> WARNING trying to get edge's In-vertex props ", err);
756 if (keysMissing || vIn == null || vNtI.equals("")
757 || cantGetUsingVid) {
758 // this is a bad edge because it points to a vertex
759 // that isn't there anymore or is corrupted
760 String thisEid = e.id().toString();
761 if (deleteCandidateList.contains(thisEid) || deleteCandidateList.contains(vIdI)) {
762 boolean okFlag = true;
763 if (!vIdI.equals("")) {
764 // try to get rid of the corrupted vertex
766 if( (ghost2 != null) && ghost2FixOn ){
773 // NOTE - the singleCommits option is not used in normal processing
775 g = AAIGraph.getInstance().getGraph().newTransaction();
778 } catch (Exception e1) {
780 LOGGER.warn("WARNING when trying to delete bad-edge-connected VERTEX VID = "
784 LOGGER.info(" DELETED vertex from bad edge = "
788 // remove the edge if we couldn't get the
793 // NOTE - the singleCommits option is not used in normal processing
795 g = AAIGraph.getInstance().getGraph().newTransaction();
798 } catch (Exception ex) {
799 // NOTE - often, the exception is just
800 // that this edge has already been
803 LOGGER.warn("WARNING when trying to delete edge = "
807 LOGGER.info(" DELETED edge = " + thisEid);
811 oneArmedEdgeHash.put(thisEid, e);
812 if ((vIn != null) && (vIn.id() != null)) {
813 emptyVertexHash.put(thisEid, vIn.id()
820 vOut = e.outVertex();
821 } catch (Exception err) {
822 LOGGER.warn(">>> WARNING trying to get edge's Out-vertex ");
828 cantGetUsingVid = false;
831 Object ob = vOut.<Object>property("aai-node-type").orElse(null);
833 vNtO = ob.toString();
834 keysMissing = anyKeyFieldsMissing(vNtO,
840 vIdO = ob.toString();
841 vIdLong = Long.parseLong(vIdO);
844 if( ! ghost2CheckOff ){
845 Vertex connectedVert = g2.traversal().V(vIdLong).next();
846 if( connectedVert == null ) {
847 cantGetUsingVid = true;
848 LOGGER.info( "GHOST2 -- got NULL when doing getVertex for vid = " + vIdLong);
849 // If we can get this ghost with the other graph-object, then get it -- it's still a ghost
851 ghost2 = g.traversal().V(vIdLong).next();
853 catch( Exception ex){
854 LOGGER.warn( "GHOST2 -- Could not get the ghost info for a bad edge for vtxId = " + vIdLong, ex);
856 if( ghost2 != null ){
857 ghostNodeHash.put(vIdO, ghost2);
861 } catch (Exception err) {
862 LOGGER.warn(">>> WARNING trying to get edge's Out-vertex props ", err);
865 if (keysMissing || vOut == null || vNtO.equals("")
866 || cantGetUsingVid) {
867 // this is a bad edge because it points to a vertex
868 // that isn't there anymore
869 String thisEid = e.id().toString();
870 if (deleteCandidateList.contains(thisEid) || deleteCandidateList.contains(vIdO)) {
871 boolean okFlag = true;
872 if (!vIdO.equals("")) {
873 // try to get rid of the corrupted vertex
875 if( (ghost2 != null) && ghost2FixOn ){
882 // NOTE - the singleCommits option is not used in normal processing
884 g = AAIGraph.getInstance().getGraph().newTransaction();
887 } catch (Exception e1) {
889 LOGGER.warn("WARNING when trying to delete bad-edge-connected VID = "
893 LOGGER.info(" DELETED vertex from bad edge = "
897 // remove the edge if we couldn't get the
902 // NOTE - the singleCommits option is not used in normal processing
904 g = AAIGraph.getInstance().getGraph().newTransaction();
907 } catch (Exception ex) {
908 // NOTE - often, the exception is just
909 // that this edge has already been
912 LOGGER.warn("WARNING when trying to delete edge = "
916 LOGGER.info(" DELETED edge = " + thisEid);
920 oneArmedEdgeHash.put(thisEid, e);
921 if ((vOut != null) && (vOut.id() != null)) {
922 emptyVertexHash.put(thisEid, vOut.id()
927 }// End of while-edges-loop
928 } catch (Exception exx) {
929 LOGGER.warn("WARNING from in the while-verts-loop ", exx);
931 }// End of while-vertices-loop
933 deleteCount = deleteCount + dupeGrpsDeleted;
934 if (!singleCommits && deleteCount > 0) {
936 LOGGER.info("About to do the commit for "
937 + deleteCount + " removes. ");
938 executeFinalCommit = true;
939 LOGGER.info("Commit was successful ");
940 } catch (Exception excom) {
941 LOGGER.error(" >>>> ERROR <<<< Could not commit changes. ", excom);
946 int ghostNodeCount = ghostNodeHash.size();
947 int orphanNodeCount = orphanNodeHash.size();
948 int missingDepNodeCount = missingDepNodeHash.size();
949 int oneArmedEdgeCount = oneArmedEdgeHash.size();
950 int dupeCount = dupeGroups.size();
952 deleteCount = deleteCount + dupeGrpsDeleted;
954 bw.write("\n\n ============ Summary ==============\n");
955 bw.write("Ran these nodeTypes: " + ntList + "\n\n");
956 bw.write("There were this many delete candidates from previous run = "
957 + deleteCandidateList.size() + "\n");
958 if (dontFixOrphansFlag) {
959 bw.write(" Note - we are not counting orphan nodes since the -dontFixOrphans parameter was used. \n");
961 bw.write("Deleted this many delete candidates = " + deleteCount
963 bw.write("Total number of nodes looked at = " + totalNodeCount
965 bw.write("Ghost Nodes identified = " + ghostNodeCount + "\n");
966 bw.write("Orphan Nodes identified = " + orphanNodeCount + "\n");
967 bw.write("Bad Edges identified = " + oneArmedEdgeCount + "\n");
968 bw.write("Missing Dependent Edge (but not orphaned) node count = "
969 + missingDepNodeCount + "\n");
970 bw.write("Duplicate Groups count = " + dupeCount + "\n");
971 bw.write("MisMatching Label/aai-node-type count = "
972 + misMatchedHash.size() + "\n");
974 bw.write("\n ------------- Delete Candidates ---------\n");
975 for (Map.Entry<String, Vertex> entry : ghostNodeHash
977 String vid = entry.getKey();
978 bw.write("DeleteCandidate: Phantom Vid = [" + vid + "]\n");
979 cleanupCandidateCount++;
981 for (Map.Entry<String, Vertex> entry : orphanNodeHash
983 String vid = entry.getKey();
984 bw.write("DeleteCandidate: OrphanDepNode Vid = [" + vid + "]\n");
985 if (!dontFixOrphansFlag) {
986 cleanupCandidateCount++;
989 for (Map.Entry<String, Edge> entry : oneArmedEdgeHash.entrySet()) {
990 String eid = entry.getKey();
991 bw.write("DeleteCandidate: Bad EDGE Edge-id = [" + eid + "]\n");
992 cleanupCandidateCount++;
994 for (Map.Entry<String, Vertex> entry : missingDepNodeHash
996 String vid = entry.getKey();
997 bw.write("DeleteCandidate: (maybe) missingDepNode Vid = ["
999 cleanupCandidateCount++;
1001 bw.write("\n-- NOTE - To see DeleteCandidates for Duplicates, you need to look in the Duplicates Detail section below.\n");
1003 bw.write("\n ------------- GHOST NODES - detail ");
1004 for (Map.Entry<String, Vertex> entry : ghostNodeHash
1007 String vid = entry.getKey();
1008 bw.write("\n ==> Phantom Vid = " + vid + "\n");
1009 ArrayList<String> retArr = showPropertiesForNode(
1010 TRANSID, FROMAPPID, entry.getValue());
1011 for (String info : retArr) {
1012 bw.write(info + "\n");
1015 retArr = showAllEdgesForNode(TRANSID, FROMAPPID,
1017 for (String info : retArr) {
1018 bw.write(info + "\n");
1020 } catch (Exception dex) {
1021 LOGGER.error("error trying to print detail info for a ghost-node: ", dex);
1025 bw.write("\n ------------- Missing Dependent Edge ORPHAN NODES - detail: ");
1026 for (Map.Entry<String, Vertex> entry : orphanNodeHash
1029 String vid = entry.getKey();
1030 bw.write("\n> Orphan Node Vid = " + vid + "\n");
1031 ArrayList<String> retArr = showPropertiesForNode(
1032 TRANSID, FROMAPPID, entry.getValue());
1033 for (String info : retArr) {
1034 bw.write(info + "\n");
1037 retArr = showAllEdgesForNode(TRANSID, FROMAPPID,
1039 for (String info : retArr) {
1040 bw.write(info + "\n");
1042 } catch (Exception dex) {
1043 LOGGER.error("error trying to print detail info for a Orphan Node /missing dependent edge", dex);
1047 bw.write("\n ------------- Missing Dependent Edge (but not orphan) NODES: ");
1048 for (Map.Entry<String, Vertex> entry : missingDepNodeHash
1051 String vid = entry.getKey();
1052 bw.write("\n> Missing edge to Dependent Node (but has edges) Vid = "
1054 ArrayList<String> retArr = showPropertiesForNode(
1055 TRANSID, FROMAPPID, entry.getValue());
1056 for (String info : retArr) {
1057 bw.write(info + "\n");
1060 retArr = showAllEdgesForNode(TRANSID, FROMAPPID,
1062 for (String info : retArr) {
1063 bw.write(info + "\n");
1065 } catch (Exception dex) {
1066 LOGGER.error("error trying to print detail info for a node missing its dependent edge but not an orphan", dex);
1070 bw.write("\n ------------- EDGES pointing to empty/bad vertices: ");
1071 for (Map.Entry<String, Edge> entry : oneArmedEdgeHash.entrySet()) {
1073 String eid = entry.getKey();
1074 Edge thisE = entry.getValue();
1075 String badVid = emptyVertexHash.get(eid);
1076 bw.write("\n> Edge pointing to bad vertex (Vid = "
1077 + badVid + ") EdgeId = " + eid + "\n");
1078 bw.write("Label: [" + thisE.label() + "]\n");
1079 Iterator<Property<Object>> pI = thisE.properties();
1080 while (pI.hasNext()) {
1081 Property<Object> propKey = pI.next();
1082 bw.write("Prop: [" + propKey + "], val = ["
1083 + propKey.value() + "]\n");
1085 } catch (Exception pex) {
1086 LOGGER.error("error trying to print empty/bad vertex data: ", pex);
1090 bw.write("\n ------------- Duplicates: ");
1091 Iterator<String> dupeIter = dupeGroups.iterator();
1092 int dupeSetCounter = 0;
1093 while (dupeIter.hasNext()) {
1095 String dset = (String) dupeIter.next();
1097 bw.write("\n --- Duplicate Group # " + dupeSetCounter
1098 + " Detail -----------\n");
1100 // We expect each line to have at least two vid's, followed
1101 // by the preferred one to KEEP
1102 String[] dupeArr = dset.split("\\|");
1103 ArrayList<String> idArr = new ArrayList<>();
1104 int lastIndex = dupeArr.length - 1;
1105 for (int i = 0; i <= lastIndex; i++) {
1106 if (i < lastIndex) {
1107 // This is not the last entry, it is one of the
1108 // dupes, so we want to show all its info
1109 bw.write(" >> Duplicate Group # "
1110 + dupeSetCounter + " Node # " + i
1112 String vidString = dupeArr[i];
1113 idArr.add(vidString);
1114 long longVertId = Long.parseLong(vidString);
1115 Iterator<Vertex> vtxIterator = g.vertices(longVertId);
1117 if (vtxIterator.hasNext()) {
1118 vtx = vtxIterator.next();
1120 ArrayList<String> retArr = showPropertiesForNode(TRANSID, FROMAPPID, vtx);
1121 for (String info : retArr) {
1122 bw.write(info + "\n");
1125 retArr = showAllEdgesForNode(TRANSID,
1127 for (String info : retArr) {
1128 bw.write(info + "\n");
1131 // This is the last entry which should tell us if we
1132 // have a preferred keeper
1133 String prefString = dupeArr[i];
1134 if (prefString.equals("KeepVid=UNDETERMINED")) {
1135 bw.write("\n For this group of duplicates, could not tell which one to keep.\n");
1136 bw.write(" >>> This group needs to be taken care of with a manual/forced-delete.\n");
1138 // If we know which to keep, then the prefString
1139 // should look like, "KeepVid=12345"
1140 String[] prefArr = prefString.split("=");
1141 if (prefArr.length != 2
1142 || (!prefArr[0].equals("KeepVid"))) {
1143 throw new Exception("Bad format. Expecting KeepVid=999999");
1145 String keepVidStr = prefArr[1];
1146 if (idArr.contains(keepVidStr)) {
1147 bw.write("\n The vertex we want to KEEP has vertexId = "
1149 bw.write("\n The others become delete candidates: \n");
1150 idArr.remove(keepVidStr);
1151 for (int x = 0; x < idArr.size(); x++) {
1152 cleanupCandidateCount++;
1153 bw.write("DeleteCandidate: Duplicate Vid = ["
1154 + idArr.get(x) + "]\n");
1157 throw new Exception("ERROR - Vertex Id to keep not found in list of dupes. dset = ["
1161 }// else we know which one to keep
1163 }// for each vertex in a group
1164 } catch (Exception dex) {
1165 LOGGER.error("error trying to print duplicate vertex data", dex);
1168 }// while - work on each group of dupes
1170 bw.write("\n ------------- Mis-matched Label/aai-node-type Nodes: \n ");
1171 for (Map.Entry<String, String> entry : misMatchedHash.entrySet()) {
1172 String msg = entry.getValue();
1173 bw.write("MixedMsg = " + msg + "\n");
1176 bw.write("\n ------------- Got these errors while processing: \n");
1177 Iterator<String> errIter = errArr.iterator();
1178 while (errIter.hasNext()) {
1179 String line = (String) errIter.next();
1180 bw.write(line + "\n");
1185 LOGGER.info("\n ------------- Done doing all the checks ------------ ");
1186 LOGGER.info("Output will be written to " + fullOutputFileName);
1188 if (cleanupCandidateCount > 0) {
1189 // Technically, this is not an error -- but we're throwing this
1190 // error so that hopefully a
1191 // monitoring system will pick it up and do something with it.
1192 throw new AAIException("AAI_6123", "See file: [" + fullOutputFileName
1193 + "] and investigate delete candidates. ");
1195 } catch (AAIException e) {
1196 LOGGER.error("Caught AAIException while grooming data", e);
1197 ErrorLogHelper.logException(e);
1198 } catch (Exception ex) {
1199 LOGGER.error("Caught exception while grooming data", ex);
1200 ErrorLogHelper.logError("AAI_6128", ex.getMessage() + ", resolve and rerun dataGrooming");
1206 } catch (IOException iox) {
1207 LOGGER.warn("Got an IOException trying to close bufferedWriter() \n", iox);
1211 if (g != null && g.tx().isOpen()) {
1212 // Any changes that worked correctly should have already done
1215 if (executeFinalCommit) {
1219 } catch (Exception ex) {
1220 // Don't throw anything because Titan sometimes is just saying that the graph is already closed
1221 LOGGER.warn("WARNING from final graphTransaction.rollback()", ex);
1225 if (g2 != null && g2.tx().isOpen()) {
1226 // Any changes that worked correctly should have already done
1230 } catch (Exception ex) {
1231 // Don't throw anything because Titan sometimes is just saying that the graph is already closed
1232 LOGGER.warn("WARNING from final graphTransaction2.rollback()", ex);
1236 if( finalShutdownFlag ){
1238 if( graph != null && graph.isOpen() ){
1242 } catch (Exception ex) {
1243 // Don't throw anything because Titan sometimes is just saying that the graph is already closed{
1244 LOGGER.warn("WARNING from final graph.shutdown()", ex);
1248 if( graph2 != null && graph2.isOpen() ){
1249 graph2.tx().close();
1252 } catch (Exception ex) {
1253 // Don't throw anything because Titan sometimes is just saying that the graph is already closed{
1254 LOGGER.warn("WARNING from final graph2.shutdown()", ex);
1260 return cleanupCandidateCount;
1262 }// end of doTheGrooming()
1266 * Vertex has these keys.
 * Checks whether the given vertex carries every property name/value pair
 * supplied in propHashWithKeys, comparing stored values by their toString() form.
1268 * @param tmpV the tmp V
1269 * @param propHashWithKeys the prop hash with keys
1270 * @return the boolean
1272 private static Boolean vertexHasTheseKeys( Vertex tmpV, HashMap <String, Object> propHashWithKeys) {
// Walk each expected key property and compare it with what is stored on the vertex.
1273 Iterator <?> it = propHashWithKeys.entrySet().iterator();
1274 while( it.hasNext() ){
1275 String propName = "";
1276 String propVal = "";
1277 Map.Entry <?,?>propEntry = (Map.Entry<?,?>)it.next();
1278 Object propNameObj = propEntry.getKey();
1279 if( propNameObj != null ){
1280 propName = propNameObj.toString();
1282 Object propValObj = propEntry.getValue();
1283 if( propValObj != null ){
1284 propVal = propValObj.toString();
// Pull the vertex's stored value for this key (null when the property is absent).
1286 Object checkValObj = tmpV.<Object>property(propName).orElse(null);
1287 if( checkValObj == null ) {
// Property missing on the vertex -> not a match (the return lives on lines elided from this extract).
1290 else if( !propVal.equals(checkValObj.toString()) ){
// Stored value differs from the expected value -> not a match.
1299 * Any key fields missing.
 * Determines whether the vertex is missing (or has an empty value for) any of
 * the key properties defined for its node type.
1301 * @param nType the n type
 * @param v the vertex whose key properties are checked
 * @param loader the Loader used to find the Introspector / key definitions for nType
1303 * @return the boolean
1305 private static Boolean anyKeyFieldsMissing(String nType, Vertex v, Loader loader) {
1308 Introspector obj = null;
1310 obj = loader.introspectorFromName(nType);
1311 } catch (AAIUnknownObjectException e) {
1312 // They gave us a non-empty nodeType but our NodeKeyProps does
1313 // not have data for it. Since we do not know what the
1314 // key params are for this type of node, we will just
1316 String emsg = " -- WARNING -- Unrecognized nodeType: [" + nType
1317 + "]. We cannot determine required keys for this nType. ";
1318 // NOTE - this will be caught below and a "false" returned
1319 throw new AAIException("AAI_6121", emsg);
1322 // Determine what the key fields are for this nodeType
1323 Collection <String> keyPropNamesColl = obj.getKeys();
1324 Iterator<String> keyPropI = keyPropNamesColl.iterator();
1325 while (keyPropI.hasNext()) {
1326 String propName = keyPropI.next();
// A key is considered "missing" when the property is absent OR stored as empty string.
1327 Object ob = v.<Object>property(propName).orElse(null);
1328 if (ob == null || ob.toString().equals("")) {
1329 // It is missing a key property
1333 } catch (AAIException e) {
1334 // Something was wrong -- but since we weren't able to check
1335 // the keys, we will not declare that it is missing keys.
1343 * Gets the delete list.
 * Reads a previous grooming-output file and collects the vertex/edge ids from
 * "DeleteCandidate" lines, subject to the filter flags below.
1345 * @param targetDir the target dir
1346 * @param fileName the file name
1347 * @param edgesOnlyFlag the edges only flag
1348 * @param dontFixOrphans the dont fix orphans
1349 * @param dupeFixOn the dupe fix on
1350 * @return the delete list
1351 * @throws AAIException the AAI exception
1353 private static Set<String> getDeleteList(String targetDir,
1354 String fileName, Boolean edgesOnlyFlag, Boolean dontFixOrphans,
1355 Boolean dupeFixOn) throws AAIException {
1357 // Look in the file for lines formated like we expect - pull out any
1358 // Vertex Id's to delete on this run
// LinkedHashSet: de-dupes ids while keeping file order.
1359 Set<String> delList = new LinkedHashSet<>();
1360 String fullFileName = targetDir + AAIConstants.AAI_FILESEP + fileName;
1361 BufferedReader br = null;
// NOTE(review): br is not closed on the lines visible in this extract —
// presumably closed in a finally block on elided lines; verify against the full file.
1364 br = new BufferedReader(new FileReader(fullFileName));
1365 String line = br.readLine();
1366 while (line != null) {
1367 if (!line.equals("") && line.startsWith("DeleteCandidate")) {
1368 if (edgesOnlyFlag && (!line.contains("Bad Edge"))) {
1369 // We're not going to process edge guys
1370 } else if (dontFixOrphans && line.contains("Orphan")) {
1371 // We're not going to process orphans
1372 } else if (!dupeFixOn && line.contains("Duplicate")) {
1373 // We're not going to process Duplicates
// Expected line shape: "DeleteCandidate: ... id = [12345]".
// "id = [" is 6 chars, hence begIndex + 6 lands on the first digit.
1375 int begIndex = line.indexOf("id = ");
1376 int endIndex = line.indexOf("]");
1377 String vidVal = line.substring(begIndex + 6, endIndex);
1378 delList.add(vidVal);
1381 line = br.readLine();
1384 } catch (IOException e) {
1385 throw new AAIException("AAI_6124", e, "Could not open input-file [" + fullFileName
1386 + "], exception= " + e.getMessage());
1391 }// end of getDeleteList
1394 * Gets the preferred dupe.
 * Given a list of vertices that all violate the same uniqueness constraint,
 * runs a pairwise "tournament" (via pickOneOfTwoDupes) to decide which single
 * vertex should be kept.  Returns null when no winner can be determined.
1396 * @param transId the trans id
1397 * @param fromAppId the from app id
 * @param g the graph traversal source used for edge inspection
1399 * @param dupeVertexList the dupe vertex list
1400 * @param ver the ver
 * @param loader the Loader used for key/dependency lookups
 * @return the preferred vertex to keep, or null when undetermined
1402 * @throws AAIException the AAI exception
1404 public static Vertex getPreferredDupe(String transId,
1405 String fromAppId, GraphTraversalSource g,
1406 ArrayList<Vertex> dupeVertexList, String ver, Loader loader)
1407 throws AAIException {
1409 // This method assumes that it is being passed a List of vertex objects
1411 // violate our uniqueness constraints.
1413 Vertex nullVtx = null;
1415 if (dupeVertexList == null) {
1418 int listSize = dupeVertexList.size();
1419 if (listSize == 0) {
// Exactly one vertex: trivially the one to keep.
1422 if (listSize == 1) {
1423 return (dupeVertexList.get(0));
// Tournament: current favourite vs. each remaining candidate in turn.
1426 Vertex vtxPreferred = null;
1427 Vertex currentFaveVtx = dupeVertexList.get(0);
1428 for (int i = 1; i < listSize; i++) {
1429 Vertex vtxB = dupeVertexList.get(i);
1430 vtxPreferred = pickOneOfTwoDupes(transId, fromAppId, g,
1431 currentFaveVtx, vtxB, ver, loader);
1432 if (vtxPreferred == null) {
1433 // We couldn't choose one
1436 currentFaveVtx = vtxPreferred;
1440 return (currentFaveVtx);
1442 } // end of getPreferredDupe()
1445 * Pick one of two dupes.
 * Decides which of two suspected-duplicate vertices should be kept.  The two
 * must share node type, all key-property values and (when the type is
 * dependent) the same parent; otherwise null is returned.  Ties between
 * identical edge-sets are broken by the lower vertex id; otherwise the vertex
 * whose connections are a superset of the other's wins.
1447 * @param transId the trans id
1448 * @param fromAppId the from app id
 * @param g the graph traversal source used to inspect connected nodes
1450 * @param vtxA the vtx A
1451 * @param vtxB the vtx B
1452 * @param ver the ver
 * @param loader the Loader used for key/dependency lookups
 * @return the vertex to keep, or null when no safe choice exists
1454 * @throws AAIException the AAI exception
1456 public static Vertex pickOneOfTwoDupes(String transId,
1457 String fromAppId, GraphTraversalSource g, Vertex vtxA,
1458 Vertex vtxB, String ver, Loader loader) throws AAIException {
1460 Vertex nullVtx = null;
1461 Vertex preferredVtx = null;
// NOTE(review): new Long(String) is a deprecated boxing constructor —
// Long.parseLong/valueOf would be the modern form (behavior unchanged).
1463 Long vidA = new Long(vtxA.id().toString());
1464 Long vidB = new Long(vtxB.id().toString());
1466 String vtxANodeType = "";
1467 String vtxBNodeType = "";
1468 Object objType = vtxA.<Object>property("aai-node-type").orElse(null);
1469 if (objType != null) {
1470 vtxANodeType = objType.toString();
1472 objType = vtxB.<Object>property("aai-node-type").orElse(null);
1473 if (objType != null) {
1474 vtxBNodeType = objType.toString();
// Guard: both must have the same, non-empty node type to even be dupes.
1477 if (vtxANodeType.equals("") || (!vtxANodeType.equals(vtxBNodeType))) {
1478 // Either they're not really dupes or there's some bad data - so
1483 // Check that node A and B both have the same key values (or else they
1485 // (We'll check dep-node later)
1486 // Determine what the key fields are for this nodeType
1487 Collection <String> keyProps = new ArrayList <>();
1489 keyProps = loader.introspectorFromName(vtxANodeType).getKeys();
1490 } catch (AAIUnknownObjectException e) {
1491 throw new AAIException("AAI_6105", "Required Property name(s) not found for nodeType = " + vtxANodeType + ")");
// Compare every key property value between A and B (as strings).
1494 Iterator<String> keyPropI = keyProps.iterator();
1495 while (keyPropI.hasNext()) {
1496 String propName = keyPropI.next();
1497 String vtxAKeyPropVal = "";
1498 objType = vtxA.<Object>property(propName).orElse(null);
1499 if (objType != null) {
1500 vtxAKeyPropVal = objType.toString();
1502 String vtxBKeyPropVal = "";
1503 objType = vtxB.<Object>property(propName).orElse(null);
1504 if (objType != null) {
1505 vtxBKeyPropVal = objType.toString();
1508 if (vtxAKeyPropVal.equals("")
1509 || (!vtxAKeyPropVal.equals(vtxBKeyPropVal))) {
1510 // Either they're not really dupes or they are missing some key
1511 // data - so don't pick one
1516 // Collect the vid's and aai-node-types of the vertices that each vertex
1517 // (A and B) is connected to.
1518 ArrayList<String> vtxIdsConn2A = new ArrayList<>();
1519 ArrayList<String> vtxIdsConn2B = new ArrayList<>();
// Maps aai-node-type -> connected vertex id (one entry per type).
1520 HashMap<String, String> nodeTypesConn2A = new HashMap<>();
1521 HashMap<String, String> nodeTypesConn2B = new HashMap<>();
1523 ArrayList<Vertex> vertListA = getConnectedNodes( g, vtxA );
1524 if (vertListA != null) {
1525 Iterator<Vertex> iter = vertListA.iterator();
1526 while (iter.hasNext()) {
1527 Vertex tvCon = iter.next();
1528 String conVid = tvCon.id().toString();
1530 objType = tvCon.<Object>property("aai-node-type").orElse(null);
1531 if (objType != null) {
1532 nt = objType.toString();
1534 nodeTypesConn2A.put(nt, conVid);
1535 vtxIdsConn2A.add(conVid);
// Same collection pass for vertex B.
1539 ArrayList<Vertex> vertListB = getConnectedNodes( g, vtxB );
1540 if (vertListB != null) {
1541 Iterator<Vertex> iter = vertListB.iterator();
1542 while (iter.hasNext()) {
1543 Vertex tvCon = iter.next();
1544 String conVid = tvCon.id().toString();
1546 objType = tvCon.<Object>property("aai-node-type").orElse(null);
1547 if (objType != null) {
1548 nt = objType.toString();
1550 nodeTypesConn2B.put(nt, conVid);
1551 vtxIdsConn2B.add(conVid);
1555 // 1 - If this kind of node needs a dependent node for uniqueness, then
1556 // verify that they both nodes
1557 // point to the same dependent node (otherwise they're not really
1559 // Note - there are sometimes more than one dependent node type since
1560 // one nodeType can be used in
1561 // different ways. But for a particular node, it will only have one
1562 // dependent node that it's
1564 Collection <String> depNodeTypes = loader.introspectorFromName(vtxANodeType).getDependentOn();
1566 if (depNodeTypes.isEmpty()) {
1567 // This kind of node is not dependent on any other. That is ok.
1569 String depNodeVtxId4A = "";
1570 String depNodeVtxId4B = "";
1571 Iterator<String> iter = depNodeTypes.iterator();
1572 while (iter.hasNext()) {
1573 String depNodeType = iter.next();
1574 if (nodeTypesConn2A.containsKey(depNodeType)) {
1575 // This is the dependent node type that vertex A is using
1576 depNodeVtxId4A = nodeTypesConn2A.get(depNodeType);
1578 if (nodeTypesConn2B.containsKey(depNodeType)) {
1579 // This is the dependent node type that vertex B is using
1580 depNodeVtxId4B = nodeTypesConn2B.get(depNodeType);
// Dependent nodes must match exactly, otherwise these are not true dupes.
1583 if (depNodeVtxId4A.equals("")
1584 || (!depNodeVtxId4A.equals(depNodeVtxId4B))) {
1585 // Either they're not really dupes or there's some bad data - so
1586 // don't pick either one
1591 if (vtxIdsConn2A.size() == vtxIdsConn2B.size()) {
1592 // 2 - If they both have edges to all the same vertices, then return
1593 // the one with the lower vertexId.
1594 boolean allTheSame = true;
1595 Iterator<String> iter = vtxIdsConn2A.iterator();
1596 while (iter.hasNext()) {
1597 String vtxIdConn2A = iter.next();
1598 if (!vtxIdsConn2B.contains(vtxIdConn2A)) {
1606 preferredVtx = vtxA;
1608 preferredVtx = vtxB;
1611 } else if (vtxIdsConn2A.size() > vtxIdsConn2B.size()) {
1612 // 3 - VertexA is connected to more things than vtxB.
1613 // We'll pick VtxA if its edges are a superset of vtxB's edges.
1614 boolean missingOne = false;
1615 Iterator<String> iter = vtxIdsConn2B.iterator();
1616 while (iter.hasNext()) {
1617 String vtxIdConn2B = iter.next();
1618 if (!vtxIdsConn2A.contains(vtxIdConn2B)) {
1624 preferredVtx = vtxA;
1626 } else if (vtxIdsConn2B.size() > vtxIdsConn2A.size()) {
1627 // 4 - VertexB is connected to more things than vtxA.
1628 // We'll pick VtxB if its edges are a superset of vtxA's edges.
1629 boolean missingOne = false;
1630 Iterator<String> iter = vtxIdsConn2A.iterator();
1631 while (iter.hasNext()) {
1632 String vtxIdConn2A = iter.next();
1633 if (!vtxIdsConn2B.contains(vtxIdConn2A)) {
1639 preferredVtx = vtxB;
// 5 - No superset relationship in either direction: we cannot choose.
1642 preferredVtx = nullVtx;
1645 return (preferredVtx);
1647 } // end of pickOneOfTwoDupes()
1650 * Check and process dupes.
 * Inspects a set of same-keyed, same-type vertices for duplicates, optionally
 * deleting the non-keepers (when dupeFixOn and the ids are on the
 * delete-candidate list).  Each returned element is a pipe-delimited group of
 * vids followed by either "KeepVid=<vid>" or "KeepVid=UNDETERMINED".
1652 * @param transId the trans id
1653 * @param fromAppId the from app id
 * @param g the graph (used for deletes)
 * @param source the traversal source (used for reads)
1655 * @param version the version
1656 * @param nType the n type
1657 * @param passedVertList the passed vert list
1658 * @param dupeFixOn the dupe fix on
1659 * @param deleteCandidateList the delete candidate list
1660 * @param singleCommits the single commits
1661 * @param alreadyFoundDupeGroups the already found dupe groups
1662 * @param dbMaps the db maps
1663 * @return the array list
1665 private static List<String> checkAndProcessDupes(String transId,
1666 String fromAppId, Graph g, GraphTraversalSource source, String version, String nType,
1667 List<Vertex> passedVertList, Boolean dupeFixOn,
1668 Set<String> deleteCandidateList, Boolean singleCommits,
1669 ArrayList<String> alreadyFoundDupeGroups, Loader loader ) {
1671 ArrayList<String> returnList = new ArrayList<>();
1672 ArrayList<Vertex> checkVertList = new ArrayList<>();
1673 ArrayList<String> alreadyFoundDupeVidArr = new ArrayList<>();
1674 Boolean noFilterList = true;
// Flatten the previously-found dupe groups into a vid filter list so we do not
// re-process vertices we have already grouped on an earlier pass.
1675 Iterator<String> afItr = alreadyFoundDupeGroups.iterator();
1676 while (afItr.hasNext()) {
1677 String dupeGrpStr = afItr.next();
1678 String[] dupeArr = dupeGrpStr.split("\\|");
1679 int lastIndex = dupeArr.length - 1;
1680 for (int i = 0; i < lastIndex; i++) {
1681 // Note: we don't want the last one...
// ... because the last element is the "KeepVid=..." marker, not a vid.
1682 String vidString = dupeArr[i];
1683 alreadyFoundDupeVidArr.add(vidString);
1684 noFilterList = false;
1688 // For a given set of Nodes that were found with a set of KEY
1689 // Parameters, (nodeType + key data) we will
1690 // see if we find any duplicate nodes that need to be cleaned up. Note -
1691 // it's legit to have more than one
1692 // node with the same key data if the nodes depend on a parent for
1693 // uniqueness -- as long as the two nodes
1694 // don't hang off the same Parent.
1695 // If we find duplicates, and we can figure out which of each set of
1696 // duplicates is the one that we
1697 // think should be preserved, we will record that. Whether we can tell
1698 // which one should be
1699 // preserved or not, we will return info about any sets of duplicates
1702 // Each element in the returned arrayList might look like this:
1703 // "1234|5678|keepVid=UNDETERMINED" (if there were 2 dupes, and we
1704 // couldn't figure out which one to keep)
1705 // or, "100017|200027|30037|keepVid=30037" (if there were 3 dupes and we
1706 // thought the third one was the one that should survive)
1708 // Because of the way the calling code loops over stuff, we can get the
1709 // same data multiple times - so we should
1710 // not process any vertices that we've already seen.
1713 Iterator<Vertex> pItr = passedVertList.iterator();
1714 while (pItr.hasNext()) {
1715 Vertex tvx = pItr.next();
1716 String passedId = tvx.id().toString();
1717 if (noFilterList || !alreadyFoundDupeVidArr.contains(passedId)) {
1718 // We haven't seen this one before - so we should check it.
1719 checkVertList.add(tvx);
// Fewer than two unseen vertices cannot form a duplicate group.
1723 if (checkVertList.size() < 2) {
1724 // Nothing new to check.
1728 if (loader.introspectorFromName(nType).isTopLevel()) {
1729 // If this was a node that does NOT depend on other nodes for
1730 // uniqueness, and we
1731 // found more than one node using its key -- record the found
1732 // vertices as duplicates.
1733 String dupesStr = "";
1734 for (int i = 0; i < checkVertList.size(); i++) {
1736 + ((checkVertList.get(i))).id()
// NOTE(review): 'dupesStr != ""' compares String references, not content;
// it only works because dupesStr was initialized from the interned literal "".
// isEmpty() would express the intent directly.
1739 if (dupesStr != "") {
1740 Vertex prefV = getPreferredDupe(transId, fromAppId,
1741 source, checkVertList, version, loader);
1742 if (prefV == null) {
1743 // We could not determine which duplicate to keep
1744 dupesStr = dupesStr + "KeepVid=UNDETERMINED";
1745 returnList.add(dupesStr);
1747 dupesStr = dupesStr + "KeepVid=" + prefV.id();
1748 Boolean didRemove = false;
1750 didRemove = deleteNonKeepersIfAppropriate(g,
1751 dupesStr, prefV.id().toString(),
1752 deleteCandidateList, singleCommits);
1757 // keep them on our list
1758 returnList.add(dupesStr);
1763 // More than one node have the same key fields since they may
1764 // depend on a parent node for uniqueness. Since we're finding
1765 // more than one, we want to check to see if any of the
1766 // vertices that have this set of keys (and are the same nodeType)
1767 // are also pointing at the same 'parent' node.
1768 // Note: for a given set of key data, it is possible that there
1769 // could be more than one set of duplicates.
1770 HashMap<String, ArrayList<Vertex>> vertsGroupedByParentHash = groupVertsByDepNodes(
1771 transId, fromAppId, source, version, nType,
1772 checkVertList, loader);
1773 for (Map.Entry<String, ArrayList<Vertex>> entry : vertsGroupedByParentHash
1775 ArrayList<Vertex> thisParentsVertList = entry
1777 if (thisParentsVertList.size() > 1) {
1778 // More than one vertex found with the same key info
1779 // hanging off the same parent/dependent node
1780 String dupesStr = "";
1781 for (int i = 0; i < thisParentsVertList.size(); i++) {
1783 + ((thisParentsVertList
1784 .get(i))).id() + "|";
// NOTE(review): same reference-comparison pattern as above.
1786 if (dupesStr != "") {
1787 Vertex prefV = getPreferredDupe(transId,
1788 fromAppId, source, thisParentsVertList,
1791 if (prefV == null) {
1792 // We could not determine which duplicate to
1794 dupesStr = dupesStr + "KeepVid=UNDETERMINED";
1795 returnList.add(dupesStr);
1797 Boolean didRemove = false;
1798 dupesStr = dupesStr + "KeepVid="
1799 + prefV.id().toString();
1801 didRemove = deleteNonKeepersIfAppropriate(
1802 g, dupesStr, prefV.id()
1804 deleteCandidateList, singleCommits);
1809 // keep them on our list
1810 returnList.add(dupesStr);
// Best-effort: any failure while grouping/deleting is logged and swallowed so
// one bad group does not abort the whole grooming run.
1817 } catch (Exception e) {
1818 LOGGER.warn(" >>> Threw an error in checkAndProcessDupes - just absorb this error and move on. ", e);
1823 }// End of checkAndProcessDupes()
1826 * Group verts by dep nodes.
 * Groups same-keyed vertices by the parent/dependent vertex they hang off,
 * returning a map of parent-vid -> list of child vertices under that parent.
1828 * @param transId the trans id
1829 * @param fromAppId the from app id
 * @param g the graph traversal source used to walk to parents
1831 * @param version the version
1832 * @param nType the n type
1833 * @param passedVertList the passed vert list
1834 * @param dbMaps the db maps
1835 * @return the hash map
1836 * @throws AAIException the AAI exception
1838 private static HashMap<String, ArrayList<Vertex>> groupVertsByDepNodes(
1839 String transId, String fromAppId, GraphTraversalSource g, String version,
1840 String nType, ArrayList<Vertex> passedVertList, Loader loader)
1841 throws AAIException {
1842 // Given a list of Titan Vertices of one nodeType (see AAI-8956), group
1843 // them together by the parent node they depend on.
1844 // Ie. if given a list of ip address nodes (assumed to all have the
1845 // same key info) they might sit under several different parent vertices.
1846 // Under Normal conditions, there would only be one per parent -- but
1847 // we're trying to find duplicates - so we
1848 // allow for the case where more than one is under the same parent node.
1850 HashMap<String, ArrayList<Vertex>> retHash = new HashMap<String, ArrayList<Vertex>>();
// Top-level types have no parent for uniqueness -- nothing to group by.
1851 if (loader.introspectorFromName(nType).isTopLevel()) {
1852 // This method really should not have been called if this is not the
1854 // that depends on a parent for uniqueness, so just return the empty
1859 // Find out what types of nodes the passed in nodes can depend on
1860 ArrayList<String> depNodeTypeL = new ArrayList<>();
1861 Collection<String> depNTColl = loader.introspectorFromName(nType).getDependentOn();
1862 Iterator<String> ntItr = depNTColl.iterator();
1863 while (ntItr.hasNext()) {
1864 depNodeTypeL.add(ntItr.next());
1866 // For each vertex, we want find its depended-on/parent vertex so we
1867 // can track what other vertexes that are dependent on that same guy.
1868 if (passedVertList != null) {
1869 Iterator<Vertex> iter = passedVertList.iterator();
1870 while (iter.hasNext()) {
1871 Vertex thisVert = iter.next();
1872 Vertex tmpParentVtx = getConnectedParent( g, thisVert );
1873 if( tmpParentVtx != null ) {
1874 String parentNt = null;
1875 Object obj = tmpParentVtx.<Object>property("aai-node-type").orElse(null);
1877 parentNt = obj.toString();
// Only accept the parent when its node type is one this nType depends on.
1879 if (depNTColl.contains(parentNt)) {
1880 // This must be the parent/dependent node
1881 String parentVid = tmpParentVtx.id().toString();
1882 if (retHash.containsKey(parentVid)) {
1883 // add this vert to the list for this parent key
1884 retHash.get(parentVid).add(thisVert);
1886 // This is the first one we found on this parent
1887 ArrayList<Vertex> vList = new ArrayList<>();
1888 vList.add(thisVert);
1889 retHash.put(parentVid, vList);
1898 }// end of groupVertsByDepNodes()
1901 * Delete non keepers if appropriate.
 * Given a dupe-group string ("vid1|vid2|KeepVid=vid2"), deletes every vid that
 * is NOT the keeper -- but only those vids that appear on the
 * delete-candidate list from a previous grooming run.
 * @param g the graph used to look up and remove vertices
1904 * @param dupeInfoString the dupe info string
1905 * @param vidToKeep the vid to keep
1906 * @param deleteCandidateList the delete candidate list
1907 * @param singleCommits the single commits
1908 * @return the boolean
 *         true when at least one vertex was actually deleted
1910 private static Boolean deleteNonKeepersIfAppropriate(Graph g,
1911 String dupeInfoString, String vidToKeep,
1912 Set<String> deleteCandidateList, Boolean singleCommits) {
1914 Boolean deletedSomething = false;
1915 // This assumes that the dupeInfoString is in the format of
1916 // pipe-delimited vid's followed by
1917 // ie. "3456|9880|keepVid=3456"
1918 if (deleteCandidateList == null || deleteCandidateList.size() == 0) {
1919 // No vid's on the candidate list -- so no deleting will happen on
1924 String[] dupeArr = dupeInfoString.split("\\|");
1925 ArrayList<String> idArr = new ArrayList<>();
1926 int lastIndex = dupeArr.length - 1;
1927 for (int i = 0; i <= lastIndex; i++) {
1928 if (i < lastIndex) {
1929 // This is not the last entry, it is one of the dupes,
1930 String vidString = dupeArr[i];
1931 idArr.add(vidString);
1933 // This is the last entry which should tell us if we have a
1935 String prefString = dupeArr[i];
1936 if (prefString.equals("KeepVid=UNDETERMINED")) {
1937 // They sent us a bad string -- nothing should be deleted if
1938 // no dupe could be tagged as preferred
1941 // If we know which to keep, then the prefString should look
1942 // like, "KeepVid=12345"
1943 String[] prefArr = prefString.split("=");
1944 if (prefArr.length != 2 || (!prefArr[0].equals("KeepVid"))) {
1945 LOGGER.error("Bad format. Expecting KeepVid=999999");
1948 String keepVidStr = prefArr[1];
1949 if (idArr.contains(keepVidStr)) {
// Drop the keeper from the list; everything left is a delete target.
1950 idArr.remove(keepVidStr);
1952 // So now, the idArr should just contain the vid's
1953 // that we want to remove.
1954 for (int x = 0; x < idArr.size(); x++) {
1955 boolean okFlag = true;
1956 String thisVid = idArr.get(x);
1957 if (deleteCandidateList.contains(thisVid)) {
1958 // This vid is a valid delete candidate from
1959 // a prev. run, so we can remove it.
1961 long longVertId = Long
1962 .parseLong(thisVid);
1964 .traversal().V(longVertId).next();
1966 if (singleCommits) {
1967 // NOTE - the singleCommits option is not used in normal processing
// Re-point g at a fresh transaction so each delete commits on its own.
1969 g = AAIGraph.getInstance().getGraph().newTransaction();
1971 } catch (Exception e) {
1973 LOGGER.error("ERROR trying to delete VID = " + thisVid, e);
1976 LOGGER.info(" DELETED VID = " + thisVid);
1977 deletedSomething = true;
1982 LOGGER.error("ERROR - Vertex Id to keep not found in list of dupes. dupeInfoString = ["
1983 + dupeInfoString + "]");
1987 }// else we know which one to keep
1989 }// for each vertex in a group
1991 return deletedSomething;
1993 }// end of deleteNonKeepersIfAppropriate()
1997 * Gets the node just using key params.
 * Looks up vertices of the given nodeType matching ALL key properties in
 * keyPropsHash.  Supports 1 to 4 key properties; more than 4 raises AAI_6114.
1999 * @param transId the trans id
2000 * @param fromAppId the from app id
2001 * @param graph the graph
2002 * @param nodeType the node type
2003 * @param keyPropsHash the key props hash
2004 * @param apiVersion the api version
2005 * @return the node just using key params
 *         (possibly-empty list; never null on the visible code path)
2006 * @throws AAIException the AAI exception
2008 public static List <Vertex> getNodeJustUsingKeyParams( String transId, String fromAppId, GraphTraversalSource graph, String nodeType,
2009 HashMap<String,Object> keyPropsHash, String apiVersion ) throws AAIException{
2011 List <Vertex> retVertList = new ArrayList <> ();
2013 // We assume that all NodeTypes have at least one key-property defined.
2014 // Note - instead of key-properties (the primary key properties), a user could pass
2015 // alternate-key values if they are defined for the nodeType.
2016 List<String> kName = new ArrayList<>();
2017 List<Object> kVal = new ArrayList<>();
2018 if( keyPropsHash == null || keyPropsHash.isEmpty() ) {
2019 throw new AAIException("AAI_6120", " NO key properties passed for this getNodeJustUsingKeyParams() request. NodeType = [" + nodeType + "]. ");
// Copy the map into parallel name/value lists so keys can be addressed by index.
2023 for( Map.Entry<String, Object> entry : keyPropsHash.entrySet() ){
2025 kName.add(i, entry.getKey());
2026 kVal.add(i, entry.getValue());
// topPropIndex is the index of the LAST key, i.e. 0 means exactly one key.
2028 int topPropIndex = i;
2030 String propsAndValuesForMsg = "";
2031 Iterator <Vertex> verts = null;
// One explicit has()-chain per supported key count (1..4 keys).
2034 if( topPropIndex == 0 ){
2035 propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ") ";
2036 verts= graph.V().has(kName.get(0),kVal.get(0)).has("aai-node-type",nodeType);
2038 else if( topPropIndex == 1 ){
2039 propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", "
2040 + kName.get(1) + " = " + kVal.get(1) + ") ";
2041 verts = graph.V().has(kName.get(0),kVal.get(0)).has(kName.get(1),kVal.get(1)).has("aai-node-type",nodeType);
2043 else if( topPropIndex == 2 ){
2044 propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", "
2045 + kName.get(1) + " = " + kVal.get(1) + ", "
2046 + kName.get(2) + " = " + kVal.get(2) + ") ";
2047 verts= graph.V().has(kName.get(0),kVal.get(0)).has(kName.get(1),kVal.get(1)).has(kName.get(2),kVal.get(2)).has("aai-node-type",nodeType);
2049 else if( topPropIndex == 3 ){
2050 propsAndValuesForMsg = " (" + kName.get(0) + " = " + kVal.get(0) + ", "
2051 + kName.get(1) + " = " + kVal.get(1) + ", "
2052 + kName.get(2) + " = " + kVal.get(2) + ", "
2053 + kName.get(3) + " = " + kVal.get(3) + ") ";
2054 verts= graph.V().has(kName.get(0),kVal.get(0)).has(kName.get(1),kVal.get(1)).has(kName.get(2),kVal.get(2)).has(kName.get(3),kVal.get(3)).has("aai-node-type",nodeType);
2057 throw new AAIException("AAI_6114", " We only support 4 keys per nodeType for now \n");
// Lookup failures are logged, not rethrown; an empty result list is returned instead.
2060 catch( Exception ex ){
2061 LOGGER.error( " ERROR trying to get node for: [" + propsAndValuesForMsg + "]", ex);
2064 if( verts != null ){
2065 while( verts.hasNext() ){
2067 retVertList.add(tiV);
2071 if( retVertList.size() == 0 ){
2072 LOGGER.debug("DEBUG No node found for nodeType = [" + nodeType +
2073 "], propsAndVal = " + propsAndValuesForMsg );
2078 }// End of getNodeJustUsingKeyParams()
2081 * Show all edges for node.
2083 * @param transId the trans id
2084 * @param fromAppId the from app id
2085 * @param tVert the t vert
2086 * @return the array list
2088 private static ArrayList <String> showAllEdgesForNode( String transId, String fromAppId, Vertex tVert ){
2090 ArrayList <String> retArr = new ArrayList <> ();
2091 Iterator <Edge> eI = tVert.edges(Direction.IN);
2092 if( ! eI.hasNext() ){
2093 retArr.add("No IN edges were found for this vertex. ");
2095 while( eI.hasNext() ){
2096 Edge ed = eI.next();
2097 String lab = ed.label();
2099 if (tVert.equals(ed.inVertex())) {
2100 vtx = ed.outVertex();
2102 vtx = ed.inVertex();
2105 retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
2108 String nType = vtx.<String>property("aai-node-type").orElse(null);
2109 String vid = vtx.id().toString();
2110 retArr.add("Found an IN edge (" + lab + ") to this vertex from a [" + nType + "] node with VtxId = " + vid );
2115 eI = tVert.edges(Direction.OUT);
2116 if( ! eI.hasNext() ){
2117 retArr.add("No OUT edges were found for this vertex. ");
2119 while( eI.hasNext() ){
2120 Edge ed = eI.next();
2121 String lab = ed.label();
2123 if (tVert.equals(ed.inVertex())) {
2124 vtx = ed.outVertex();
2126 vtx = ed.inVertex();
2129 retArr.add(" >>> COULD NOT FIND VERTEX on the other side of this edge edgeId = " + ed.id() + " <<< ");
2132 String nType = vtx.<String>property("aai-node-type").orElse(null);
2133 String vid = vtx.id().toString();
2134 retArr.add("Found an OUT edge (" + lab + ") from this vertex to a [" + nType + "] node with VtxId = " + vid );
/**
 * Builds a human-readable report of all properties stored on the given vertex:
 * a header line with the node-type and vertex id, followed by one line per property.
 *
 * @param transId the transaction id (not used in the body shown here)
 * @param fromAppId the calling application id (not used in the body shown here)
 * @param tVert the vertex whose properties are reported; may be null
 * @return report lines; if tVert is null, a single diagnostic line
 */
2149 private static ArrayList <String> showPropertiesForNode( String transId, String fromAppId, Vertex tVert ){
2151 ArrayList <String> retArr = new ArrayList <> ();
2152 if( tVert == null ){
2153 retArr.add("null Node object passed to showPropertiesForNode()\n");
// NOTE(review): lines elided from this view presumably return retArr here,
// since the code below dereferences tVert -- confirm against the full file.
2156 String nodeType = "";
2157 Object ob = tVert.<Object>property("aai-node-type").orElse(null);
// NOTE(review): elided lines presumably handle ob == null (leaving nodeType
// as "" or setting a placeholder) before the toString() below -- confirm.
2162 nodeType = ob.toString();
2165 retArr.add(" AAINodeType/VtxID for this Node = [" + nodeType + "/" + tVert.id() + "]");
2166 retArr.add(" Property Detail: ");
// Walk every vertex property and report its key and value.
2167 Iterator<VertexProperty<Object>> pI = tVert.properties();
2168 while( pI.hasNext() ){
2169 VertexProperty<Object> tp = pI.next();
2170 Object val = tp.value();
2171 retArr.add("Prop: [" + tp.key() + "], val = [" + val + "] ");
/**
 * Collects every vertex directly connected to startVtx by an edge in
 * either direction (via g.V(startVtx).both()).
 *
 * @param g graph traversal source to query
 * @param startVtx vertex whose neighbors are wanted; null is handled by the guard below
 * @return neighbor vertices, one entry per connecting edge
 * @throws AAIException declared by the signature; no throw site is visible in this view
 */
2178 private static ArrayList <Vertex> getConnectedNodes(GraphTraversalSource g, Vertex startVtx )
2179 throws AAIException {
2181 ArrayList <Vertex> retArr = new ArrayList <> ();
2182 if( startVtx == null ){
// NOTE(review): the body of this null-guard is elided in this view
// (presumably an early return of the empty list, or an AAIException) -- confirm.
2186 GraphTraversal<Vertex, Vertex> modPipe = null;
2187 modPipe = g.V(startVtx).both();
// The outer hasNext() check is redundant with the while-loop condition, and
// g.V() does not return null -- both are harmless belt-and-braces guards.
2188 if( modPipe != null && modPipe.hasNext() ){
2189 while( modPipe.hasNext() ){
2190 Vertex conVert = modPipe.next();
2191 retArr.add(conVert);
2197 }// End of getConnectedNodes()
2200 private static ArrayList <Vertex> getConnectedChildrenOfOneType( GraphTraversalSource g,
2201 Vertex startVtx, String childNType ) throws AAIException{
2203 ArrayList <Vertex> childList = new ArrayList <> ();
2204 Iterator <Vertex> vertI = g.V(startVtx).union(__.outE().has(EdgeProperties.out(EdgeProperty.IS_PARENT), true), __.inE().has(EdgeProperties.in(EdgeProperty.IS_PARENT), true)).bothV();
2205 Vertex tmpVtx = null;
2206 while( vertI != null && vertI.hasNext() ){
2207 tmpVtx = vertI.next();
2208 Object ob = tmpVtx.<Object>property("aai-node-type").orElse(null);
2210 String tmpNt = ob.toString();
2211 if( tmpNt.equals(childNType)){
2212 childList.add(tmpVtx);
2219 }// End of getConnectedChildrenOfOneType()
2222 private static Vertex getConnectedParent( GraphTraversalSource g,
2223 Vertex startVtx ) throws AAIException{
2225 Vertex parentVtx = null;
2226 Iterator <Vertex> vertI = g.V(startVtx).union(__.inE().has(EdgeProperties.out(EdgeProperty.IS_PARENT), true), __.outE().has(EdgeProperties.in(EdgeProperty.IS_PARENT), true)).bothV();
2227 while( vertI != null && vertI.hasNext() ){
2228 // Note - there better only be one!
2229 parentVtx = vertI.next();
2234 }// End of getConnectedParent()