/*
 * ============LICENSE_START===================================================
 * SPARKY (AAI UI service)
 * ============================================================================
 * Copyright © 2017 AT&T Intellectual Property.
 * Copyright © 2017 Amdocs
 * ============================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=====================================================
 *
 * ECOMP and OpenECOMP are trademarks
 * and service marks of AT&T Intellectual Property.
 */
package org.onap.aai.sparky.autosuggestion.sync;

import static java.util.concurrent.CompletableFuture.supplyAsync;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Deque;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;

import org.onap.aai.cl.api.Logger;
import org.onap.aai.cl.eelf.LoggerFactory;
import org.onap.aai.cl.mdc.MdcContext;
import org.onap.aai.restclient.client.OperationResult;
import org.onap.aai.sparky.config.oxm.OxmEntityDescriptor;
import org.onap.aai.sparky.config.oxm.OxmEntityLookup;
import org.onap.aai.sparky.config.oxm.SuggestionEntityDescriptor;
import org.onap.aai.sparky.config.oxm.SuggestionEntityLookup;
import org.onap.aai.sparky.dal.ActiveInventoryAdapter;
import org.onap.aai.sparky.dal.NetworkTransaction;
import org.onap.aai.sparky.dal.rest.HttpMethod;
import org.onap.aai.sparky.logging.AaiUiMsgs;
import org.onap.aai.sparky.search.filters.config.FiltersConfig;
import org.onap.aai.sparky.sync.AbstractEntitySynchronizer;
import org.onap.aai.sparky.sync.IndexSynchronizer;
import org.onap.aai.sparky.sync.SynchronizerConstants;
import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
import org.onap.aai.sparky.sync.entity.SelfLinkDescriptor;
import org.onap.aai.sparky.sync.entity.SuggestionSearchEntity;
import org.onap.aai.sparky.sync.enumeration.OperationState;
import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
import org.onap.aai.sparky.sync.task.PerformActiveInventoryRetrieval;
import org.onap.aai.sparky.sync.task.PerformElasticSearchPut;
import org.onap.aai.sparky.sync.task.PerformElasticSearchRetrieval;
import org.onap.aai.sparky.util.NodeUtils;
import org.onap.aai.sparky.util.SuggestionsPermutation;
import org.slf4j.MDC;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
/**
 * The Class AutosuggestionSynchronizer.
 */
81 public class AutosuggestionSynchronizer extends AbstractEntitySynchronizer
82 implements IndexSynchronizer {
84 private class RetrySuggestionEntitySyncContainer {
85 NetworkTransaction txn;
86 SuggestionSearchEntity ssec;
89 * Instantiates a new RetrySuggestionEntitySyncContainer.
92 * @param icer the icer
94 public RetrySuggestionEntitySyncContainer(NetworkTransaction txn, SuggestionSearchEntity icer) {
99 public NetworkTransaction getNetworkTransaction() {
103 public SuggestionSearchEntity getSuggestionSearchEntity() {
108 private static final Logger LOG =
109 LoggerFactory.getInstance().getLogger(AutosuggestionSynchronizer.class);
110 private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ";
112 private boolean allWorkEnumerated;
113 private Deque<SelfLinkDescriptor> selflinks;
114 private ConcurrentHashMap<String, AtomicInteger> entityCounters;
115 private boolean syncInProgress;
116 private Map<String, String> contextMap;
117 protected ExecutorService esPutExecutor;
118 private Deque<RetrySuggestionEntitySyncContainer> retryQueue;
119 private Map<String, Integer> retryLimitTracker;
120 private OxmEntityLookup oxmEntityLookup;
121 private SuggestionEntityLookup suggestionEntityLookup;
122 private FiltersConfig filtersConfig;
125 * Instantiates a new historical entity summarizer.
127 * @param indexName the index name
128 * @throws Exception the exception
130 public AutosuggestionSynchronizer(ElasticSearchSchemaConfig schemaConfig, int internalSyncWorkers,
131 int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig,
132 NetworkStatisticsConfig esStatConfig, OxmEntityLookup oxmEntityLookup,
133 SuggestionEntityLookup suggestionEntityLookup, FiltersConfig filtersConfig) throws Exception {
135 super(LOG, "ASES-" + schemaConfig.getIndexName().toUpperCase(), internalSyncWorkers, aaiWorkers,
136 esWorkers, schemaConfig.getIndexName(), aaiStatConfig, esStatConfig);
138 this.oxmEntityLookup = oxmEntityLookup;
139 this.suggestionEntityLookup = suggestionEntityLookup;
140 this.allWorkEnumerated = false;
141 this.selflinks = new ConcurrentLinkedDeque<SelfLinkDescriptor>();
142 this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>();
143 this.synchronizerName = "Autosuggestion Entity Synchronizer";
144 this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS);
145 this.syncInProgress = false;
146 this.contextMap = MDC.getCopyOfContextMap();
147 this.esPutExecutor = NodeUtils.createNamedExecutor("SUES-ES-PUT", 5, LOG);
148 this.retryQueue = new ConcurrentLinkedDeque<RetrySuggestionEntitySyncContainer>();
149 this.retryLimitTracker = new ConcurrentHashMap<String, Integer>();
150 this.syncDurationInMs = -1;
151 this.filtersConfig = filtersConfig;
155 * Collect all the work.
157 * @return the operation state
159 private OperationState collectAllTheWork() {
160 final Map<String, String> contextMap = MDC.getCopyOfContextMap();
161 Map<String, SuggestionEntityDescriptor> descriptorMap =
162 suggestionEntityLookup.getSuggestionSearchEntityDescriptors();
164 if (descriptorMap.isEmpty()) {
165 LOG.error(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES);
166 LOG.info(AaiUiMsgs.ERROR_LOADING_OXM_SUGGESTIBLE_ENTITIES);
167 return OperationState.ERROR;
170 Collection<String> syncTypes = descriptorMap.keySet();
175 * launch a parallel async thread to process the documents for each entity-type (to max the of
176 * the configured executor anyway)
179 aaiWorkOnHand.set(syncTypes.size());
181 for (String key : syncTypes) {
183 supplyAsync(new Supplier<Void>() {
187 MDC.setContextMap(contextMap);
188 OperationResult typeLinksResult = null;
190 typeLinksResult = aaiAdapter.getSelfLinksByEntityType(key);
191 aaiWorkOnHand.decrementAndGet();
192 processEntityTypeSelfLinks(typeLinksResult);
193 } catch (Exception exc) {
194 LOG.error(AaiUiMsgs.ERROR_GENERIC,
195 "An error occurred while processing entity self-links. Error: "
202 }, aaiExecutor).whenComplete((result, error) -> {
205 LOG.error(AaiUiMsgs.ERROR_GENERIC,
206 "An error occurred getting data from AAI. Error = " + error.getMessage());
212 while (aaiWorkOnHand.get() != 0) {
214 if (LOG.isDebugEnabled()) {
215 LOG.debug(AaiUiMsgs.WAIT_FOR_ALL_SELFLINKS_TO_BE_COLLECTED);
221 aaiWorkOnHand.set(selflinks.size());
222 allWorkEnumerated = true;
225 while (!isSyncDone()) {
231 * Make sure we don't hang on to retries that failed which could cause issues during future
234 retryLimitTracker.clear();
236 } catch (Exception exc) {
237 LOG.error(AaiUiMsgs.ERROR_GENERIC,
238 "An error occurred while performing the sync. Error: " + exc.getMessage());
241 return OperationState.OK;
248 * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync()
251 public OperationState doSync() {
252 this.syncDurationInMs = -1;
253 syncStartedTimeStampInMs = System.currentTimeMillis();
254 String txnID = NodeUtils.getRandomTxnId();
255 MdcContext.initialize(txnID, "AutosuggestionSynchronizer", "", "Sync", "");
257 return collectAllTheWork();
261 * Process entity type self links.
263 * @param operationResult the operation result
265 private void processEntityTypeSelfLinks(OperationResult operationResult) {
267 JsonNode rootNode = null;
269 if ( operationResult == null ) {
273 final String jsonResult = operationResult.getResult();
275 if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) {
278 rootNode = mapper.readTree(jsonResult);
279 } catch (IOException exc) {
280 String message = "Could not deserialize JSON (representing operation result) as node tree. "
281 + "Operation result = " + jsonResult + ". " + exc.getLocalizedMessage();
282 LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, message);
285 JsonNode resultData = rootNode.get("result-data");
286 ArrayNode resultDataArrayNode = null;
288 if (resultData.isArray()) {
289 resultDataArrayNode = (ArrayNode) resultData;
291 Iterator<JsonNode> elementIterator = resultDataArrayNode.elements();
292 JsonNode element = null;
294 while (elementIterator.hasNext()) {
295 element = elementIterator.next();
297 final String resourceType = NodeUtils.getNodeFieldAsText(element, "resource-type");
298 final String resourceLink = NodeUtils.getNodeFieldAsText(element, "resource-link");
300 OxmEntityDescriptor descriptor = null;
302 if (resourceType != null && resourceLink != null) {
304 descriptor = oxmEntityLookup.getEntityDescriptors().get(resourceType);
306 if (descriptor == null) {
307 LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, resourceType);
308 // go to next element in iterator
311 selflinks.add(new SelfLinkDescriptor(resourceLink,
312 SynchronizerConstants.NODES_ONLY_MODIFIER, resourceType));
324 private void syncEntityTypes() {
326 while (selflinks.peek() != null) {
328 SelfLinkDescriptor linkDescriptor = selflinks.poll();
329 aaiWorkOnHand.decrementAndGet();
331 OxmEntityDescriptor descriptor = null;
333 if (linkDescriptor.getSelfLink() != null && linkDescriptor.getEntityType() != null) {
335 descriptor = oxmEntityLookup.getEntityDescriptors().get(linkDescriptor.getEntityType());
337 if (descriptor == null) {
338 LOG.error(AaiUiMsgs.MISSING_ENTITY_DESCRIPTOR, linkDescriptor.getEntityType());
339 // go to next element in iterator
343 NetworkTransaction txn = new NetworkTransaction();
344 txn.setDescriptor(descriptor);
345 txn.setLink(linkDescriptor.getSelfLink());
346 txn.setOperationType(HttpMethod.GET);
347 txn.setEntityType(linkDescriptor.getEntityType());
349 aaiWorkOnHand.incrementAndGet();
351 supplyAsync(new PerformActiveInventoryRetrieval(txn, aaiAdapter), aaiExecutor)
352 .whenComplete((result, error) -> {
354 aaiWorkOnHand.decrementAndGet();
357 LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_GENERIC, error.getLocalizedMessage());
359 if (result == null) {
360 LOG.error(AaiUiMsgs.AAI_RETRIEVAL_FAILED_FOR_SELF_LINK,
361 linkDescriptor.getSelfLink());
363 updateActiveInventoryCounters(result);
364 fetchDocumentForUpsert(result);
375 * Return a set of valid suggestion attributes for the provided entityName that are present in the
378 * @param node JSON node in which the attributes should be found
380 * @param entityName Name of the entity
382 * @return List of all valid suggestion attributes(key's)
384 public List<String> getSuggestableAttrNamesFromReponse(JsonNode node, String entityName) {
385 List<String> suggestableAttr = new ArrayList<String>();
387 HashMap<String, String> desc =
388 suggestionEntityLookup.getSuggestionSearchEntityOxmModel().get(entityName);
392 String attr = desc.get("suggestibleAttributes");
395 suggestableAttr = Arrays.asList(attr.split(","));
396 List<String> suggestableValue = new ArrayList<String>();
397 for (String attribute : suggestableAttr) {
398 if (node.get(attribute) != null && node.get(attribute).asText().length() > 0) {
399 suggestableValue.add(attribute);
402 return suggestableValue;
406 return new ArrayList<String>();
410 * Fetch all the documents for upsert. Based on the number of permutations that are available the
411 * number of documents will be different
415 private void fetchDocumentForUpsert(NetworkTransaction txn) {
416 if (!txn.getOperationResult().wasSuccessful()) {
417 String message = "Self link failure. Result - " + txn.getOperationResult().getResult();
418 LOG.error(AaiUiMsgs.ERROR_GENERIC, message);
422 final String jsonResult = txn.getOperationResult().getResult();
424 if (jsonResult != null && jsonResult.length() > 0) {
426 // Step 1: Calculate the number of possible permutations of attributes
427 String entityName = txn.getDescriptor().getEntityName();
428 JsonNode entityNode = mapper.readTree(jsonResult);
430 List<String> availableSuggestableAttrName =
431 getSuggestableAttrNamesFromReponse(entityNode, entityName);
433 ArrayList<ArrayList<String>> uniqueLists =
434 SuggestionsPermutation.getNonEmptyUniqueLists(availableSuggestableAttrName);
435 // Now we have a list of all possible permutations for the status that are
436 // defined for this entity type. Try inserting a document for every combination.
437 for (ArrayList<String> uniqueList : uniqueLists) {
439 SuggestionSearchEntity sse = new SuggestionSearchEntity(filtersConfig, suggestionEntityLookup);
440 sse.setSuggestableAttr(uniqueList);
441 sse.setFilterBasedPayloadFromResponse(entityNode, entityName, uniqueList);
442 sse.setLink(ActiveInventoryAdapter.extractResourcePath(txn.getLink()));
443 populateSuggestionSearchEntityDocument(sse, jsonResult, txn);
444 // The unique id for the document will be created at derive fields
446 // Insert the document only if it has valid statuses
447 if (sse.isSuggestableDoc()) {
450 link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), sse.getId());
451 } catch (Exception exc) {
452 LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_QUERY, exc.getLocalizedMessage());
456 NetworkTransaction n2 = new NetworkTransaction();
458 n2.setEntityType(txn.getEntityType());
459 n2.setDescriptor(txn.getDescriptor());
460 n2.setOperationType(HttpMethod.GET);
462 esWorkOnHand.incrementAndGet();
464 supplyAsync(new PerformElasticSearchRetrieval(n2, elasticSearchAdapter), esExecutor)
465 .whenComplete((result, error) -> {
467 esWorkOnHand.decrementAndGet();
470 LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED, error.getLocalizedMessage());
472 updateElasticSearchCounters(result);
473 performDocumentUpsert(result, sse);
480 } catch (JsonProcessingException exc) {
481 LOG.error(AaiUiMsgs.ERROR_GENERIC, "There was a json processing error while processing the result from elasticsearch. Error: " + exc.getMessage());
482 } catch (IOException exc) {
483 LOG.error(AaiUiMsgs.ERROR_GENERIC, "There was a io processing error while processing the result from elasticsearch. Error: " + exc.getMessage());
487 protected void populateSuggestionSearchEntityDocument(SuggestionSearchEntity sse, String result,
488 NetworkTransaction txn) throws JsonProcessingException, IOException {
490 OxmEntityDescriptor resultDescriptor = txn.getDescriptor();
492 sse.setEntityType(resultDescriptor.getEntityName());
494 JsonNode entityNode = mapper.readTree(result);
496 List<String> primaryKeyValues = new ArrayList<String>();
497 String pkeyValue = null;
499 for (String keyName : resultDescriptor.getPrimaryKeyAttributeNames()) {
500 pkeyValue = NodeUtils.getNodeFieldAsText(entityNode, keyName);
501 if (pkeyValue != null) {
502 primaryKeyValues.add(pkeyValue);
504 String message = "populateSuggestionSearchEntityDocument(),"
505 + " pKeyValue is null for entityType = " + resultDescriptor.getEntityName();
506 LOG.warn(AaiUiMsgs.WARN_GENERIC, message);
510 final String primaryCompositeKeyValue = NodeUtils.concatArray(primaryKeyValues, "/");
511 sse.setEntityPrimaryKeyValue(primaryCompositeKeyValue);
512 sse.generateSuggestionInputPermutations();
515 protected void performDocumentUpsert(NetworkTransaction esGetTxn, SuggestionSearchEntity sse) {
519 * As part of the response processing we need to do the following:
520 * <li>1. Extract the version (if present), it will be the ETAG when we use the
521 * Search-Abstraction-Service
522 * <li>2. Spawn next task which is to do the PUT operation into elastic with or with the version
524 * <li>a) if version is null or RC=404, then standard put, no _update with version tag
525 * <li>b) if version != null, do PUT with _update?version= versionNumber in the URI to elastic
531 link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), sse.getId());
532 } catch (Exception exc) {
533 LOG.error(AaiUiMsgs.ES_LINK_UPSERT, exc.getLocalizedMessage());
537 boolean wasEntryDiscovered = false;
538 if (esGetTxn.getOperationResult().getResultCode() == 404) {
539 LOG.info(AaiUiMsgs.ES_SIMPLE_PUT, sse.getEntityPrimaryKeyValue());
540 } else if (esGetTxn.getOperationResult().getResultCode() == 200) {
541 wasEntryDiscovered = true;
544 * Not being a 200 does not mean a failure. eg 201 is returned for created. and 500 for es not
545 * found TODO -> Should we return.
547 LOG.error(AaiUiMsgs.ES_OPERATION_RETURN_CODE,
548 String.valueOf(esGetTxn.getOperationResult().getResultCode()));
551 // Insert a new document only if the paylod is different.
552 // This is determined by hashing the payload and using it as a id for the document
554 if (!wasEntryDiscovered) {
556 String jsonPayload = null;
558 jsonPayload = sse.getAsJson();
559 if (link != null && jsonPayload != null) {
561 NetworkTransaction updateElasticTxn = new NetworkTransaction();
562 updateElasticTxn.setLink(link);
563 updateElasticTxn.setEntityType(esGetTxn.getEntityType());
564 updateElasticTxn.setDescriptor(esGetTxn.getDescriptor());
565 updateElasticTxn.setOperationType(HttpMethod.PUT);
567 esWorkOnHand.incrementAndGet();
568 supplyAsync(new PerformElasticSearchPut(jsonPayload, updateElasticTxn, elasticSearchAdapter),
569 esPutExecutor).whenComplete((result, error) -> {
571 esWorkOnHand.decrementAndGet();
574 String message = "Suggestion search entity sync UPDATE PUT error - "
575 + error.getLocalizedMessage();
576 LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
578 updateElasticSearchCounters(result);
579 processStoreDocumentResult(result, esGetTxn, sse);
583 } catch (Exception exc) {
585 "Exception caught during suggestion search entity sync PUT operation. Message - "
586 + exc.getLocalizedMessage();
587 LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
592 private void processStoreDocumentResult(NetworkTransaction esPutResult,
593 NetworkTransaction esGetResult, SuggestionSearchEntity sse) {
595 OperationResult or = esPutResult.getOperationResult();
597 if (!or.wasSuccessful()) {
598 if (or.getResultCode() == VERSION_CONFLICT_EXCEPTION_CODE) {
600 if (shouldAllowRetry(sse.getId())) {
601 esWorkOnHand.incrementAndGet();
603 RetrySuggestionEntitySyncContainer rssec =
604 new RetrySuggestionEntitySyncContainer(esGetResult, sse);
605 retryQueue.push(rssec);
607 String message = "Store document failed during suggestion search entity synchronization"
608 + " due to version conflict. Entity will be re-synced.";
609 LOG.warn(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
613 "Store document failed during suggestion search entity synchronization with result code "
614 + or.getResultCode() + " and result message " + or.getResult();
615 LOG.error(AaiUiMsgs.ES_SUGGESTION_SEARCH_ENTITY_SYNC_ERROR, message);
621 * Perform retry sync.
623 private void performRetrySync() {
624 while (retryQueue.peek() != null) {
626 RetrySuggestionEntitySyncContainer susc = retryQueue.poll();
629 SuggestionSearchEntity sus = susc.getSuggestionSearchEntity();
630 NetworkTransaction txn = susc.getNetworkTransaction();
635 * In this retry flow the se object has already derived its fields
637 link = elasticSearchAdapter.buildElasticSearchGetDocUrl(getIndexName(), sus.getId());
638 } catch (Exception exc) {
639 LOG.error(AaiUiMsgs.ES_FAILED_TO_CONSTRUCT_URI, exc.getLocalizedMessage());
643 NetworkTransaction retryTransaction = new NetworkTransaction();
644 retryTransaction.setLink(link);
645 retryTransaction.setEntityType(txn.getEntityType());
646 retryTransaction.setDescriptor(txn.getDescriptor());
647 retryTransaction.setOperationType(HttpMethod.GET);
650 * IMPORTANT - DO NOT incrementAndGet the esWorkOnHand as this is a retry flow! We already
651 * called incrementAndGet when queuing the failed PUT!
654 supplyAsync(new PerformElasticSearchRetrieval(retryTransaction, elasticSearchAdapter),
655 esExecutor).whenComplete((result, error) -> {
657 esWorkOnHand.decrementAndGet();
660 LOG.error(AaiUiMsgs.ES_RETRIEVAL_FAILED_RESYNC, error.getLocalizedMessage());
662 updateElasticSearchCounters(result);
663 performDocumentUpsert(result, sus);
673 * Should allow retry.
676 * @return true, if successful
678 private boolean shouldAllowRetry(String id) {
679 boolean isRetryAllowed = true;
680 if (retryLimitTracker.get(id) != null) {
681 Integer currentCount = retryLimitTracker.get(id);
682 if (currentCount.intValue() >= RETRY_COUNT_PER_ENTITY_LIMIT.intValue()) {
683 isRetryAllowed = false;
684 String message = "Searchable entity re-sync limit reached for " + id
685 + ", re-sync will no longer be attempted for this entity";
686 LOG.error(AaiUiMsgs.ES_SEARCHABLE_ENTITY_SYNC_ERROR, message);
688 Integer newCount = new Integer(currentCount.intValue() + 1);
689 retryLimitTracker.put(id, newCount);
692 Integer firstRetryCount = new Integer(1);
693 retryLimitTracker.put(id, firstRetryCount);
696 return isRetryAllowed;
702 public SynchronizerState getState() {
705 return SynchronizerState.PERFORMING_SYNCHRONIZATION;
708 return SynchronizerState.IDLE;
715 * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean)
718 public String getStatReport(boolean showFinalReport) {
719 syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs;
720 return getStatReport(syncDurationInMs, showFinalReport);
726 * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown()
729 public void shutdown() {
730 this.shutdownExecutors();
734 protected boolean isSyncDone() {
736 int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get();
738 if (LOG.isDebugEnabled()) {
739 LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + ", isSyncDone(), totalWorkOnHand = "
740 + totalWorkOnHand + " all work enumerated = " + allWorkEnumerated);
743 if (totalWorkOnHand > 0 || !allWorkEnumerated) {
747 this.syncInProgress = false;
755 * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache()
758 public void clearCache() {
// Clears cached counters unless a sync is in progress, in which case the request is ignored.
// NOTE(review): this span appears truncated by extraction (missing braces / an early return
// after the debug log, and a likely super.clearCache() call) — lines kept verbatim; confirm
// against the canonical source before relying on structure here.
760 if (syncInProgress) {
761 LOG.debug(AaiUiMsgs.DEBUG_GENERIC,
762 "Autosuggestion Entity Summarizer in progress, request to clear cache ignored");
767 this.resetCounters();
// Defensive null check before clearing the per-entity-type counter map.
768 if (entityCounters != null) {
769 entityCounters.clear();
// Reset enumeration state so the next sync re-enumerates all self-links.
772 allWorkEnumerated = false;