2 * ============LICENSE_START=======================================================
4 * ================================================================================
5 * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
6 * Copyright © 2017 Amdocs
7 * ================================================================================
8 * Licensed under the Apache License, Version 2.0 (the "License");
9 * you may not use this file except in compliance with the License.
10 * You may obtain a copy of the License at
12 * http://www.apache.org/licenses/LICENSE-2.0
14 * Unless required by applicable law or agreed to in writing, software
15 * distributed under the License is distributed on an "AS IS" BASIS,
16 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 * See the License for the specific language governing permissions and
18 * limitations under the License.
19 * ============LICENSE_END=========================================================
21 * ECOMP is a trademark and service mark of AT&T Intellectual Property.
23 package org.onap.aai.sparky.aggregation.sync;
25 import static java.util.concurrent.CompletableFuture.supplyAsync;
27 import java.io.IOException;
28 import java.sql.Timestamp;
29 import java.text.SimpleDateFormat;
30 import java.util.Collection;
31 import java.util.EnumSet;
33 import java.util.Map.Entry;
35 import java.util.concurrent.ConcurrentHashMap;
36 import java.util.concurrent.atomic.AtomicInteger;
37 import java.util.function.Supplier;
39 import javax.json.Json;
40 import javax.ws.rs.core.MediaType;
42 import org.onap.aai.cl.api.Logger;
43 import org.onap.aai.cl.eelf.LoggerFactory;
44 import org.onap.aai.cl.mdc.MdcContext;
45 import org.onap.aai.restclient.client.OperationResult;
46 import org.onap.aai.sparky.config.oxm.SearchableEntityLookup;
47 import org.onap.aai.sparky.config.oxm.SearchableOxmEntityDescriptor;
48 import org.onap.aai.sparky.dal.rest.HttpMethod;
49 import org.onap.aai.sparky.logging.AaiUiMsgs;
50 import org.onap.aai.sparky.sync.AbstractEntitySynchronizer;
51 import org.onap.aai.sparky.sync.IndexSynchronizer;
52 import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
53 import org.onap.aai.sparky.sync.config.NetworkStatisticsConfig;
54 import org.onap.aai.sparky.sync.enumeration.OperationState;
55 import org.onap.aai.sparky.sync.enumeration.SynchronizerState;
56 import org.onap.aai.sparky.util.NodeUtils;
59 import com.fasterxml.jackson.databind.JsonNode;
60 import com.fasterxml.jackson.databind.node.ArrayNode;
63 * Synchronizer that queries AAI for the self links of every searchable entity type,
 * counts them, and stores one timestamped count document per entity type into the
 * historical-summary Elasticsearch index.
65 public class HistoricalEntitySummarizer extends AbstractEntitySynchronizer
66 implements IndexSynchronizer {

// Class-scoped logger used for all sync progress and failure reporting.
68 private static final Logger LOG = LoggerFactory.getInstance().getLogger(HistoricalEntitySummarizer.class);
// Timestamp layout written into each summary document (e.g. 20170901T120000+0000).
69 private static final String INSERTION_DATE_TIME_FORMAT = "yyyyMMdd'T'HHmmssZ";

// True once every entity type has been enumerated and counted; consulted by isSyncDone().
71 private boolean allWorkEnumerated;
// Per-entity-type self-link counts gathered from AAI, keyed by entity type name.
72 private ConcurrentHashMap<String, AtomicInteger> entityCounters;
// Guards against overlapping runs; doSync() short-circuits to PENDING while true.
73 private boolean syncInProgress;
// Snapshot of the caller's MDC logging context, re-applied inside async worker tasks.
74 private Map<String, String> contextMap;
// Elasticsearch index/schema configuration supplied at construction.
75 private ElasticSearchSchemaConfig schemaConfig;
// OXM-driven lookup providing the set of searchable entity descriptors to summarize.
76 private SearchableEntityLookup searchableEntityLookup;
/*
 * Instantiates a new historical entity summarizer.
 *
 * @param schemaConfig index/schema configuration; its index name is forwarded to the parent
 * @param internalSyncWorkers number of internal synchronizer worker threads
 * @param aaiWorkers number of AAI request worker threads
 * @param esWorkers number of Elasticsearch request worker threads
 * @param aaiStatConfig network-statistics configuration for the AAI side
 * @param esStatConfig network-statistics configuration for the Elasticsearch side
 * @param searchableEntityLookup source of the searchable entity descriptors to summarize
 * @throws Exception if the parent synchronizer fails to initialize
 */
84 public HistoricalEntitySummarizer(ElasticSearchSchemaConfig schemaConfig, int internalSyncWorkers,
85 int aaiWorkers, int esWorkers, NetworkStatisticsConfig aaiStatConfig,
86 NetworkStatisticsConfig esStatConfig, SearchableEntityLookup searchableEntityLookup)
// "HES" is the short log prefix the parent uses for this synchronizer.
88 super(LOG, "HES", internalSyncWorkers, aaiWorkers, esWorkers, schemaConfig.getIndexName(), aaiStatConfig, esStatConfig);
90 this.schemaConfig = schemaConfig;
91 this.allWorkEnumerated = false;
92 this.entityCounters = new ConcurrentHashMap<String, AtomicInteger>();
93 this.synchronizerName = "Historical Entity Summarizer";
// Only REST-call statistics are tracked for this synchronizer.
94 this.enabledStatFlags = EnumSet.of(StatFlag.AAI_REST_STATS, StatFlag.ES_REST_STATS);
95 this.syncInProgress = false;
// Capture the current MDC so async tasks can log with the same transaction context.
96 this.contextMap = MDC.getCopyOfContextMap();
// -1 marks "no sync completed yet"; doSync() resets it on each run.
97 this.syncDurationInMs = -1;
98 this.searchableEntityLookup = searchableEntityLookup;
/*
 * Collects and executes all the summarization work: fans out one async AAI
 * self-link query per searchable entity type, waits for the counts to be
 * gathered into entityCounters, then pushes the counters to Elasticsearch.
 *
 * @return OperationState.OK on success, OperationState.ERROR if no descriptors
 *         are available or an unexpected failure occurs
 */
106 private OperationState collectAllTheWork() {
108 Map<String, SearchableOxmEntityDescriptor> descriptorMap =
109 searchableEntityLookup.getSearchableEntityDescriptors();
// Nothing to summarize without OXM descriptors — treat as a hard error.
111 if (descriptorMap.isEmpty()) {
112 LOG.error(AaiUiMsgs.OXM_FAILED_RETRIEVAL, "historical entities");
114 return OperationState.ERROR;
117 Collection<String> entityTypes = descriptorMap.keySet();
// Async work-on-hand counter: one unit per entity type, decremented as each
// async fetch completes; the wait loop below spins until it reaches zero.
119 AtomicInteger asyncWoH = new AtomicInteger(0);
121 asyncWoH.set(entityTypes.size());
124 for (String entityType : entityTypes) {
126 supplyAsync(new Supplier<Void>() {
// Re-apply the captured MDC so worker-thread logs share the sync's txn context.
130 MDC.setContextMap(contextMap);
132 OperationResult typeLinksResult =
133 aaiAdapter.getSelfLinksByEntityType(entityType);
134 updateActiveInventoryCounters(HttpMethod.GET, entityType, typeLinksResult);
135 processEntityTypeSelfLinks(entityType, typeLinksResult);
// Per-type failures are logged and swallowed so one bad type
// cannot abort the enumeration of the others.
136 } catch (Exception exc) {
137 LOG.error(AaiUiMsgs.ERROR_GETTING_DATA_FROM_AAI, exc.getMessage());
// whenComplete always fires, so asyncWoH is decremented on success AND failure.
144 }, aaiExecutor).whenComplete((result, error) -> {
146 asyncWoH.decrementAndGet();
149 LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, error.getMessage());
// Busy-wait until every per-type fetch has completed.
// NOTE(review): the loop body presumably sleeps between polls — confirm.
157 while (asyncWoH.get() > 0) {
159 if (LOG.isDebugEnabled()) {
160 LOG.debug(AaiUiMsgs.DEBUG_GENERIC, indexName + " summarizer waiting for all the links to be processed.");
// Pre-load the ES work counter: one pending store per counted entity type.
166 esWorkOnHand.set(entityCounters.size());
168 // start doing the real work
169 allWorkEnumerated = true;
171 insertEntityTypeCounters();
// Debug dump of the final per-type counts.
173 if (LOG.isDebugEnabled()) {
175 StringBuilder sb = new StringBuilder(128);
177 sb.append("\n\nHistorical Entity Counters:");
179 for (Entry<String, AtomicInteger> entry : entityCounters.entrySet()) {
180 sb.append("\n").append(entry.getKey()).append(" = ").append(entry.getValue().get());
183 LOG.debug(AaiUiMsgs.DEBUG_GENERIC, sb.toString());
187 } catch (Exception exc) {
188 LOG.error(AaiUiMsgs.HISTORICAL_COLLECT_ERROR, exc.getMessage());
// Mark enumeration complete even on failure so isSyncDone() can terminate the sync.
192 allWorkEnumerated = true;
194 return OperationState.ERROR;
197 return OperationState.OK;
/*
 * Entry point for a synchronization run. Initializes a fresh MDC transaction
 * context, refuses to start if a run is already in progress, and otherwise
 * delegates to collectAllTheWork().
 *
 * @return PENDING if a sync is already running, otherwise the result of
 *         collectAllTheWork() (OK or ERROR)
 *
 * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#doSync()
 */
205 public OperationState doSync() {
// Reset the duration so stat reports don't show the previous run's value.
206 this.syncDurationInMs = -1;
207 String txnID = NodeUtils.getRandomTxnId();
208 MdcContext.initialize(txnID, "HistoricalEntitySynchronizer", "", "Sync", "");
// Only one historical sync may run at a time.
210 if (syncInProgress) {
211 LOG.info(AaiUiMsgs.HISTORICAL_SYNC_PENDING);
212 return OperationState.PENDING;
217 syncInProgress = true;
218 this.syncStartedTimeStampInMs = System.currentTimeMillis();
219 allWorkEnumerated = false;
221 return collectAllTheWork();
/*
 * Parses an AAI self-links query response and records the number of instances
 * of the given entity type into entityCounters (size of the "result-data" array).
 *
 * @param entityType the AAI entity type the result belongs to
 * @param operationResult raw REST result of the self-links query for that type
 */
230 private void processEntityTypeSelfLinks(String entityType, OperationResult operationResult) {
232 JsonNode rootNode = null;
234 final String jsonResult = operationResult.getResult();
// Only attempt to parse a non-empty payload from a successful call.
236 if (jsonResult != null && jsonResult.length() > 0 && operationResult.wasSuccessful()) {
239 rootNode = mapper.readTree(jsonResult);
240 } catch (IOException exc) {
// NOTE(review): if parsing fails here, rootNode stays null; confirm the
// (not visible here) code path returns before the rootNode.get() below.
241 LOG.error(AaiUiMsgs.JSON_PROCESSING_ERROR, exc.getMessage());
// AAI self-link responses wrap the matches in a "result-data" array.
245 JsonNode resultData = rootNode.get("result-data");
246 ArrayNode resultDataArrayNode = null;
248 if (resultData != null && resultData.isArray()) {
249 resultDataArrayNode = (ArrayNode) resultData;
// The historical "count" for this type is simply the number of self links returned.
250 entityCounters.put(entityType, new AtomicInteger(resultDataArrayNode.size()));
/*
 * Stores one JSON document per entity type into the historical Elasticsearch
 * index, each carrying {count, entityType, timestamp}. Posts are issued
 * asynchronously on esExecutor; the method blocks until esWorkOnHand drains.
 */
259 private void insertEntityTypeCounters() {
// Nothing queued (no entity types were counted) — bail out early.
261 if (esWorkOnHand.get() <= 0) {
// SimpleDateFormat is not thread-safe, but this instance is method-local and
// only formatted once here, so that is fine. One shared timestamp is used
// for all documents of this run.
265 SimpleDateFormat dateFormat = new SimpleDateFormat(INSERTION_DATE_TIME_FORMAT);
266 Timestamp timestamp = new Timestamp(System.currentTimeMillis());
267 String currentFormattedTimeStamp = dateFormat.format(timestamp);
269 Set<Entry<String, AtomicInteger>> entityCounterEntries = entityCounters.entrySet();
271 for (Entry<String, AtomicInteger> entityCounterEntry : entityCounterEntries) {
273 supplyAsync(new Supplier<Void>() {
// Propagate the sync's MDC context onto the ES worker thread for logging.
277 MDC.setContextMap(contextMap);
278 String jsonString = Json.createObjectBuilder().add(
279 "count", entityCounterEntry.getValue().get())
280 .add("entityType", entityCounterEntry.getKey())
281 .add("timestamp", currentFormattedTimeStamp).build().toString();
285 link = elasticSearchAdapter.buildElasticSearchPostUrl(indexName);
286 OperationResult or = elasticSearchAdapter.doPost(link, jsonString, MediaType.APPLICATION_JSON_TYPE);
287 updateElasticSearchCounters(HttpMethod.POST, entityCounterEntry.getKey(), or);
// Per-document store failures are logged and swallowed (best effort).
288 } catch (Exception exc) {
289 LOG.error(AaiUiMsgs.ES_STORE_FAILURE, exc.getMessage() );
// whenComplete always fires, so esWorkOnHand is decremented on every outcome.
295 }, esExecutor).whenComplete((result, error) -> {
297 esWorkOnHand.decrementAndGet();
// Busy-wait until all ES posts have completed.
303 while (esWorkOnHand.get() > 0) {
// NOTE(review): the interrupt is logged but the thread's interrupt status does
// not appear to be restored (Thread.currentThread().interrupt()) — confirm.
307 } catch (InterruptedException exc) {
308 LOG.error(AaiUiMsgs.INTERRUPTED, "historical Entities", exc.getMessage());
/*
 * Reports whether this synchronizer is currently running.
 *
 * @return PERFORMING_SYNCHRONIZATION while a sync run is active
 *         (guard condition presumably checks isSyncDone()/syncInProgress —
 *         NOTE(review): confirm, the condition is not shown here), else IDLE
 */
315 public SynchronizerState getState() {
318 return SynchronizerState.PERFORMING_SYNCHRONIZATION;
321 return SynchronizerState.IDLE;
/*
 * Builds the statistics report for this synchronizer. Note the duration is
 * recomputed from the wall clock on every call, measured from the start of
 * the most recent sync run.
 *
 * @param showFinalReport true to render the final (end-of-sync) report form
 * @return the formatted statistics report
 *
 * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#getStatReport(boolean)
 */
329 public String getStatReport(boolean showFinalReport) {
330 syncDurationInMs = System.currentTimeMillis() - syncStartedTimeStampInMs;
331 return this.getStatReport(syncDurationInMs, showFinalReport);
/*
 * Shuts down the worker executors owned by the parent synchronizer.
 *
 * @see org.openecomp.sparky.synchronizer.IndexSynchronizer#shutdown()
 */
338 public void shutdown() {
339 this.shutdownExecutors();
/*
 * Determines whether the current sync run has finished: done only when all
 * work has been enumerated AND no AAI or ES requests remain outstanding.
 * Clears syncInProgress as a side effect when the run is complete.
 *
 * @return true when the sync is finished, false while work remains
 */
343 protected boolean isSyncDone() {
// Outstanding work is the sum of pending AAI fetches and pending ES stores.
345 int totalWorkOnHand = aaiWorkOnHand.get() + esWorkOnHand.get();
347 if (LOG.isDebugEnabled()) {
348 LOG.debug(AaiUiMsgs.DEBUG_GENERIC,indexName + ", isSyncDone(), totalWorkOnHand = " + totalWorkOnHand
349 + " all work enumerated = " + allWorkEnumerated);
// Not done while requests are in flight or enumeration hasn't completed.
352 if (totalWorkOnHand > 0 || !allWorkEnumerated) {
// Run complete — allow the next doSync() invocation to proceed.
356 this.syncInProgress = false;
/*
 * Clears cached sync state (statistics counters and per-type entity counts).
 * The request is ignored while a sync is in progress to avoid corrupting an
 * active run's bookkeeping.
 *
 * @see org.openecomp.sparky.synchronizer.AbstractEntitySynchronizer#clearCache()
 */
365 public void clearCache() {
367 if (syncInProgress) {
368 LOG.debug(AaiUiMsgs.DEBUG_GENERIC, "Historical Entity Summarizer in progress, request to clear cache ignored");
// Reset parent-level REST statistics, then this class's per-type counters.
373 this.resetCounters();
374 if (entityCounters != null) {
375 entityCounters.clear();
// Force the next run to re-enumerate all entity types from scratch.
378 allWorkEnumerated = false;