/**
 * ============LICENSE_START=======================================================
 * ================================================================================
 * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 * Copyright © 2017 Amdocs
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 *
 * ECOMP is a trademark and service mark of AT&T Intellectual Property.
 */
package org.onap.aai.sparky.sync;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

import javax.ws.rs.core.MediaType;

import org.onap.aai.cl.api.Logger;
import org.onap.aai.cl.eelf.LoggerFactory;
import org.onap.aai.restclient.client.OperationResult;
import org.onap.aai.sparky.dal.ElasticSearchAdapter;
import org.onap.aai.sparky.logging.AaiUiMsgs;
import org.onap.aai.sparky.sync.config.ElasticSearchEndpointConfig;
import org.onap.aai.sparky.sync.config.ElasticSearchSchemaConfig;
import org.onap.aai.sparky.sync.entity.ObjectIdCollection;
import org.onap.aai.sparky.sync.entity.SearchableEntity;
import org.onap.aai.sparky.sync.enumeration.OperationState;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
 * Cleans up an Elasticsearch index after a synchronization pass by removing documents that
 * existed before the sync but were not re-indexed by it.
 */
public class ElasticSearchIndexCleaner implements IndexCleaner {

  private static final Logger LOG =
      LoggerFactory.getInstance().getLogger(ElasticSearchIndexCleaner.class);

  private static final String BULK_OP_LINE_TEMPLATE = "%s\n";
  private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";

  private ObjectIdCollection before;
  private ObjectIdCollection after;

  private ObjectMapper mapper;
  private ElasticSearchAdapter esAdapter;
  private ElasticSearchEndpointConfig endpointConfig;
  private ElasticSearchSchemaConfig schemaConfig;
  /**
   * Instantiates a new elastic search index cleaner.
   *
   * @param esAdapter the Elasticsearch adapter used to issue REST operations
   * @param endpointConfig the Elasticsearch endpoint configuration
   * @param schemaConfig the index schema configuration
   */
  public ElasticSearchIndexCleaner(ElasticSearchAdapter esAdapter,
      ElasticSearchEndpointConfig endpointConfig, ElasticSearchSchemaConfig schemaConfig) {
    this.esAdapter = esAdapter;
    this.endpointConfig = endpointConfig;
    this.schemaConfig = schemaConfig;
    this.mapper = new ObjectMapper();
  }
  /*
   * (non-Javadoc)
   *
   * @see org.onap.aai.sparky.sync.IndexCleaner#populatePreOperationCollection()
   */
  @Override
  public OperationState populatePreOperationCollection() {
    try {
      before = retrieveAllDocumentIdentifiers();
      return OperationState.OK;
    } catch (Exception exc) {
      LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, schemaConfig.getIndexName(), exc.getMessage());
      return OperationState.ERROR;
    }
  }
  /*
   * (non-Javadoc)
   *
   * @see org.onap.aai.sparky.sync.IndexCleaner#populatePostOperationCollection()
   */
  @Override
  public OperationState populatePostOperationCollection() {
    try {
      after = retrieveAllDocumentIdentifiers();
      return OperationState.OK;
    } catch (Exception exc) {
      LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, schemaConfig.getIndexName(), exc.getMessage());
      return OperationState.ERROR;
    }
  }
  /*
   * (non-Javadoc)
   *
   * @see org.onap.aai.sparky.sync.IndexCleaner#performCleanup()
   */
  @Override
  public OperationState performCleanup() {
    LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP, schemaConfig.getIndexName());

    int sizeBefore = before.getSize();
    int sizeAfter = after.getSize();

    LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP_SIZE, String.valueOf(sizeBefore),
        String.valueOf(sizeAfter));

    /*
     * If the post-sync collection is empty then something has failed in the sync operation and we
     * shouldn't do the selective delete right now.
     */
    if (sizeAfter > 0) {

      Collection<String> presyncIds = before.getImportedObjectIds();
      presyncIds.removeAll(after.getImportedObjectIds());
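
      /*
       * presyncIds now holds the set difference (before minus after): the ids of documents that
       * were in the index before the sync but were not re-indexed by it, i.e. the orphans that
       * the bulk deletes below remove.
       */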

      try {
        LOG.info(AaiUiMsgs.ES_SYNC_SELECTIVE_DELETE, schemaConfig.getIndexName(),
            schemaConfig.getIndexDocType(), String.valueOf(presyncIds.size()));

        ObjectIdCollection bulkIds = new ObjectIdCollection();

        Iterator<String> it = presyncIds.iterator();
        int numItemsInBulkRequest = 0;
        int numItemsRemainingToBeDeleted = presyncIds.size();

        while (it.hasNext()) {

          bulkIds.addObjectId(it.next());
          numItemsInBulkRequest++;

          if (numItemsInBulkRequest >= endpointConfig.getScrollContextBatchRequestSize()) {
            LOG.info(AaiUiMsgs.ES_BULK_DELETE, schemaConfig.getIndexName(),
                String.valueOf(bulkIds.getSize()));
            bulkDelete(bulkIds.getImportedObjectIds());
            numItemsRemainingToBeDeleted -= numItemsInBulkRequest;
            numItemsInBulkRequest = 0;
          }
        }

        if (numItemsRemainingToBeDeleted > 0) {
          LOG.info(AaiUiMsgs.ES_BULK_DELETE, schemaConfig.getIndexName(),
              String.valueOf(bulkIds.getSize()));
          bulkDelete(bulkIds.getImportedObjectIds());
        }

      } catch (Exception exc) {
        LOG.error(AaiUiMsgs.ES_BULK_DELETE_ERROR, schemaConfig.getIndexName(),
            exc.getLocalizedMessage());
      }
    }

    return OperationState.OK;
  }
  @Override
  public String getIndexName() {
    return schemaConfig.getIndexName();
  }
  /**
   * Builds the initial scroll request payload.
   *
   * @param numItemsToGetPerRequest the number of items to return per scroll request
   * @param fieldList the list of fields to include in each hit
   * @return the scroll request payload as a JSON string
   * @throws JsonProcessingException the json processing exception
   */
  protected String buildInitialScrollRequestPayload(int numItemsToGetPerRequest,
      List<String> fieldList) throws JsonProcessingException {

    ObjectNode rootNode = mapper.createObjectNode();
    rootNode.put("size", numItemsToGetPerRequest);

    ArrayNode fields = mapper.createArrayNode();

    for (String f : fieldList) {
      fields.add(f);
    }

    rootNode.set("fields", fields);

    ObjectNode queryNode = mapper.createObjectNode();
    queryNode.set("match_all", mapper.createObjectNode());

    rootNode.set("query", queryNode);

    return mapper.writeValueAsString(rootNode);
  }
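
  /*
   * For illustration, with numItemsToGetPerRequest = 2 and fieldList = ["entityType"] (values
   * chosen only for this example), the payload built above serializes to:
   *
   *   {"size":2,"fields":["entityType"],"query":{"match_all":{}}}
   *
   * It is a plain match_all query with a page size; the scroll behaviour itself comes from the
   * "?scroll=Nm" query parameter on the _search URL, not from this body.
   */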
  /**
   * Builds the subsequent scroll context request payload.
   *
   * @param scrollId the scroll id returned by the previous scroll request
   * @param contextTimeToLiveInMinutes the scroll context time to live in minutes
   * @return the scroll continuation payload as a JSON string
   * @throws JsonProcessingException the json processing exception
   */
  protected String buildSubsequentScrollContextRequestPayload(String scrollId,
      int contextTimeToLiveInMinutes) throws JsonProcessingException {

    ObjectNode rootNode = mapper.createObjectNode();

    rootNode.put("scroll", contextTimeToLiveInMinutes + "m");
    rootNode.put("scroll_id", scrollId);

    return mapper.writeValueAsString(rootNode);
  }
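
  /*
   * For illustration, a TTL of 5 minutes and a hypothetical scroll id of "abc123" would yield:
   *
   *   {"scroll":"5m","scroll_id":"abc123"}
   *
   * which collectItemsFromScrollContext() POSTs to /_search/scroll to fetch the next page and
   * extend the context's lifetime.
   */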
  /**
   * Parses the elastic search result.
   *
   * @param jsonResult the json result
   * @return the json node
   * @throws JsonProcessingException the json processing exception
   * @throws IOException Signals that an I/O exception has occurred.
   */
  protected JsonNode parseElasticSearchResult(String jsonResult)
      throws JsonProcessingException, IOException {
    ObjectMapper mapper = new ObjectMapper();
    return mapper.readTree(jsonResult);
  }
  /**
   * Looks up the searchable entities whose ids appear in the supplied id list.
   *
   * @param ids the ids to look for
   * @param docs the docs to search through
   * @return the matching entities
   */
  protected ArrayList<SearchableEntity> lookupIndexDoc(ArrayList<String> ids,
      List<SearchableEntity> docs) {
    ArrayList<SearchableEntity> objs = new ArrayList<SearchableEntity>();

    if (ids != null && docs != null) {
      for (SearchableEntity d : docs) {
        if (ids.contains(d.getId())) {
          objs.add(d);
        }
      }
    }

    return objs;
  }
  /**
   * Builds the delete data object for a single document in a bulk delete request.
   *
   * @param index the index name
   * @param type the index document type
   * @param id the document id
   * @return the object node describing the delete operation
   */
  protected ObjectNode buildDeleteDataObject(String index, String type, String id) {

    ObjectNode indexDocProperties = mapper.createObjectNode();

    indexDocProperties.put("_index", index);
    indexDocProperties.put("_type", type);
    indexDocProperties.put("_id", id);

    ObjectNode rootNode = mapper.createObjectNode();
    rootNode.set("delete", indexDocProperties);

    return rootNode;
  }
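
  /*
   * Each object built above serializes to a single bulk-API action line, for example (with a
   * hypothetical index, type and id):
   *
   *   {"delete":{"_index":"entity-search-index","_type":"default","_id":"1234"}}
   *
   * bulkDelete() appends one such line per document id, newline-separated, to form the _bulk
   * request body.
   */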
  /**
   * This method might appear to be a little strange; it is simply an optimization that walks a
   * varargs JsonNode key path and retrieves the node at the end of the path, if it exists.
   *
   * @param startNode the start node
   * @param fieldPath the field path
   * @return the node at the end of the path, or null if any intermediate node is missing
   */
  protected JsonNode getNodePath(JsonNode startNode, String... fieldPath) {

    JsonNode jsonNode = null;

    for (String field : fieldPath) {
      if (jsonNode == null) {
        jsonNode = startNode.get(field);
      } else {
        jsonNode = jsonNode.get(field);
      }

      /*
       * This is our safety net in case any intermediate path returns a null
       */
      if (jsonNode == null) {
        return null;
      }
    }

    return jsonNode;
  }
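
  /*
   * Illustrative use (a hypothetical call, not taken from this class): getNodePath(rootNode,
   * "hits", "total") returns the node at rootNode -> "hits" -> "total", or null if either level
   * is absent.
   */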
  /**
   * Gets the full url for the given resource path on the configured Elasticsearch endpoint.
   *
   * @param resourceUrl the resource url
   * @return the full url
   */
  private String getFullUrl(String resourceUrl) {
    return String.format("http://%s:%s%s", endpointConfig.getEsIpAddress(),
        endpointConfig.getEsServerPort(), resourceUrl);
  }
  /**
   * Retrieves the identifiers of all documents currently stored in the index, using the
   * Elasticsearch scroll API to page through the full result set.
   *
   * @return the object id collection
   * @throws IOException Signals that an I/O exception has occurred.
   */
  public ObjectIdCollection retrieveAllDocumentIdentifiers() throws IOException {

    ObjectIdCollection currentDocumentIds = new ObjectIdCollection();

    long opStartTimeInMs = System.currentTimeMillis();

    List<String> fields = new ArrayList<String>();
    // fields.add("entityType");

    String scrollRequestPayload =
        buildInitialScrollRequestPayload(endpointConfig.getScrollContextBatchRequestSize(), fields);

    final String fullUrlStr = getFullUrl("/" + schemaConfig.getIndexName() + "/"
        + schemaConfig.getIndexDocType() + "/_search?scroll="
        + endpointConfig.getScrollContextTimeToLiveInMinutes() + "m");

    OperationResult result =
        esAdapter.doPost(fullUrlStr, scrollRequestPayload, MediaType.APPLICATION_JSON_TYPE);
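
    /*
     * The "?scroll=Nm" parameter on this initial _search request asks Elasticsearch to keep a
     * scroll context alive for N minutes; the response carries the first page of hits plus a
     * _scroll_id that collectItemsFromScrollContext() uses to pull the remaining pages.
     */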

    if (result.wasSuccessful()) {

      JsonNode rootNode = parseElasticSearchResult(result.getResult());

      /*
       * Check the result for success / failure, and enumerate all the index ids that resulted in
       * success, and ignore the ones that failed or log them so we have a record of the failure.
       */
      int totalRecordsAvailable = 0;
      String scrollId = null;
      int numRecordsFetched = 0;

      if (rootNode != null) {

        scrollId = getFieldValue(rootNode, "_scroll_id");
        final String tookStr = getFieldValue(rootNode, "took");
        int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr);
        boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out"));

        if (timedOut) {
          LOG.error(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "all document Identifiers",
              String.valueOf(tookInMs));
        } else {
          LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "all document Identifiers",
              String.valueOf(tookInMs));
        }

        JsonNode hitsNode = rootNode.get("hits");
        totalRecordsAvailable = Integer.parseInt(hitsNode.get("total").asText());

        LOG.info(AaiUiMsgs.COLLECT_TOTAL, "all document Identifiers",
            String.valueOf(totalRecordsAvailable));

        /*
         * Collect all object ids
         */
        ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits");

        Iterator<JsonNode> nodeIterator = hitsArray.iterator();

        String key = null;
        JsonNode jsonNode = null;

        while (nodeIterator.hasNext()) {

          jsonNode = nodeIterator.next();

          key = getFieldValue(jsonNode, "_id");

          if (key != null) {
            currentDocumentIds.addObjectId(key);
          }

          numRecordsFetched++;
        }

        int totalRecordsRemainingToFetch = (totalRecordsAvailable - numRecordsFetched);

        int numRequiredAdditionalFetches =
            (totalRecordsRemainingToFetch / endpointConfig.getScrollContextBatchRequestSize());

        /*
         * Do an additional fetch for the remaining items (if needed)
         */
        if (totalRecordsRemainingToFetch % endpointConfig.getScrollContextBatchRequestSize() != 0) {
          numRequiredAdditionalFetches += 1;
        }
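
        /*
         * Integer division plus the remainder check gives a ceiling. For example (values are
         * illustrative only), with a batch size of 1000 and 2500 records still to fetch,
         * 2500 / 1000 = 2 and 2500 % 1000 != 0, so three additional scroll fetches are issued.
         */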

        if (LOG.isDebugEnabled()) {
          LOG.debug(AaiUiMsgs.SYNC_NUMBER_REQ_FETCHES,
              String.valueOf(numRequiredAdditionalFetches));
        }

        for (int x = 0; x < numRequiredAdditionalFetches; x++) {

          if (collectItemsFromScrollContext(scrollId, currentDocumentIds) != OperationState.OK) {
            // abort the whole thing because now we can't reliably clean up the orphans.
            throw new IOException(
                "Failed to collect pre-sync doc collection from index. Aborting operation");
          }

          if (LOG.isDebugEnabled()) {
            LOG.debug(AaiUiMsgs.SYNC_NUMBER_TOTAL_FETCHES,
                String.valueOf(currentDocumentIds.getSize()),
                String.valueOf(totalRecordsAvailable));
          }
        }
      }

    } else {
      // scroll context get failed, nothing else to do
      LOG.error(AaiUiMsgs.ERROR_GENERIC, result.toString());
    }

    LOG.info(AaiUiMsgs.COLLECT_TOTAL_TIME, "all document Identifiers",
        String.valueOf((System.currentTimeMillis() - opStartTimeInMs)));

    return currentDocumentIds;
  }
  /**
   * Collects items from the scroll context.
   *
   * @param scrollId the scroll id returned by the previous scroll request
   * @param objectIds the collection that collected document ids are added to
   * @return the operation state
   * @throws IOException Signals that an I/O exception has occurred.
   */
  private OperationState collectItemsFromScrollContext(String scrollId,
      ObjectIdCollection objectIds) throws IOException {

    String requestPayload = buildSubsequentScrollContextRequestPayload(scrollId,
        endpointConfig.getScrollContextTimeToLiveInMinutes());

    final String fullUrlStr = getFullUrl("/_search/scroll");

    OperationResult opResult =
        esAdapter.doPost(fullUrlStr, requestPayload, MediaType.APPLICATION_JSON_TYPE);

    if (opResult.getResultCode() >= 300) {
      LOG.warn(AaiUiMsgs.ES_SCROLL_CONTEXT_ERROR, opResult.getResult());
      return OperationState.ERROR;
    }

    JsonNode rootNode = parseElasticSearchResult(opResult.getResult());
    boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out"));
    final String tookStr = getFieldValue(rootNode, "took");
    int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr);

    JsonNode hitsNode = rootNode.get("hits");

    /*
     * Check the result for success / failure, and enumerate all the index ids that resulted in
     * success, and ignore the ones that failed or log them so we have a record of the failure.
     */
    if (rootNode != null) {

      if (timedOut) {
        LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "Scroll Context", String.valueOf(tookInMs));
      } else {
        LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "Scroll Context", String.valueOf(tookInMs));
      }

      /*
       * Collect all object ids
       */
      ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits");

      String key = null;
      JsonNode jsonNode = null;

      Iterator<JsonNode> nodeIterator = hitsArray.iterator();

      while (nodeIterator.hasNext()) {

        jsonNode = nodeIterator.next();

        key = getFieldValue(jsonNode, "_id");

        if (key != null) {
          objectIds.addObjectId(key);
        }
      }
    }

    return OperationState.OK;
  }
  /**
   * Gets the field value.
   *
   * @param node the node
   * @param fieldName the field name
   * @return the field value as text, or null if the field is not present
   */
  protected String getFieldValue(JsonNode node, String fieldName) {

    JsonNode field = node.get(fieldName);

    if (field != null) {
      return field.asText();
    }

    return null;
  }
  /**
   * Issues a bulk delete to Elasticsearch for the supplied document ids.
   *
   * @param docIds the doc ids to delete
   * @return the operation result
   * @throws IOException Signals that an I/O exception has occurred.
   */
  public OperationResult bulkDelete(Collection<String> docIds) throws IOException {

    if (docIds == null || docIds.size() == 0) {
      LOG.info(AaiUiMsgs.ES_BULK_DELETE_SKIP);
      return new OperationResult(500,
          "Skipping bulkDelete() operation because docs to delete list is empty");
    }

    LOG.info(AaiUiMsgs.ES_BULK_DELETE_START, String.valueOf(docIds.size()));

    StringBuilder sb = new StringBuilder(128);

    for (String id : docIds) {
      sb.append(String.format(BULK_OP_LINE_TEMPLATE,
          buildDeleteDataObject(schemaConfig.getIndexName(), schemaConfig.getIndexDocType(), id)));
    }
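
    /*
     * The resulting body is newline-delimited: one {"delete":{...}} action line per document id,
     * each terminated by "\n" via BULK_OP_LINE_TEMPLATE, which is the format the Elasticsearch
     * _bulk endpoint expects for delete-only requests (delete actions carry no source line).
     */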

    final String fullUrlStr = getFullUrl("/_bulk");

    return esAdapter.doPost(fullUrlStr, sb.toString(), MediaType.APPLICATION_FORM_URLENCODED_TYPE);
  }
}