/*
 * ============LICENSE_START=======================================================
 * ================================================================================
 * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 * Copyright © 2017 Amdocs
 * ================================================================================
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ============LICENSE_END=========================================================
 *
 * ECOMP is a trademark and service mark of AT&T Intellectual Property.
 */
package org.openecomp.sparky.synchronizer;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

import org.openecomp.cl.api.Logger;
import org.openecomp.cl.eelf.LoggerFactory;
import org.openecomp.sparky.dal.rest.OperationResult;
import org.openecomp.sparky.dal.rest.RestDataProvider;
import org.openecomp.sparky.logging.AaiUiMsgs;
import org.openecomp.sparky.synchronizer.entity.ObjectIdCollection;
import org.openecomp.sparky.synchronizer.entity.SearchableEntity;
import org.openecomp.sparky.synchronizer.enumeration.OperationState;
/**
 * The Class ElasticSearchIndexCleaner.
 *
 * Compares the set of document ids present in an Elasticsearch index before and
 * after a sync operation and bulk-deletes the orphans left behind.
 */
49 public class ElasticSearchIndexCleaner implements IndexCleaner {
51 private static final Logger LOG =
52 LoggerFactory.getInstance().getLogger(ElasticSearchIndexCleaner.class);
54 private static final String BULK_OP_LINE_TEMPLATE = "%s\n";
55 private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";
57 private ObjectIdCollection before;
58 private ObjectIdCollection after;
63 private String indexName;
64 private String indexType;
65 private int scrollContextTimeToLiveInMinutes;
66 private int numItemsToGetBulkRequest;
68 private RestDataProvider restDataProvider;
69 private ObjectMapper mapper;
/**
 * Instantiates a new elastic search index cleaner.
 *
 * @param restDataProvider the rest data provider used for all ES REST calls
 * @param indexName the index name
 * @param indexType the index type
 * @param host the Elasticsearch host (consumed when building request URLs)
 * @param port the Elasticsearch port (consumed when building request URLs)
 * @param scrollContextTimeToLiveInMinutes the scroll context time to live in minutes
 * @param numItemsToGetBulkRequest the number of items to request per bulk/scroll page
 */
protected ElasticSearchIndexCleaner(RestDataProvider restDataProvider, String indexName,
    String indexType, String host, String port, int scrollContextTimeToLiveInMinutes,
    int numItemsToGetBulkRequest) {
  this.restDataProvider = restDataProvider;
  // host/port must be captured here: getFullUrl() reads these fields to build
  // every request URL issued by this class.
  this.host = host;
  this.port = port;
  this.indexName = indexName;
  this.indexType = indexType;
  this.mapper = new ObjectMapper();
  this.scrollContextTimeToLiveInMinutes = scrollContextTimeToLiveInMinutes;
  this.numItemsToGetBulkRequest = numItemsToGetBulkRequest;
}
98 * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePreOperationCollection()
101 public OperationState populatePreOperationCollection() {
104 before = retrieveAllDocumentIdentifiers();
105 return OperationState.OK;
106 } catch (Exception exc) {
107 LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, indexName, exc.getMessage());
108 return OperationState.ERROR;
114 * @see org.openecomp.sparky.synchronizer.IndexCleaner#populatePostOperationCollection()
117 public OperationState populatePostOperationCollection() {
119 after = retrieveAllDocumentIdentifiers();
120 return OperationState.OK;
121 } catch (Exception exc) {
122 LOG.error(AaiUiMsgs.ES_PRE_SYNC_FAILURE, indexName, exc.getMessage());
123 return OperationState.ERROR;
128 * @see org.openecomp.sparky.synchronizer.IndexCleaner#performCleanup()
131 public OperationState performCleanup() {
132 // TODO Auto-generated method stub
133 LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP, indexName);
135 int sizeBefore = before.getSize();
136 int sizeAfter = after.getSize();
138 LOG.info(AaiUiMsgs.ES_SYNC_CLEAN_UP_SIZE, String.valueOf(sizeBefore),
139 String.valueOf(sizeAfter));
142 * If the processedImportIds size <= 0, then something has failed in the sync operation and we
143 * shouldn't do the selective delete right now.
148 Collection<String> presyncIds = before.getImportedObjectIds();
149 presyncIds.removeAll(after.getImportedObjectIds());
152 LOG.info(AaiUiMsgs.ES_SYNC_SELECTIVE_DELETE, indexName, indexType,
153 String.valueOf(presyncIds.size()));
155 ObjectIdCollection bulkIds = new ObjectIdCollection();
157 Iterator<String> it = presyncIds.iterator();
158 int numItemsInBulkRequest = 0;
159 int numItemsRemainingToBeDeleted = presyncIds.size();
161 while (it.hasNext()) {
163 bulkIds.addObjectId(it.next());
164 numItemsInBulkRequest++;
166 if (numItemsInBulkRequest >= this.numItemsToGetBulkRequest) {
167 LOG.info(AaiUiMsgs.ES_BULK_DELETE, indexName, String.valueOf(bulkIds.getSize()));
168 OperationResult bulkDeleteResult = bulkDelete(bulkIds.getImportedObjectIds());
169 // pegCountersForElasticBulkDelete(bulkDeleteResult);
170 numItemsRemainingToBeDeleted -= numItemsInBulkRequest;
171 numItemsInBulkRequest = 0;
176 if (numItemsRemainingToBeDeleted > 0) {
177 LOG.info(AaiUiMsgs.ES_BULK_DELETE, indexName, String.valueOf(bulkIds.getSize()));
178 OperationResult bulkDeleteResult = bulkDelete(bulkIds.getImportedObjectIds());
179 // pegCountersForElasticBulkDelete(bulkDeleteResult);
183 } catch (Exception exc) {
184 LOG.error(AaiUiMsgs.ES_BULK_DELETE_ERROR, indexName, exc.getLocalizedMessage());
189 return OperationState.OK;
193 public String getIndexName() {
197 public void setIndexName(String indexName) {
198 this.indexName = indexName;
202 * Builds the initial scroll request payload.
204 * @param numItemsToGetPerRequest the num items to get per request
205 * @param fieldList the field list
207 * @throws JsonProcessingException the json processing exception
209 protected String buildInitialScrollRequestPayload(int numItemsToGetPerRequest,
210 List<String> fieldList) throws JsonProcessingException {
212 ObjectNode rootNode = mapper.createObjectNode();
213 rootNode.put("size", numItemsToGetPerRequest);
215 ArrayNode fields = mapper.createArrayNode();
217 for (String f : fieldList) {
221 rootNode.set("fields", fields);
223 ObjectNode queryNode = mapper.createObjectNode();
224 queryNode.set("match_all", mapper.createObjectNode());
226 rootNode.set("query", queryNode);
228 return mapper.writeValueAsString(rootNode);
233 * Builds the subsequent scroll context request payload.
235 * @param scrollId the scroll id
236 * @param contextTimeToLiveInMinutes the context time to live in minutes
238 * @throws JsonProcessingException the json processing exception
240 protected String buildSubsequentScrollContextRequestPayload(String scrollId,
241 int contextTimeToLiveInMinutes) throws JsonProcessingException {
243 ObjectNode rootNode = mapper.createObjectNode();
245 rootNode.put("scroll", contextTimeToLiveInMinutes + "m");
246 rootNode.put("scroll_id", scrollId);
248 return mapper.writeValueAsString(rootNode);
253 * Parses the elastic search result.
255 * @param jsonResult the json result
256 * @return the json node
257 * @throws JsonProcessingException the json processing exception
258 * @throws IOException Signals that an I/O exception has occurred.
260 protected JsonNode parseElasticSearchResult(String jsonResult)
261 throws JsonProcessingException, IOException {
262 ObjectMapper mapper = new ObjectMapper();
263 return mapper.readTree(jsonResult);
270 * @param docs the docs
271 * @return the array list
273 protected ArrayList<SearchableEntity> lookupIndexDoc(ArrayList<String> ids,
274 List<SearchableEntity> docs) {
275 ArrayList<SearchableEntity> objs = new ArrayList<SearchableEntity>();
277 if (ids != null && docs != null) {
278 for (SearchableEntity d : docs) {
279 if (ids.contains(d.getId())) {
289 * Builds the delete data object.
291 * @param index the index
292 * @param type the type
294 * @return the object node
296 protected ObjectNode buildDeleteDataObject(String index, String type, String id) {
298 ObjectNode indexDocProperties = mapper.createObjectNode();
300 indexDocProperties.put("_index", index);
301 indexDocProperties.put("_type", type);
302 indexDocProperties.put("_id", id);
304 ObjectNode rootNode = mapper.createObjectNode();
305 rootNode.set("delete", indexDocProperties);
311 * This method might appear to be a little strange, and is simply an optimization to take an
312 * elipsed JsonNode key path and retrieve the node at the end of the path, if it exists.
314 * @param startNode the start node
315 * @param fieldPath the field path
316 * @return the node path
318 protected JsonNode getNodePath(JsonNode startNode, String... fieldPath) {
320 JsonNode jsonNode = null;
322 for (String field : fieldPath) {
323 if (jsonNode == null) {
324 jsonNode = startNode.get(field);
326 jsonNode = jsonNode.get(field);
330 * This is our safety net in case any intermediate path returns a null
333 if (jsonNode == null) {
345 * @param resourceUrl the resource url
346 * @return the full url
348 private String getFullUrl(String resourceUrl) {
349 return String.format("http://%s:%s%s", host, port, resourceUrl);
353 * Retrieve all document identifiers.
355 * @return the object id collection
356 * @throws IOException Signals that an I/O exception has occurred.
358 public ObjectIdCollection retrieveAllDocumentIdentifiers() throws IOException {
360 ObjectIdCollection currentDocumentIds = new ObjectIdCollection();
362 long opStartTimeInMs = System.currentTimeMillis();
364 List<String> fields = new ArrayList<String>();
366 // fields.add("entityType");
368 String scrollRequestPayload =
369 buildInitialScrollRequestPayload(this.numItemsToGetBulkRequest, fields);
371 final String fullUrlStr = getFullUrl("/" + indexName + "/" + indexType + "/_search?scroll="
372 + this.scrollContextTimeToLiveInMinutes + "m");
374 OperationResult result =
375 restDataProvider.doPost(fullUrlStr, scrollRequestPayload, "application/json");
377 if (result.wasSuccessful()) {
379 JsonNode rootNode = parseElasticSearchResult(result.getResult());
382 * Check the result for success / failure, and enumerate all the index ids that resulted in
383 * success, and ignore the ones that failed or log them so we have a record of the failure.
385 int totalRecordsAvailable = 0;
386 String scrollId = null;
387 int numRecordsFetched = 0;
389 if (rootNode != null) {
391 scrollId = getFieldValue(rootNode, "_scroll_id");
392 final String tookStr = getFieldValue(rootNode, "took");
393 int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr);
394 boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out"));
397 LOG.error(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "all document Identifiers",
398 String.valueOf(tookInMs));
400 LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "all document Identifiers",
401 String.valueOf(tookInMs));
404 JsonNode hitsNode = rootNode.get("hits");
405 totalRecordsAvailable = Integer.parseInt(hitsNode.get("total").asText());
407 LOG.info(AaiUiMsgs.COLLECT_TOTAL, "all document Identifiers",
408 String.valueOf(totalRecordsAvailable));
411 * Collect all object ids
414 ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits");
416 Iterator<JsonNode> nodeIterator = hitsArray.iterator();
420 JsonNode jsonNode = null;
422 while (nodeIterator.hasNext()) {
424 jsonNode = nodeIterator.next();
426 key = getFieldValue(jsonNode, "_id");
429 currentDocumentIds.addObjectId(key);
435 * JsonNode fieldsNode = jNode.get("fields");
437 * if (fieldsNode != null) {
439 * JsonNode entityTypeNode = fieldsNode.get("entityType");
441 * if (entityTypeNode != null) { ArrayNode aNode = (ArrayNode) entityTypeNode;
443 * if (aNode.size() > 0) { value = aNode.get(0).asText(); objAndtTypesMap.put(key, value);
444 * numRecordsFetched++; } } } }
449 int totalRecordsRemainingToFetch = (totalRecordsAvailable - numRecordsFetched);
451 int numRequiredAdditionalFetches =
452 (totalRecordsRemainingToFetch / this.numItemsToGetBulkRequest);
455 * Do an additional fetch for the remaining items (if needed)
458 if (totalRecordsRemainingToFetch % numItemsToGetBulkRequest != 0) {
459 numRequiredAdditionalFetches += 1;
462 if (LOG.isDebugEnabled()) {
463 LOG.debug(AaiUiMsgs.SYNC_NUMBER_REQ_FETCHES,
464 String.valueOf(numRequiredAdditionalFetches));
468 for (int x = 0; x < numRequiredAdditionalFetches; x++) {
470 if (collectItemsFromScrollContext(scrollId, currentDocumentIds) != OperationState.OK) {
471 // abort the whole thing because now we can't reliably cleanup the orphans.
472 throw new IOException(
473 "Failed to collect pre-sync doc collection from index. Aborting operation");
475 if (LOG.isDebugEnabled()) {
476 LOG.debug(AaiUiMsgs.SYNC_NUMBER_TOTAL_FETCHES,
477 String.valueOf(currentDocumentIds.getSize()),
478 String.valueOf(totalRecordsAvailable));
486 // scroll context get failed, nothing else to do
487 LOG.error(AaiUiMsgs.ERROR_GENERIC, result.toString());
490 LOG.info(AaiUiMsgs.COLLECT_TOTAL_TIME, "all document Identifiers",
491 String.valueOf((System.currentTimeMillis() - opStartTimeInMs)));
493 return currentDocumentIds;
498 * Collect items from scroll context.
500 * @param scrollId the scroll id
501 * @param objectIds the object ids
502 * @return the operation state
503 * @throws IOException Signals that an I/O exception has occurred.
505 private OperationState collectItemsFromScrollContext(String scrollId,
506 ObjectIdCollection objectIds) throws IOException {
508 // ObjectIdCollection documentIdCollection = new ObjectIdCollection();
510 String requestPayload =
511 buildSubsequentScrollContextRequestPayload(scrollId, scrollContextTimeToLiveInMinutes);
513 final String fullUrlStr = getFullUrl("/_search/scroll");
515 OperationResult opResult =
516 restDataProvider.doPost(fullUrlStr, requestPayload, "application/json");
518 if (opResult.getResultCode() >= 300) {
519 LOG.warn(AaiUiMsgs.ES_SCROLL_CONTEXT_ERROR, opResult.getResult());
520 return OperationState.ERROR;
523 JsonNode rootNode = parseElasticSearchResult(opResult.getResult());
526 * Check the result for success / failure, and enumerate all the index ids that resulted in
527 * success, and ignore the ones that failed or log them so we have a record of the failure.
530 if (rootNode != null) {
531 boolean timedOut = Boolean.parseBoolean(getFieldValue(rootNode, "timed_out"));
532 final String tookStr = getFieldValue(rootNode, "took");
533 int tookInMs = (tookStr == null) ? 0 : Integer.parseInt(tookStr);
535 JsonNode hitsNode = rootNode.get("hits");
538 LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_ERROR, "Scroll Context", String.valueOf(tookInMs));
540 LOG.info(AaiUiMsgs.COLLECT_TIME_WITH_SUCCESS, "Scroll Context", String.valueOf(tookInMs));
544 * Collect all object ids
547 ArrayNode hitsArray = (ArrayNode) hitsNode.get("hits");
550 JsonNode jsonNode = null;
552 Iterator<JsonNode> nodeIterator = hitsArray.iterator();
554 while (nodeIterator.hasNext()) {
556 jsonNode = nodeIterator.next();
558 key = getFieldValue(jsonNode, "_id");
561 objectIds.addObjectId(key);
564 * JsonNode fieldsNode = jNode.get("fields");
566 * if (fieldsNode != null) {
568 * JsonNode entityTypeNode = fieldsNode.get("entityType");
570 * if (entityTypeNode != null) { ArrayNode aNode = (ArrayNode) entityTypeNode;
572 * if (aNode.size() > 0) { value = aNode.get(0).asText(); objectIdsAndTypes.put(key,
580 // scroll context get failed, nothing else to do
581 LOG.error(AaiUiMsgs.ERROR_GENERIC, opResult.toString());
584 return OperationState.OK;
588 * Gets the field value.
590 * @param node the node
591 * @param fieldName the field name
592 * @return the field value
594 protected String getFieldValue(JsonNode node, String fieldName) {
596 JsonNode field = node.get(fieldName);
599 return field.asText();
609 * @param docIds the doc ids
610 * @return the operation result
611 * @throws IOException Signals that an I/O exception has occurred.
613 public OperationResult bulkDelete(Collection<String> docIds) throws IOException {
615 if (docIds == null || docIds.size() == 0) {
616 LOG.info(AaiUiMsgs.ES_BULK_DELETE_SKIP);
617 return new OperationResult(500,
618 "Skipping bulkDelete(); operation because docs to delete list is empty");
621 LOG.info(AaiUiMsgs.ES_BULK_DELETE_START, String.valueOf(docIds.size()));
623 StringBuilder sb = new StringBuilder(128);
625 for (String id : docIds) {
627 String.format(BULK_OP_LINE_TEMPLATE, buildDeleteDataObject(indexName, indexType, id)));
632 final String fullUrlStr = getFullUrl("/_bulk");
634 return restDataProvider.doPost(fullUrlStr, sb.toString(), "application/x-www-form-urlencoded");