From: Toine Siebelink Date: Mon, 23 Jan 2023 12:07:27 +0000 (+0000) Subject: Merge "Added depth parameter in query nodes API." X-Git-Tag: 3.2.1~7 X-Git-Url: https://gerrit.onap.org/r/gitweb?a=commitdiff_plain;h=c6bebbcfc4dbef5e91245b5f69714b238c0515af;hp=cc21a025308153e8f187cd3e82cf828191d7b387;p=cps.git Merge "Added depth parameter in query nodes API." --- diff --git a/cps-application/src/main/resources/application.yml b/cps-application/src/main/resources/application.yml index b5b10b0f7..f7fe46e4e 100644 --- a/cps-application/src/main/resources/application.yml +++ b/cps-application/src/main/resources/application.yml @@ -1,7 +1,7 @@ # ============LICENSE_START======================================================= # Copyright (C) 2021 Pantheon.tech # Modifications Copyright (C) 2021-2022 Bell Canada -# Modifications Copyright (C) 2021-2022 Nordix Foundation +# Modifications Copyright (C) 2021-2023 Nordix Foundation # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -84,16 +84,15 @@ spring: properties: spring.deserializer.key.delegate.class: org.apache.kafka.common.serialization.StringDeserializer spring.deserializer.value.delegate.class: org.springframework.kafka.support.serializer.JsonDeserializer - spring.json.value.default.type: org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent spring.json.use.type.headers: false jackson: - default-property-inclusion: NON_NULL - serialization: - FAIL_ON_EMPTY_BEANS: false + default-property-inclusion: NON_NULL + serialization: + FAIL_ON_EMPTY_BEANS: false sql: - init: - mode: ALWAYS + init: + mode: ALWAYS app: ncmp: async-m2m: @@ -104,6 +103,7 @@ app: events: topic: ${LCM_EVENTS_TOPIC:ncmp-events} + notification: enabled: true data-updated: diff --git a/cps-ncmp-events/src/main/resources/schemas/avc-event-schema-v1.json b/cps-ncmp-events/src/main/resources/schemas/avc-event-schema-v1.json new file mode 100644 index 000000000..6db03f6eb --- /dev/null +++ b/cps-ncmp-events/src/main/resources/schemas/avc-event-schema-v1.json @@ -0,0 +1,57 @@ +{ + "$schema": "https://json-schema.org/draft/2019-09/schema", + "$id": "urn:cps:org.onap.cps.ncmp.events:avc-event-schema:v1", + "$ref": "#/definitions/AvcEvent", + "definitions": { + "AvcEvent": { + "description": "The payload for AVC event.", + "type": "object", + "properties": { + "eventId": { + "description": "The unique id identifying the event generated by DMI for this AVC event.", + "type": "string" + }, + "eventCorrelationId": { + "description": "The request id passed by NCMP for this AVC event.", + "type": "string" + }, + "eventTime": { + "description": "The time of the AVC event. 
The expected format is 'yyyy-MM-dd'T'HH:mm:ss.SSSZ'.", + "type": "string" + }, + "eventTarget": { + "description": "The target of the AVC event.", + "type": "string" + }, + "eventType": { + "description": "The type of the AVC event.", + "type": "string" + }, + "eventSchema": { + "description": "The event schema for AVC events.", + "type": "string" + }, + "eventSchemaVersion": { + "description": "The event schema version for AVC events.", + "type": "string" + }, + "event": { + "$ref": "#/definitions/Event" + } + }, + "required": [ + "eventId", + "eventCorrelationId", + "eventTime", + "eventTarget", + "eventType", + "eventSchema", + "eventSchemaVersion" + ] + }, + "Event": { + "description": "The AVC event content.", + "type": "object" + } + } +} \ No newline at end of file diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImpl.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImpl.java index d00d2119b..5aad404e6 100755 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImpl.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImpl.java @@ -93,8 +93,12 @@ public class NetworkCmProxyDataServiceImpl implements NetworkCmProxyDataService final DmiPluginRegistration dmiPluginRegistration) { dmiPluginRegistration.validateDmiPluginRegistration(); final DmiPluginRegistrationResponse dmiPluginRegistrationResponse = new DmiPluginRegistrationResponse(); - dmiPluginRegistrationResponse.setRemovedCmHandles( - parseAndRemoveCmHandlesInDmiRegistration(dmiPluginRegistration.getRemovedCmHandles())); + + if (!dmiPluginRegistration.getRemovedCmHandles().isEmpty()) { + dmiPluginRegistrationResponse.setRemovedCmHandles( + parseAndRemoveCmHandlesInDmiRegistration(dmiPluginRegistration.getRemovedCmHandles())); + } + if (!dmiPluginRegistration.getCreatedCmHandles().isEmpty()) { dmiPluginRegistrationResponse.setCreatedCmHandles( parseAndCreateCmHandlesInDmiRegistrationAndSyncModules(dmiPluginRegistration)); @@ -321,15 +325,13 @@ public class NetworkCmProxyDataServiceImpl implements NetworkCmProxyDataService final List tobeRemovedCmHandles) { final List cmHandleRegistrationResponses = new ArrayList<>(tobeRemovedCmHandles.size()); + + setState(tobeRemovedCmHandles, CmHandleState.DELETING); + for (final String cmHandleId : tobeRemovedCmHandles) { try { - final YangModelCmHandle yangModelCmHandle = inventoryPersistence.getYangModelCmHandle(cmHandleId); - lcmEventsCmHandleStateHandler.updateCmHandleState(yangModelCmHandle, - CmHandleState.DELETING); deleteCmHandleFromDbAndModuleSyncMap(cmHandleId); cmHandleRegistrationResponses.add(CmHandleRegistrationResponse.createSuccessResponse(cmHandleId)); - lcmEventsCmHandleStateHandler.updateCmHandleState(yangModelCmHandle, - CmHandleState.DELETED); } catch (final DataNodeNotFoundException dataNodeNotFoundException) { log.error("Unable to find dataNode for cmHandleId : {} , caused by : {}", cmHandleId, dataNodeNotFoundException.getMessage()); @@ -347,9 +349,22 @@ public class NetworkCmProxyDataServiceImpl implements NetworkCmProxyDataService CmHandleRegistrationResponse.createFailureResponse(cmHandleId, exception)); } } + + setState(tobeRemovedCmHandles, CmHandleState.DELETED); + return cmHandleRegistrationResponses; } + private void setState(final List tobeRemovedCmHandles, final CmHandleState cmHandleState) { + final Map cmHandleIdsToBeRemoved = new HashMap<>(); + for (final String cmHandleId : tobeRemovedCmHandles) 
{ + cmHandleIdsToBeRemoved.put( + inventoryPersistence.getYangModelCmHandle(cmHandleId), + cmHandleState); + } + lcmEventsCmHandleStateHandler.updateCmHandleStateBatch(cmHandleIdsToBeRemoved); + } + private void deleteCmHandleFromDbAndModuleSyncMap(final String cmHandleId) { inventoryPersistence.deleteSchemaSetWithCascade(cmHandleId); inventoryPersistence.deleteListOrListElement("/dmi-registry/cm-handles[@id='" + cmHandleId + "']"); diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java index a9e7164fd..bc6624dee 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/async/NcmpAsyncRequestResponseEventConsumer.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2022 Nordix Foundation. + * Copyright (c) 2023 Nordix Foundation. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -45,7 +45,9 @@ public class NcmpAsyncRequestResponseEventConsumer { * * @param dmiAsyncRequestResponseEvent the event to be consumed and produced. */ - @KafkaListener(topics = "${app.ncmp.async-m2m.topic}") + @KafkaListener( + topics = "${app.ncmp.async-m2m.topic}", + properties = {"spring.json.value.default.type=org.onap.cps.ncmp.event.model.DmiAsyncRequestResponseEvent"}) public void consumeAndForward(final DmiAsyncRequestResponseEvent dmiAsyncRequestResponseEvent) { log.debug("Consuming event {} ...", dmiAsyncRequestResponseEvent); diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java index 5154be799..5c3cb60c2 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/config/embeddedcache/SynchronizationCacheConfig.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================== - * Copyright (C) 2022 Nordix Foundation + * Copyright (C) 2022-2023 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
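Note: the next hunk doubles MODULE_SYNC_STARTED_TTL_SECS from 60 to 120 seconds. This constant bounds how long a cm handle stays marked in the Hazelcast cache as having its module sync in progress (the map name moduleSyncStartedOnCmHandles appears later in this patch in ModuleSyncTasks); once the TTL lapses the marker expires, so a stuck sync cannot block the handle indefinitely. A minimal, illustrative Java sketch of a per-entry TTL in Hazelcast follows; the call site, value and whether CPS applies the TTL per entry or via map configuration are assumptions, since this patch only changes the constant:

    import java.util.concurrent.TimeUnit;
    import com.hazelcast.core.Hazelcast;
    import com.hazelcast.core.HazelcastInstance;
    import com.hazelcast.map.IMap;

    class ModuleSyncTtlSketch {
        public static void main(String[] args) {
            HazelcastInstance hazelcastInstance = Hazelcast.newHazelcastInstance();
            IMap<String, String> moduleSyncStartedOnCmHandles =
                    hazelcastInstance.getMap("moduleSyncStartedOnCmHandles");
            // hypothetical call site: the entry disappears automatically once the TTL elapses
            moduleSyncStartedOnCmHandles.put("cm-handle-1", "Started",
                    120, TimeUnit.SECONDS); // 120 = the new MODULE_SYNC_STARTED_TTL_SECS value
        }
    }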
@@ -38,7 +38,7 @@ import org.springframework.context.annotation.Configuration; @Configuration public class SynchronizationCacheConfig { - public static final int MODULE_SYNC_STARTED_TTL_SECS = 60; + public static final int MODULE_SYNC_STARTED_TTL_SECS = 120; public static final int DATA_SYNC_SEMAPHORE_TTL_SECS = 1800; private static final QueueConfig commonQueueConfig = createQueueConfig(); diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventConsumer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventConsumer.java new file mode 100644 index 000000000..79a36bf50 --- /dev/null +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventConsumer.java @@ -0,0 +1,53 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (c) 2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.api.impl.notifications.avc; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.ncmp.event.model.AvcEvent; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.stereotype.Component; + +/** + * Listener for AVC events. + */ +@Component +@Slf4j +@RequiredArgsConstructor +@ConditionalOnProperty(name = "notification.enabled", havingValue = "true", matchIfMissing = true) +public class AvcEventConsumer { + + private final AvcEventProducer avcEventProducer; + + /** + * Consume the specified event. + * + * @param avcEvent the event to be consumed and produced. + */ + @KafkaListener( + topics = "dmi-cm-events", + properties = {"spring.json.value.default.type=org.onap.cps.ncmp.event.model.AvcEvent"}) + public void consumeAndForward(final AvcEvent avcEvent) { + log.debug("Consuming AVC event {} ...", avcEvent); + avcEventProducer.sendMessage(avcEvent); + } +} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventMapper.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventMapper.java new file mode 100644 index 000000000..531de4641 --- /dev/null +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventMapper.java @@ -0,0 +1,50 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.api.impl.notifications.avc; + +import java.util.UUID; +import org.mapstruct.Mapper; +import org.mapstruct.Mapping; +import org.mapstruct.Named; +import org.onap.cps.ncmp.event.model.AvcEvent; + + +/** + * Mapper for converting incoming {@link AvcEvent} to outgoing {@link AvcEvent}. + */ +@Mapper(componentModel = "spring") +public interface AvcEventMapper { + + @Mapping(source = "eventTime", target = "eventTime") + @Mapping(source = "eventId", target = "eventId", qualifiedByName = "avcEventId") + @Mapping(source = "eventCorrelationId", target = "eventCorrelationId") + @Mapping(source = "eventSchema", target = "eventSchema") + @Mapping(source = "eventSchemaVersion", target = "eventSchemaVersion") + @Mapping(source = "eventTarget", target = "eventTarget") + @Mapping(source = "eventType", target = "eventType") + AvcEvent toOutgoingAvcEvent(AvcEvent incomingAvcEvent); + + @Named("avcEventId") + static String getAvcEventId(String eventId) { + return UUID.randomUUID().toString(); + } + +} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventProducer.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventProducer.java new file mode 100644 index 000000000..049f66100 --- /dev/null +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventProducer.java @@ -0,0 +1,52 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.api.impl.notifications.avc; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.ncmp.event.model.AvcEvent; +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.stereotype.Service; + +/** + * Producer for AVC events. + */ +@Slf4j +@Service +@RequiredArgsConstructor +public class AvcEventProducer { + + private final KafkaTemplate kafkaTemplate; + + private final AvcEventMapper avcEventMapper; + + /** + * Sends message to the configured topic with a message key. 
+ * + * @param incomingAvcEvent message payload + */ + public void sendMessage(final AvcEvent incomingAvcEvent) { + // generate new event id while keeping other data + final AvcEvent outgoingAvcEvent = avcEventMapper.toOutgoingAvcEvent(incomingAvcEvent); + log.debug("Forwarding AVC event {} to topic {} ", outgoingAvcEvent.getEventId(), "cm-events"); + kafkaTemplate.send("cm-events", outgoingAvcEvent.getEventId(), outgoingAvcEvent); + } +} diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncTasks.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncTasks.java index 004ef289a..3fbebe077 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncTasks.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncTasks.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation + * Copyright (C) 2022-2023 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -97,6 +97,7 @@ public class ModuleSyncTasks { for (final YangModelCmHandle failedCmHandle : failedCmHandles) { final CompositeState compositeState = failedCmHandle.getCompositeState(); final boolean isReadyForRetry = syncUtils.isReadyForRetry(compositeState); + log.info("Retry for cmHandleId : {} is {}", failedCmHandle.getId(), isReadyForRetry); if (isReadyForRetry) { final String resetCmHandleId = failedCmHandle.getId(); log.debug("Reset cm handle {} state to ADVISED to be re-attempted by module-sync watchdog", @@ -115,7 +116,7 @@ public class ModuleSyncTasks { private void removeResetCmHandleFromModuleSyncMap(final String resetCmHandleId) { if (moduleSyncStartedOnCmHandles.remove(resetCmHandleId) != null) { - log.debug("{} removed from in progress map", resetCmHandleId); + log.info("{} removed from in progress map", resetCmHandleId); } } } diff --git a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncWatchdog.java b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncWatchdog.java index f629b71d2..8acaa0abe 100644 --- a/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncWatchdog.java +++ b/cps-ncmp-service/src/main/java/org/onap/cps/ncmp/api/inventory/sync/ModuleSyncWatchdog.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation + * Copyright (C) 2022-2023 Nordix Foundation * Modifications Copyright (C) 2022 Bell Canada * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); @@ -88,11 +88,13 @@ public class ModuleSyncWatchdog { public void resetPreviouslyFailedCmHandles() { log.info("Processing module sync retry-watchdog waking up."); final List failedCmHandles = syncUtils.getModuleSyncFailedCmHandles(); + log.info("Retrying {} cmHandles", failedCmHandles.size()); moduleSyncTasks.resetFailedCmHandles(failedCmHandles); } private void preventBusyWait() { try { + log.info("Busy waiting now"); TimeUnit.MILLISECONDS.sleep(PREVENT_CPU_BURN_WAIT_TIME_MILLIS); } catch (final InterruptedException e) { Thread.currentThread().interrupt(); @@ -108,6 +110,7 @@ public class ModuleSyncWatchdog { 
log.warn("Unable to add cm handle {} to the work queue", advisedCmHandle.getLeaves().get("id")); } } + log.info("Work Queue Size : {}", moduleSyncWorkQueue.size()); } } diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImplRegistrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImplRegistrationSpec.groovy index e6c79f89a..1ebd69eb6 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImplRegistrationSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/NetworkCmProxyDataServiceImplRegistrationSpec.groovy @@ -252,7 +252,7 @@ class NetworkCmProxyDataServiceImplRegistrationSpec extends Specification { when: 'registration is updated to delete cmhandle' def response = objectUnderTest.updateDmiRegistrationAndSyncModule(dmiPluginRegistration) then: 'the cmHandle state is updated to "DELETING"' - 1 * mockLcmEventsCmHandleStateHandler.updateCmHandleState(_, CmHandleState.DELETING) + 1 * mockLcmEventsCmHandleStateHandler.updateCmHandleStateBatch(_) and: 'method to delete relevant schema set is called once' 1 * mockInventoryPersistence.deleteSchemaSetWithCascade(_) and: 'method to delete relevant list/list element is called once' @@ -264,7 +264,7 @@ class NetworkCmProxyDataServiceImplRegistrationSpec extends Specification { assert it.cmHandle == 'cmhandle' } and: 'the cmHandle state is updated to "DELETED"' - 1 * mockLcmEventsCmHandleStateHandler.updateCmHandleState(_, CmHandleState.DELETED) + 1 * mockLcmEventsCmHandleStateHandler.updateCmHandleStateBatch(_) where: scenario | schemaSetExist 'schema-set exists and can be deleted successfully' | true diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventProducerIntegrationSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventProducerIntegrationSpec.groovy new file mode 100644 index 000000000..0089f777d --- /dev/null +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/impl/notifications/avc/AvcEventProducerIntegrationSpec.groovy @@ -0,0 +1,83 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (c) 2023 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.ncmp.api.impl.notifications.avc + +import com.fasterxml.jackson.databind.ObjectMapper +import org.apache.kafka.clients.consumer.KafkaConsumer +import org.mapstruct.factory.Mappers +import org.onap.cps.ncmp.api.impl.async.NcmpAsyncRequestResponseEventMapper +import org.onap.cps.ncmp.api.kafka.MessagingBaseSpec +import org.onap.cps.ncmp.event.model.AvcEvent +import org.onap.cps.ncmp.utils.TestUtils +import org.onap.cps.utils.JsonObjectMapper +import org.spockframework.spring.SpringBean +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.boot.test.context.SpringBootTest +import org.springframework.test.annotation.DirtiesContext +import org.testcontainers.spock.Testcontainers + +import java.time.Duration + +@SpringBootTest(classes = [AvcEventProducer, AvcEventConsumer, ObjectMapper, JsonObjectMapper]) +@Testcontainers +@DirtiesContext +class AvcEventProducerIntegrationSpec extends MessagingBaseSpec { + + @SpringBean + AvcEventMapper avcEventMapper = Mappers.getMapper(AvcEventMapper.class) + + @SpringBean + AvcEventProducer avcEventProducer = new AvcEventProducer(kafkaTemplate, avcEventMapper) + + @SpringBean + AvcEventConsumer acvEventConsumer = new AvcEventConsumer(avcEventProducer) + + @Autowired + JsonObjectMapper jsonObjectMapper + + def kafkaConsumer = new KafkaConsumer<>(consumerConfigProperties('ncmp-group')) + + def 'Consume and forward valid message'() { + given: 'consumer has a subscription' + kafkaConsumer.subscribe(['cm-events'] as List) + and: 'an event is sent' + def jsonData = TestUtils.getResourceFileContent('sampleAvcInputEvent.json') + def testEventSent = jsonObjectMapper.convertJsonString(jsonData, AvcEvent.class) + when: 'the event is consumed' + acvEventConsumer.consumeAndForward(testEventSent) + and: 'the topic is polled' + def records = kafkaConsumer.poll(Duration.ofMillis(1500)) + then: 'poll returns one record' + assert records.size() == 1 + and: 'record can be converted to AVC event' + def record = records.iterator().next() + def convertedAvcEvent = jsonObjectMapper.convertJsonString(record.value(), AvcEvent) + and: 'consumed forwarded NCMP event id differs from DMI event id' + assert testEventSent.eventId != convertedAvcEvent.getEventId() + and: 'correlation id matches' + assert testEventSent.eventCorrelationId == convertedAvcEvent.getEventCorrelationId() + and: 'timestamps match' + assert testEventSent.eventTime == convertedAvcEvent.getEventTime() + and: 'target matches' + assert testEventSent.eventTarget == convertedAvcEvent.getEventTarget() + } + +} \ No newline at end of file diff --git a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy index f7c41ecdf..bb0ce8745 100644 --- a/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy +++ b/cps-ncmp-service/src/test/groovy/org/onap/cps/ncmp/api/kafka/MessagingBaseSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (c) 2022 Nordix Foundation. + * Copyright (c) 2023 Nordix Foundation. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,14 +33,14 @@ import spock.lang.Specification class MessagingBaseSpec extends Specification { - static { - Runtime.getRuntime().addShutdownHook(new Thread(kafkaTestContainer::stop)) - } - def setupSpec() { kafkaTestContainer.start() } + def cleanupSpec() { + kafkaTestContainer.stop() + } + static kafkaTestContainer = new KafkaContainer(DockerImageName.parse('registry.nordix.org/onaptest/confluentinc/cp-kafka:6.2.1').asCompatibleSubstituteFor('confluentinc/cp-kafka')) def producerConfigProperties() { diff --git a/cps-ncmp-service/src/test/resources/sampleAvcInputEvent.json b/cps-ncmp-service/src/test/resources/sampleAvcInputEvent.json new file mode 100644 index 000000000..d7d252b9a --- /dev/null +++ b/cps-ncmp-service/src/test/resources/sampleAvcInputEvent.json @@ -0,0 +1,12 @@ +{ + "eventId": "4cb32729-85e3-44d1-aa6e-c923b9b059a5", + "eventCorrelationId": "68f15800-8ed4-4bae-9e53-27a9e03e1911", + "eventTime": "2022-12-12T14:29:23.876+0000", + "eventTarget": "NCMP", + "eventType": "org.onap.cps.ncmp.event.model.AvcEvent", + "eventSchema": "urn:cps:org.onap.cps.ncmp.event.model.AvcEvent", + "eventSchemaVersion": "v1", + "event": { + "payload": "Hello world!" + } +} \ No newline at end of file diff --git a/cps-path-parser/src/test/groovy/org/onap/cps/cpspath/parser/performance/CpsPathUtilPerfTest.groovy b/cps-path-parser/src/test/groovy/org/onap/cps/cpspath/parser/performance/CpsPathUtilPerfTest.groovy index 2ba20c1c5..e5e304b47 100644 --- a/cps-path-parser/src/test/groovy/org/onap/cps/cpspath/parser/performance/CpsPathUtilPerfTest.groovy +++ b/cps-path-parser/src/test/groovy/org/onap/cps/cpspath/parser/performance/CpsPathUtilPerfTest.groovy @@ -35,9 +35,9 @@ class CpsPathUtilPerfTest extends Specification { CpsPathUtil.getNormalizedXpath('//child[@other-leaf=1]/leaf-name[text()="search"]/ancestor::parent') } stopWatch.stop() - then: 'it takes less then 1,000 milliseconds' + then: 'it takes less then 1,100 milliseconds' // In CI this actually takes about 0.3-0.5 sec which is approx. 50+ parser executions per millisecond! - assert stopWatch.getTotalTimeMillis() < 1000 + assert stopWatch.getTotalTimeMillis() < 1100 } } diff --git a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java index 3bd299430..06ee8ecad 100644 --- a/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2022 Nordix Foundation + * Copyright (C) 2021-2023 Nordix Foundation * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2020-2022 Bell Canada. * Modifications Copyright (C) 2022 TechMahindra Ltd. 
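Note: the following hunk adds a batch read, getDataNodes(dataspaceName, anchorName, xpaths, fetchDescendantsOption), to CpsDataPersistenceServiceImpl. Each xpath is normalised, any xpath that fails to parse is logged and skipped rather than failing the whole call, and the remaining xpaths are resolved through a single repository query. A minimal caller sketch using only names visible in this patch (the injected service parameter and the example xpaths, taken from the integration test further down, are illustrative):

    import java.util.Collection;
    import java.util.List;
    import org.onap.cps.spi.CpsDataPersistenceService;
    import org.onap.cps.spi.FetchDescendantsOption;
    import org.onap.cps.spi.model.DataNode;

    class BatchReadSketch {
        // 'cpsDataPersistenceService' would be the Spring-injected implementation extended by this patch
        Collection<DataNode> readSomeNodes(CpsDataPersistenceService cpsDataPersistenceService) {
            // the invalid xpath is logged and skipped, so only two data nodes are returned
            return cpsDataPersistenceService.getDataNodes(
                    "my-dataspace", "my-anchor",
                    List.of("/parent-200", "/parent-201/child-204[@key='A']", "INVALID XPATH"),
                    FetchDescendantsOption.OMIT_DESCENDANTS);
        }
    }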
@@ -256,6 +256,27 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService return toDataNode(fragmentEntity, fetchDescendantsOption); } + @Override + public Collection getDataNodes(final String dataspaceName, final String anchorName, + final Collection xpaths, + final FetchDescendantsOption fetchDescendantsOption) { + final DataspaceEntity dataspaceEntity = dataspaceRepository.getByName(dataspaceName); + final AnchorEntity anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, anchorName); + + final Set normalizedXpaths = new HashSet<>(xpaths.size()); + for (final String xpath : xpaths) { + try { + normalizedXpaths.add(CpsPathUtil.getNormalizedXpath(xpath)); + } catch (final PathParsingException e) { + log.warn("Error parsing xpath \"{}\" in getDataNodes: {}", xpath, e.getMessage()); + } + } + + final List fragmentEntities = + fragmentRepository.findByAnchorAndMultipleCpsPaths(anchorEntity.getId(), normalizedXpaths); + return toDataNodes(fragmentEntities, fetchDescendantsOption); + } + private FragmentEntity getFragmentWithoutDescendantsByXpath(final String dataspaceName, final String anchorName, final String xpath) { @@ -317,7 +338,7 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService } fragmentEntities = fragmentRepository.findByAnchorAndCpsPath(anchorEntity.getId(), cpsPathQuery); if (cpsPathQuery.hasAncestorAxis()) { - fragmentEntities = getAncestorFragmentEntities(anchorEntity, cpsPathQuery, fragmentEntities); + fragmentEntities = getAncestorFragmentEntities(anchorEntity.getId(), cpsPathQuery, fragmentEntities); } return createDataNodesFromProxiedFragmentEntities(fetchDescendantsOption, anchorEntity, fragmentEntities); } @@ -338,18 +359,17 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService fragmentRepository.quickFindWithDescendants(anchorEntity.getId(), xpathRegex); fragmentEntities = FragmentEntityArranger.toFragmentEntityTrees(anchorEntity, fragmentExtracts); if (cpsPathQuery.hasAncestorAxis()) { - fragmentEntities = getAncestorFragmentEntities(anchorEntity, cpsPathQuery, fragmentEntities); + fragmentEntities = getAncestorFragmentEntities(anchorEntity.getId(), cpsPathQuery, fragmentEntities); } return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities); } - private Collection getAncestorFragmentEntities(final AnchorEntity anchorEntity, + private Collection getAncestorFragmentEntities(final int anchorId, final CpsPathQuery cpsPathQuery, - Collection fragmentEntities) { - final Set ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery); - fragmentEntities = ancestorXpaths.isEmpty() ? Collections.emptyList() - : fragmentRepository.findAllByAnchorAndXpathIn(anchorEntity, ancestorXpaths); - return fragmentEntities; + final Collection fragmentEntities) { + final Collection ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery); + return ancestorXpaths.isEmpty() ? 
Collections.emptyList() + : fragmentRepository.findByAnchorAndMultipleCpsPaths(anchorId, ancestorXpaths); } private List createDataNodesFromProxiedFragmentEntities( @@ -435,6 +455,15 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService .withChildDataNodes(childDataNodes).build(); } + private Collection toDataNodes(final Collection fragmentEntities, + final FetchDescendantsOption fetchDescendantsOption) { + final Collection dataNodes = new ArrayList<>(fragmentEntities.size()); + for (final FragmentEntity fragmentEntity : fragmentEntities) { + dataNodes.add(toDataNode(fragmentEntity, fetchDescendantsOption)); + } + return dataNodes; + } + private List getChildDataNodes(final FragmentEntity fragmentEntity, final FetchDescendantsOption fetchDescendantsOption) { if (fetchDescendantsOption.hasNext()) { @@ -447,9 +476,11 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService @Override public void updateDataLeaves(final String dataspaceName, final String anchorName, final String xpath, - final Map leaves) { + final Map updateLeaves) { final FragmentEntity fragmentEntity = getFragmentWithoutDescendantsByXpath(dataspaceName, anchorName, xpath); - fragmentEntity.setAttributes(jsonObjectMapper.asJsonString(leaves)); + final String currentLeavesAsString = fragmentEntity.getAttributes(); + final String mergedLeaves = mergeLeaves(updateLeaves, currentLeavesAsString); + fragmentEntity.setAttributes(mergedLeaves); fragmentRepository.save(fragmentEntity); } @@ -694,4 +725,14 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService private static boolean isRootXpath(final String xpath) { return "/".equals(xpath) || "".equals(xpath); } + + private String mergeLeaves(final Map updateLeaves, final String currentLeavesAsString) { + final Map currentLeavesAsMap = currentLeavesAsString.isEmpty() + ? 
new HashMap<>() : jsonObjectMapper.convertJsonString(currentLeavesAsString, Map.class); + currentLeavesAsMap.putAll(updateLeaves); + if (currentLeavesAsMap.isEmpty()) { + return ""; + } + return jsonObjectMapper.asJsonString(currentLeavesAsMap); + } } diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java index c9461bf06..4b42b2da8 100755 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java @@ -39,7 +39,8 @@ import org.springframework.data.repository.query.Param; import org.springframework.stereotype.Repository; @Repository -public interface FragmentRepository extends JpaRepository, FragmentRepositoryCpsPathQuery { +public interface FragmentRepository extends JpaRepository, FragmentRepositoryCpsPathQuery, + FragmentRepositoryMultiPathQuery { Optional findByDataspaceAndAnchorAndXpath(@NonNull DataspaceEntity dataspaceEntity, @NonNull AnchorEntity anchorEntity, @@ -80,9 +81,6 @@ public interface FragmentRepository extends JpaRepository, return fragmentExtracts; } - List findAllByAnchorAndXpathIn(@NonNull AnchorEntity anchorEntity, - @NonNull Collection xpath); - @Modifying @Query("DELETE FROM FragmentEntity fe WHERE fe.anchor IN (:anchors)") void deleteByAnchorIn(@NotNull @Param("anchors") Collection anchorEntities); diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQuery.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQuery.java new file mode 100644 index 000000000..9c34a459e --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQuery.java @@ -0,0 +1,31 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2022 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.repository; + +import java.util.Collection; +import java.util.List; +import org.onap.cps.spi.entities.FragmentEntity; + +public interface FragmentRepositoryMultiPathQuery { + + List findByAnchorAndMultipleCpsPaths(Integer anchorId, Collection cpsPathQuery); + +} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQueryImpl.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQueryImpl.java new file mode 100644 index 000000000..8c357bbb3 --- /dev/null +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryMultiPathQueryImpl.java @@ -0,0 +1,74 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2022-2023 Nordix Foundation. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.repository; + + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import javax.persistence.EntityManager; +import javax.persistence.PersistenceContext; +import javax.transaction.Transactional; +import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.onap.cps.spi.entities.FragmentEntity; + + +@Slf4j +@AllArgsConstructor +public class FragmentRepositoryMultiPathQueryImpl implements FragmentRepositoryMultiPathQuery { + + @PersistenceContext + private EntityManager entityManager; + + private TempTableCreator tempTableCreator; + + @Override + @Transactional + public List findByAnchorAndMultipleCpsPaths(final Integer anchorId, + final Collection cpsPathQueryList) { + if (cpsPathQueryList.isEmpty()) { + return Collections.emptyList(); + } + final Collection> sqlData = new HashSet<>(cpsPathQueryList.size()); + for (final String query : cpsPathQueryList) { + final List row = new ArrayList<>(1); + row.add(query); + sqlData.add(row); + } + + final String tempTableName = tempTableCreator.createTemporaryTable( + "xpathTemporaryTable", sqlData, "xpath"); + return selectMatchingFragments(anchorId, tempTableName); + } + + private List selectMatchingFragments(final Integer anchorId, final String tempTableName) { + final String sql = String.format( + "SELECT * FROM FRAGMENT WHERE anchor_id = %d AND xpath IN (select xpath FROM %s);", + anchorId, tempTableName); + final List fragmentEntities = entityManager.createNativeQuery(sql, FragmentEntity.class) + .getResultList(); + log.debug("Fetched {} fragment entities by anchor and cps path.", fragmentEntities.size()); + return fragmentEntities; + } +} diff --git a/cps-ri/src/main/java/org/onap/cps/spi/repository/TempTableCreator.java b/cps-ri/src/main/java/org/onap/cps/spi/repository/TempTableCreator.java index 8cad9f5e4..d713746e4 100644 --- a/cps-ri/src/main/java/org/onap/cps/spi/repository/TempTableCreator.java +++ b/cps-ri/src/main/java/org/onap/cps/spi/repository/TempTableCreator.java @@ -26,6 +26,7 @@ import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.UUID; +import java.util.stream.Collectors; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import lombok.AllArgsConstructor; @@ -82,8 +83,10 @@ public class TempTableCreator { final String[] columnNames, final Collection> sqlData) { final Collection sqlInserts = new HashSet<>(sqlData.size()); - for (final Collection row : sqlData) { - sqlInserts.add("('" + String.join("','", row) + "')"); + for (final Collection rowValues : sqlData) { + final Collection escapedValues = + rowValues.stream().map(it -> 
escapeSingleQuotesByDoublingThem(it)).collect(Collectors.toList()); + sqlInserts.add("('" + String.join("','", escapedValues) + "')"); } sqlStringBuilder.append("INSERT INTO "); sqlStringBuilder.append(tempTableName); @@ -94,4 +97,8 @@ public class TempTableCreator { sqlStringBuilder.append(";"); } + private static String escapeSingleQuotesByDoublingThem(final String value) { + return value.replace("'", "''"); + } + } diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy index 59bbfd854..ba8425fef 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy @@ -28,6 +28,8 @@ import org.onap.cps.spi.exceptions.CpsPathException import org.springframework.beans.factory.annotation.Autowired import org.springframework.test.context.jdbc.Sql +import java.util.stream.Collectors + import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS @@ -151,27 +153,30 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase { def result = objectUnderTest.queryDataNodes(DATASPACE_NAME, ANCHOR_FOR_SHOP_EXAMPLE, cpsPath, INCLUDE_ALL_DESCENDANTS) then: 'the xpaths of the retrieved data nodes are as expected' result.size() == expectedXPaths.size() - for (int i = 0; i < result.size(); i++) { - assert result[i].getXpath() == expectedXPaths[i] - assert result[i].childDataNodes.size() == expectedNumberOfChildren[i] + if (result.size() > 0) { + def resultXpaths = result.stream().map(it -> it.xpath).collect(Collectors.toSet()) + resultXpaths.containsAll(expectedXPaths) + result.each { + assert it.childDataNodes.size() == expectedNumberOfChildren + } } where: 'the following data is used' scenario | cpsPath || expectedXPaths || expectedNumberOfChildren - 'multiple list-ancestors' | '//book/ancestor::categories' || ["/shops/shop[@id='1']/categories[@code='1']", "/shops/shop[@id='1']/categories[@code='2']"] || [1, 1] - 'one ancestor with list value' | '//book/ancestor::categories[@code=1]' || ["/shops/shop[@id='1']/categories[@code='1']"] || [1] - 'top ancestor' | '//shop[@id=1]/ancestor::shops' || ['/shops'] || [5] - 'list with index value in the xpath prefix' | '//categories[@code=1]/book/ancestor::shop[@id=1]' || ["/shops/shop[@id='1']"] || [3] - 'ancestor with parent list' | '//book/ancestor::shop[@id=1]/categories[@code=2]' || ["/shops/shop[@id='1']/categories[@code='2']"] || [1] - 'ancestor with parent' | '//phonenumbers[@type="mob"]/ancestor::info/contact' || ["/shops/shop[@id='3']/info/contact"] || [3] - 'ancestor combined with text condition' | '//book/title[text()="Dune"]/ancestor::shop' || ["/shops/shop[@id='1']"] || [3] - 'ancestor with parent that does not exist' | '//book/ancestor::parentDoesNoExist/categories' || [] || [] - 'ancestor does not exist' | '//book/ancestor::ancestorDoesNotExist' || [] || [] + 'multiple list-ancestors' | '//book/ancestor::categories' || ["/shops/shop[@id='1']/categories[@code='2']", "/shops/shop[@id='1']/categories[@code='1']"] || 1 + 'one ancestor with list value' | '//book/ancestor::categories[@code=1]' || ["/shops/shop[@id='1']/categories[@code='1']"] || 1 + 'top ancestor' | '//shop[@id=1]/ancestor::shops' || ['/shops'] || 5 + 'list with index value in the xpath prefix' | 
'//categories[@code=1]/book/ancestor::shop[@id=1]' || ["/shops/shop[@id='1']"] || 3 + 'ancestor with parent list' | '//book/ancestor::shop[@id=1]/categories[@code=2]' || ["/shops/shop[@id='1']/categories[@code='2']"] || 1 + 'ancestor with parent' | '//phonenumbers[@type="mob"]/ancestor::info/contact' || ["/shops/shop[@id='3']/info/contact"] || 3 + 'ancestor combined with text condition' | '//book/title[text()="Dune"]/ancestor::shop' || ["/shops/shop[@id='1']"] || 3 + 'ancestor with parent that does not exist' | '//book/ancestor::parentDoesNoExist/categories' || [] || null + 'ancestor does not exist' | '//book/ancestor::ancestorDoesNotExist' || [] || null } def 'Cps Path query with syntax error throws a CPS Path Exception.'() { when: 'trying to execute a query with a syntax (parsing) error' objectUnderTest.queryDataNodes(DATASPACE_NAME, ANCHOR_FOR_SHOP_EXAMPLE, 'cpsPath that cannot be parsed' , OMIT_DESCENDANTS) - then: 'exception is thrown' + then: 'a cps path exception is thrown' thrown(CpsPathException) } diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy index cc2369d50..6252fff56 100755 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2021-2022 Nordix Foundation + * Copyright (C) 2021-2023 Nordix Foundation * Modifications Copyright (C) 2021 Pantheon.tech * Modifications Copyright (C) 2021-2022 Bell Canada. * Modifications Copyright (C) 2022 TechMahindra Ltd. @@ -295,6 +295,39 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase { 'invalid xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'INVALID XPATH' || CpsPathException } + @Sql([CLEAR_DATA, SET_DATA]) + def 'Get multiple data nodes by xpath.'() { + when: 'fetch #scenario.' 
+ def results = objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_NAME3, inputXpaths, OMIT_DESCENDANTS) + then: 'the expected number of data nodes are returned' + assert results.size() == expectedResultSize + where: 'following parameters were used' + scenario | inputXpaths || expectedResultSize + '1 node' | ["/parent-200"] || 1 + '2 unique nodes' | ["/parent-200", "/parent-201"] || 2 + '3 unique nodes' | ["/parent-200", "/parent-201", "/parent-202"] || 3 + '1 unique node with duplicate xpath' | ["/parent-200", "/parent-200"] || 1 + '2 unique nodes with duplicate xpath' | ["/parent-200", "/parent-202", "/parent-200"] || 2 + 'list element with key (single quote)' | ["/parent-201/child-204[@key='A']"] || 1 + 'list element with key (double quote)' | ['/parent-201/child-204[@key="A"]'] || 1 + 'non-existing xpath' | ["/NO-XPATH"] || 0 + 'existing and non-existing xpaths' | ["/parent-200", "/NO-XPATH", "/parent-201"] || 2 + 'invalid xpath' | ["INVALID XPATH"] || 0 + 'valid and invalid xpaths' | ["/parent-200", "INVALID XPATH", "/parent-201"] || 2 + } + + @Sql([CLEAR_DATA, SET_DATA]) + def 'Get multiple data nodes error scenario: #scenario.'() { + when: 'attempt to get data nodes with #scenario' + objectUnderTest.getDataNodes(dataspaceName, anchorName, ['/not-relevant'], OMIT_DESCENDANTS) + then: 'a #expectedException is thrown' + thrown(expectedException) + where: 'the following data is used' + scenario | dataspaceName | anchorName || expectedException + 'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' || DataspaceNotFoundException + 'non-existing anchor' | DATASPACE_NAME | 'NO ANCHOR' || AnchorNotFoundException + } + @Sql([CLEAR_DATA, SET_DATA]) def 'Update data node leaves.'() { when: 'update is performed for leaves' diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy index 255e8e52f..87e59c60d 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy @@ -1,8 +1,8 @@ /* * ============LICENSE_START======================================================= * Copyright (c) 2021 Bell Canada. - * Modifications Copyright (C) 2021-2022 Nordix Foundation - * Modifications Copyright (C) 2022 TechMahindra Ltd. + * Modifications Copyright (C) 2021-2023 Nordix Foundation + * Modifications Copyright (C) 2022 TechMahindra Ltd. * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
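Note: in the scenarios above, duplicate xpaths collapse to a single result, and list keys can be written with either quote style, because getDataNodes (earlier in this patch) normalises every xpath into a set before querying. A small Java sketch of that effect, assuming CpsPathUtil.getNormalizedXpath produces one canonical quoting (the printed size is illustrative):

    import java.util.HashSet;
    import java.util.Set;
    import org.onap.cps.cpspath.parser.CpsPathUtil;

    class XpathNormalisationSketch {
        public static void main(String[] args) {
            Set<String> normalizedXpaths = new HashSet<>();
            normalizedXpaths.add(CpsPathUtil.getNormalizedXpath("/parent-201/child-204[@key='A']"));
            normalizedXpaths.add(CpsPathUtil.getNormalizedXpath("/parent-201/child-204[@key=\"A\"]"));
            normalizedXpaths.add(CpsPathUtil.getNormalizedXpath("/parent-200"));
            normalizedXpaths.add(CpsPathUtil.getNormalizedXpath("/parent-200"));
            // if quoting is normalised consistently (assumption), the set holds 2 entries, not 4
            System.out.println(normalizedXpaths.size());
        }
    }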
@@ -107,7 +107,6 @@ class CpsDataPersistenceServiceSpec extends Specification { assert thrown.details.contains('/node3') } - def 'Retrieving a data node with a property JSON value of #scenario'() { given: 'the db has a fragment with an attribute property JSON value of #scenario' mockFragmentWithJson("{\"some attribute\": ${dataString}}") @@ -142,6 +141,20 @@ class CpsDataPersistenceServiceSpec extends Specification { thrown(DataValidationException) } + def 'Retrieving multiple data nodes.'() { + given: 'db contains an anchor' + def anchorEntity = new AnchorEntity(id:123) + mockAnchorRepository.getByDataspaceAndName(*_) >> anchorEntity + and: 'fragment repository returns a collection of fragments' + def fragmentEntity1 = new FragmentEntity(xpath: '/xpath1', childFragments: []) + def fragmentEntity2 = new FragmentEntity(xpath: '/xpath2', childFragments: []) + mockFragmentRepository.findByAnchorAndMultipleCpsPaths(123, ['/xpath1', '/xpath2'] as Set) >> [fragmentEntity1, fragmentEntity2] + when: 'getting data nodes for 2 xpaths' + def result = objectUnderTest.getDataNodes('some-dataspace', 'some-anchor', ['/xpath1', '/xpath2'], FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS) + then: '2 data nodes are returned' + assert result.size() == 2 + } + def 'start session'() { when: 'start session' objectUnderTest.startSession() @@ -165,6 +178,25 @@ class CpsDataPersistenceServiceSpec extends Specification { 1 * mockSessionManager.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L) } + def 'update data node leaves: #scenario'(){ + given: 'A node exists for the given xpath' + mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, '/some/xpath') >> new FragmentEntity(xpath: '/some/xpath', attributes: existingAttributes) + when: 'the node leaves are updated' + objectUnderTest.updateDataLeaves('some-dataspace', 'some-anchor', '/some/xpath', newAttributes as Map) + then: 'the fragment entity saved has the original and new attributes' + 1 * mockFragmentRepository.save({fragmentEntity -> { + assert fragmentEntity.getXpath() == '/some/xpath' + assert fragmentEntity.getAttributes() == mergedAttributes + }}) + where: 'the following attributes combinations are used' + scenario | existingAttributes | newAttributes | mergedAttributes + 'add new leaf' | '{"existing":"value"}' | ["new":"value"] | '{"existing":"value","new":"value"}' + 'update existing leaf' | '{"existing":"value"}' | ["existing":"value2"] | '{"existing":"value2"}' + 'update nothing with nothing' | '' | [] | '' + 'update with nothing' | '{"existing":"value"}' | [] | '{"existing":"value"}' + 'update with same value' | '{"existing":"value"}' | ["existing":"value"] | '{"existing":"value"}' + } + def 'update data node and descendants: #scenario'(){ given: 'mocked responses' mockFragmentRepository.getByDataspaceAndAnchorAndXpath(_, _, '/test/xpath') >> new FragmentEntity(xpath: '/test/xpath', childFragments: []) @@ -208,11 +240,8 @@ class CpsDataPersistenceServiceSpec extends Specification { } def mockFragmentWithJson(json) { - def anchorName = 'some anchor' - def mockAnchor = Mock(AnchorEntity) - mockAnchor.getId() >> 123 - mockAnchor.getName() >> anchorName - mockAnchorRepository.getByDataspaceAndName(*_) >> mockAnchor + def anchorEntity = new AnchorEntity(id:123) + mockAnchorRepository.getByDataspaceAndName(*_) >> anchorEntity def mockFragmentExtract = Mock(FragmentExtract) mockFragmentExtract.getId() >> 456 mockFragmentExtract.getAttributes() >> json diff --git 
a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy new file mode 100644 index 000000000..3bbae2d08 --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsPersistencePerfSpecBase.groovy @@ -0,0 +1,74 @@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the 'License'); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an 'AS IS' BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.impl + +import org.onap.cps.spi.model.DataNode +import org.onap.cps.spi.model.DataNodeBuilder + +class CpsPersistencePerfSpecBase extends CpsPersistenceSpecBase { + + static final String PERF_TEST_DATA = '/data/perf-test.sql' + static final String PERF_DATASPACE = 'PERF-DATASPACE' + static final String PERF_ANCHOR = 'PERF-ANCHOR' + static final String PERF_TEST_PARENT = '/perf-parent-1' + + static def xpathsToAllGrandChildren = [] + + def createLineage(cpsDataPersistenceService, numberOfChildren, numberOfGrandChildren, createLists) { + xpathsToAllGrandChildren = [] + (1..numberOfChildren).each { + if (createLists) { + def xpathFormat = "${PERF_TEST_PARENT}/perf-test-list-${it}[@key='%d']" + def listElements = goForthAndMultiply(xpathFormat, numberOfGrandChildren) + cpsDataPersistenceService.addListElements(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT, listElements) + } else { + def xpathFormat = "${PERF_TEST_PARENT}/perf-test-child-${it}/perf-test-grand-child-%d" + def grandChildren = goForthAndMultiply(xpathFormat, numberOfGrandChildren) + def child = new DataNodeBuilder() + .withXpath("${PERF_TEST_PARENT}/perf-test-child-${it}") + .withChildDataNodes(grandChildren) + .build() + cpsDataPersistenceService.addChildDataNode(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT, child) + } + } + } + + def goForthAndMultiply(xpathFormat, numberOfGrandChildren) { + def grandChildren = [] + (1..numberOfGrandChildren).each { + def xpath = String.format(xpathFormat as String, it) + def grandChild = new DataNodeBuilder().withXpath(xpath).build() + xpathsToAllGrandChildren.add(grandChild.xpath) + grandChildren.add(grandChild) + } + return grandChildren + } + + def countDataNodes(dataNodes) { + int nodeCount = 1 + for (DataNode parent : dataNodes) { + for (DataNode child : parent.childDataNodes) { + nodeCount = nodeCount + (countDataNodes(child)) + } + } + return nodeCount + } +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy new file mode 100644 index 000000000..5aae285d7 --- /dev/null +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServiceDeletePerfTest.groovy @@ -0,0 +1,154 
@@ +/* + * ============LICENSE_START======================================================= + * Copyright (C) 2023 Nordix Foundation + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ + +package org.onap.cps.spi.performance + +import org.onap.cps.spi.CpsDataPersistenceService +import org.onap.cps.spi.impl.CpsPersistencePerfSpecBase +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.test.context.jdbc.Sql +import org.springframework.util.StopWatch + +import java.util.concurrent.TimeUnit + +class CpsDataPersistenceServiceDeletePerfTest extends CpsPersistencePerfSpecBase { + + @Autowired + CpsDataPersistenceService objectUnderTest + + static def NUMBER_OF_CHILDREN = 100 + static def NUMBER_OF_GRAND_CHILDREN = 50 + static def NUMBER_OF_LISTS = 100 + static def NUMBER_OF_LIST_ELEMENTS = 50 + static def ALLOWED_SETUP_TIME_MS = TimeUnit.SECONDS.toMillis(10) + + def stopWatch = new StopWatch() + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Create a node with many descendants (please note, subsequent tests depend on this running first).'() { + given: 'a node with a large number of descendants is created' + stopWatch.start() + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) + stopWatch.stop() + def setupDurationInMillis = stopWatch.getTotalTimeMillis() + and: 'setup duration is under #ALLOWED_SETUP_TIME_MS milliseconds' + assert setupDurationInMillis < ALLOWED_SETUP_TIME_MS + } + + def 'Delete 5 children with grandchildren'() { + when: 'child nodes are deleted' + stopWatch.start() + (1..5).each { + def childPath = "${PERF_TEST_PARENT}/perf-test-child-${it}".toString(); + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, childPath) + } + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 6000 milliseconds' + assert deleteDurationInMillis < 6000 + } + + def 'Delete 50 grandchildren (that have no descendants)'() { + when: 'target nodes are deleted' + stopWatch.start() + (1..50).each { + def grandchildPath = "${PERF_TEST_PARENT}/perf-test-child-6/perf-test-grand-child-${it}".toString(); + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, grandchildPath) + } + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 500 milliseconds' + assert deleteDurationInMillis < 500 + } + + def 'Delete 1 large data node with many descendants'() { + when: 'parent node is deleted' + stopWatch.start() + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, PERF_TEST_PARENT) + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 2500 milliseconds' + assert deleteDurationInMillis < 2500 + } + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + 
def 'Create a node with many list elements (please note, subsequent tests depend on this running first).'() { + given: 'a node with a large number of descendants is created' + stopWatch.start() + createLineage(objectUnderTest, NUMBER_OF_LISTS, NUMBER_OF_LIST_ELEMENTS, true) + stopWatch.stop() + def setupDurationInMillis = stopWatch.getTotalTimeMillis() + and: 'setup duration is under #ALLOWED_SETUP_TIME_MS milliseconds' + assert setupDurationInMillis < ALLOWED_SETUP_TIME_MS + } + + def 'Delete 5 whole lists with many elements'() { + when: 'list nodes are deleted' + stopWatch.start() + (1..5).each { + def childPath = "${PERF_TEST_PARENT}/perf-test-list-${it}".toString(); + objectUnderTest.deleteListDataNode(PERF_DATASPACE, PERF_ANCHOR, childPath) + } + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 4000 milliseconds' + assert deleteDurationInMillis < 4000 + } + + def 'Delete 10 list elements with keys'() { + when: 'list elements are deleted' + stopWatch.start() + (1..10).each { + def key = it.toString() + def grandchildPath = "${PERF_TEST_PARENT}/perf-test-list-6[@key='${key}']" + objectUnderTest.deleteListDataNode(PERF_DATASPACE, PERF_ANCHOR, grandchildPath) + } + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 6000 milliseconds' + assert deleteDurationInMillis < 6000 + } + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Delete root node with many descendants'() { + given: 'a node with a large number of descendants is created' + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) + when: 'root node is deleted' + stopWatch.start() + objectUnderTest.deleteDataNode(PERF_DATASPACE, PERF_ANCHOR, '/') + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 250 milliseconds' + assert deleteDurationInMillis < 250 + } + + @Sql([CLEAR_DATA, PERF_TEST_DATA]) + def 'Delete data nodes for an anchor'() { + given: 'a node with a large number of descendants is created' + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) + when: 'data nodes are deleted' + stopWatch.start() + objectUnderTest.deleteDataNodes(PERF_DATASPACE, PERF_ANCHOR) + stopWatch.stop() + def deleteDurationInMillis = stopWatch.getTotalTimeMillis() + then: 'delete duration is under 250 milliseconds' + assert deleteDurationInMillis < 250 + } +} diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy similarity index 61% rename from cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy rename to cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy index 265c5fc5f..2346239df 100644 --- a/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsToDataNodePerfTest.groovy +++ b/cps-ri/src/test/groovy/org/onap/cps/spi/performance/CpsDataPersistenceServicePerfTest.groovy @@ -1,6 +1,6 @@ /* * ============LICENSE_START======================================================= - * Copyright (C) 2022 Nordix Foundation + * Copyright (C) 2022-2023 Nordix Foundation * ================================================================================ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,11 +20,12 @@ package org.onap.cps.spi.performance +import org.onap.cps.spi.impl.CpsPersistencePerfSpecBase import org.springframework.util.StopWatch import org.onap.cps.spi.CpsDataPersistenceService -import org.onap.cps.spi.impl.CpsPersistenceSpecBase -import org.onap.cps.spi.model.DataNode -import org.onap.cps.spi.model.DataNodeBuilder +import org.onap.cps.spi.repository.AnchorRepository +import org.onap.cps.spi.repository.DataspaceRepository +import org.onap.cps.spi.repository.FragmentRepository import org.springframework.beans.factory.annotation.Autowired import org.springframework.test.context.jdbc.Sql @@ -33,41 +34,46 @@ import java.util.concurrent.TimeUnit import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS -class CpsToDataNodePerfTest extends CpsPersistenceSpecBase { - - static final String PERF_TEST_DATA = '/data/perf-test.sql' +class CpsDataPersistenceServicePerfTest extends CpsPersistencePerfSpecBase { @Autowired CpsDataPersistenceService objectUnderTest - static def PERF_TEST_PARENT = '/perf-parent-1' + @Autowired + DataspaceRepository dataspaceRepository + + @Autowired + AnchorRepository anchorRepository + + @Autowired + FragmentRepository fragmentRepository + static def NUMBER_OF_CHILDREN = 200 static def NUMBER_OF_GRAND_CHILDREN = 50 static def TOTAL_NUMBER_OF_NODES = 1 + NUMBER_OF_CHILDREN + (NUMBER_OF_CHILDREN * NUMBER_OF_GRAND_CHILDREN) // Parent + Children + Grand-children - static def ALLOWED_SETUP_TIME_MS = TimeUnit.SECONDS.toMillis(10) - static def ALLOWED_READ_TIME_AL_NODES_MS = 500 def stopWatch = new StopWatch() + def readStopWatch = new StopWatch() @Sql([CLEAR_DATA, PERF_TEST_DATA]) def 'Create a node with many descendants (please note, subsequent tests depend on this running first).'() { given: 'a node with a large number of descendants is created' stopWatch.start() - createLineage() + createLineage(objectUnderTest, NUMBER_OF_CHILDREN, NUMBER_OF_GRAND_CHILDREN, false) stopWatch.stop() def setupDurationInMillis = stopWatch.getTotalTimeMillis() - and: 'setup duration is under #ALLOWED_SETUP_TIME_MS milliseconds' - assert setupDurationInMillis < ALLOWED_SETUP_TIME_MS + and: 'setup duration is under 10 seconds' + assert setupDurationInMillis < 10000 } def 'Get data node with many descendants by xpath #scenario'() { when: 'get parent is executed with all descendants' stopWatch.start() - def result = objectUnderTest.getDataNode('PERF-DATASPACE', 'PERF-ANCHOR', xpath, INCLUDE_ALL_DESCENDANTS) + def result = objectUnderTest.getDataNode(PERF_DATASPACE, PERF_ANCHOR, xpath, INCLUDE_ALL_DESCENDANTS) stopWatch.stop() def readDurationInMillis = stopWatch.getTotalTimeMillis() then: 'read duration is under 500 milliseconds' - assert readDurationInMillis < ALLOWED_READ_TIME_AL_NODES_MS + assert readDurationInMillis < 500 and: 'data node is returned with all the descendants populated' assert countDataNodes(result) == TOTAL_NUMBER_OF_NODES where: 'the following xPaths are used' @@ -79,55 +85,41 @@ class CpsToDataNodePerfTest extends CpsPersistenceSpecBase { def 'Query parent data node with many descendants by cps-path'() { when: 'query is executed with all descendants' stopWatch.start() - def result = objectUnderTest.queryDataNodes('PERF-DATASPACE', 'PERF-ANCHOR', '//perf-parent-1' , INCLUDE_ALL_DESCENDANTS) + def result = objectUnderTest.queryDataNodes(PERF_DATASPACE, PERF_ANCHOR, '//perf-parent-1' , INCLUDE_ALL_DESCENDANTS) stopWatch.stop() def readDurationInMillis = 
stopWatch.getTotalTimeMillis() then: 'read duration is under 500 milliseconds' - assert readDurationInMillis < ALLOWED_READ_TIME_AL_NODES_MS + assert readDurationInMillis < 500 and: 'data node is returned with all the descendants populated' assert countDataNodes(result) == TOTAL_NUMBER_OF_NODES } + def 'Performance of finding multiple xpaths'() { + when: 'we query for all grandchildren (except 1 for fun) with the new native method' + xpathsToAllGrandChildren.remove(0) + readStopWatch.start() + def result = objectUnderTest.getDataNodes(PERF_DATASPACE, PERF_ANCHOR, xpathsToAllGrandChildren, INCLUDE_ALL_DESCENDANTS) + readStopWatch.stop() + def readDurationInMillis = readStopWatch.getTotalTimeMillis() + then: 'the returned number of entities equal to the number of children * number of grandchildren' + assert result.size() == xpathsToAllGrandChildren.size() + and: 'it took less then 4000ms' + assert readDurationInMillis < 4000 + } + def 'Query many descendants by cps-path with #scenario'() { when: 'query is executed with all descendants' stopWatch.start() - def result = objectUnderTest.queryDataNodes('PERF-DATASPACE', 'PERF-ANCHOR', '//perf-test-grand-child-1', descendantsOption) + def result = objectUnderTest.queryDataNodes(PERF_DATASPACE, PERF_ANCHOR, '//perf-test-grand-child-1', descendantsOption) stopWatch.stop() def readDurationInMillis = stopWatch.getTotalTimeMillis() - then: 'read duration is under 500 milliseconds' - assert readDurationInMillis < alowedDuration + then: 'read duration is under #allowedDuration milliseconds' + assert readDurationInMillis < allowedDuration and: 'data node is returned with all the descendants populated' assert result.size() == NUMBER_OF_CHILDREN where: 'the following options are used' - scenario | descendantsOption || alowedDuration + scenario | descendantsOption || allowedDuration 'omit descendants ' | OMIT_DESCENDANTS || 150 'include descendants (although there are none)' | INCLUDE_ALL_DESCENDANTS || 150 } - - def createLineage() { - (1..NUMBER_OF_CHILDREN).each { - def childName = "perf-test-child-${it}".toString() - def child = goForthAndMultiply(PERF_TEST_PARENT, childName) - objectUnderTest.addChildDataNode('PERF-DATASPACE', 'PERF-ANCHOR', PERF_TEST_PARENT, child) - } - } - - def goForthAndMultiply(parentXpath, childName) { - def grandChildren = [] - (1..NUMBER_OF_GRAND_CHILDREN).each { - def grandChild = new DataNodeBuilder().withXpath("${parentXpath}/${childName}/perf-test-grand-child-${it}").build() - grandChildren.add(grandChild) - } - return new DataNodeBuilder().withXpath("${parentXpath}/${childName}").withChildDataNodes(grandChildren).build() - } - - def countDataNodes(dataNodes) { - int nodeCount = 1 - for (DataNode parent : dataNodes) { - for (DataNode child : parent.childDataNodes) { - nodeCount = nodeCount + (countDataNodes(child)) - } - } - return nodeCount - } } diff --git a/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java b/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java index 012d7f825..6332f0910 100644 --- a/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java +++ b/cps-service/src/main/java/org/onap/cps/api/CpsDataService.java @@ -122,6 +122,19 @@ public interface CpsDataService { DataNode getDataNode(String dataspaceName, String anchorName, String xpath, FetchDescendantsOption fetchDescendantsOption); + /** + * Retrieves datanodes by XPath for given dataspace and anchor. 
+ * + * @param dataspaceName dataspace name + * @param anchorName anchor name + * @param xpaths collection of xpath + * @param fetchDescendantsOption defines the scope of data to fetch: either single node or all the descendant nodes + * (recursively) as well + * @return data node object + */ + Collection getDataNodes(String dataspaceName, String anchorName, Collection xpaths, + FetchDescendantsOption fetchDescendantsOption); + /** * Updates data node for given dataspace and anchor using xpath to parent node. * diff --git a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java index 65dfa7f5c..38fa92a09 100755 --- a/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java +++ b/cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java @@ -129,6 +129,14 @@ public class CpsDataServiceImpl implements CpsDataService { return cpsDataPersistenceService.getDataNode(dataspaceName, anchorName, xpath, fetchDescendantsOption); } + @Override + public Collection getDataNodes(final String dataspaceName, final String anchorName, + final Collection xpaths, + final FetchDescendantsOption fetchDescendantsOption) { + cpsValidator.validateNameCharacters(dataspaceName, anchorName); + return cpsDataPersistenceService.getDataNodes(dataspaceName, anchorName, xpaths, fetchDescendantsOption); + } + @Override public void updateNodeLeaves(final String dataspaceName, final String anchorName, final String parentNodeXpath, final String jsonData, final OffsetDateTime observedTimestamp) { diff --git a/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java b/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java index b9da4af02..0989ccae2 100644 --- a/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java +++ b/cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java @@ -111,6 +111,19 @@ public interface CpsDataPersistenceService { DataNode getDataNode(String dataspaceName, String anchorName, String xpath, FetchDescendantsOption fetchDescendantsOption); + /** + * Retrieves datanode by XPath for given dataspace and anchor. + * + * @param dataspaceName dataspace name + * @param anchorName anchor name + * @param xpaths collection of xpaths + * @param fetchDescendantsOption defines the scope of data to fetch: either single node or all the descendant nodes + * (recursively) as well + * @return data node object + */ + Collection getDataNodes(String dataspaceName, String anchorName, Collection xpaths, + FetchDescendantsOption fetchDescendantsOption); + /** * Updates leaves for existing data node. 
* diff --git a/cps-service/src/main/java/org/onap/cps/utils/XmlFileUtils.java b/cps-service/src/main/java/org/onap/cps/utils/XmlFileUtils.java index 365404207..09f2e16c6 100644 --- a/cps-service/src/main/java/org/onap/cps/utils/XmlFileUtils.java +++ b/cps-service/src/main/java/org/onap/cps/utils/XmlFileUtils.java @@ -49,7 +49,10 @@ import org.xml.sax.SAXException; @NoArgsConstructor(access = AccessLevel.PRIVATE) public class XmlFileUtils { - private static DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); + private static final DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); + private static boolean isNewDocumentBuilderFactoryInstance = true; + private static final TransformerFactory transformerFactory = TransformerFactory.newInstance(); + private static boolean isNewTransformerFactoryInstance = true; private static final Pattern XPATH_PROPERTY_REGEX = Pattern.compile("\\[@(\\S{1,100})=['\\\"](\\S{1,100})['\\\"]\\]"); @@ -72,7 +75,7 @@ public class XmlFileUtils { * * @param xmlContent XML content sent to store * @param parentSchemaNode Parent schema node - * @Param xpath Parent xpath + * @param xpath Parent xpath * * @return XML content wrapped by root node (if needed) */ @@ -98,7 +101,7 @@ public class XmlFileUtils { final String namespace, final Map rootNodeProperty) throws IOException, SAXException, ParserConfigurationException, TransformerException { - final DocumentBuilder documentBuilder = dbFactory.newDocumentBuilder(); + final DocumentBuilder documentBuilder = getDocumentBuilderFactory().newDocumentBuilder(); final StringBuilder xmlStringBuilder = new StringBuilder(); xmlStringBuilder.append(xmlContent); final Document document = documentBuilder.parse( @@ -108,10 +111,7 @@ public class XmlFileUtils { && !root.getTagName().equals(YangUtils.DATA_ROOT_NODE_TAG_NAME)) { final Document documentWithRootNode = addDataRootNode(root, rootNodeTagName, namespace, rootNodeProperty); documentWithRootNode.setXmlStandalone(true); - final TransformerFactory transformerFactory = TransformerFactory.newInstance(); - transformerFactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, ""); - transformerFactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_STYLESHEET, ""); - final Transformer transformer = transformerFactory.newTransformer(); + final Transformer transformer = getTransformerFactory().newTransformer(); final StringWriter stringWriter = new StringWriter(); transformer.transform(new DOMSource(documentWithRootNode), new StreamResult(stringWriter)); return stringWriter.toString(); @@ -145,8 +145,8 @@ public class XmlFileUtils { final String namespace, final Map rootNodeProperty) { try { - final DocumentBuilder docBuilder = dbFactory.newDocumentBuilder(); - final Document document = docBuilder.newDocument(); + final DocumentBuilder documentBuilder = getDocumentBuilderFactory().newDocumentBuilder(); + final Document document = documentBuilder.newDocument(); final Element rootElement = document.createElementNS(namespace, tagName); for (final Map.Entry entry : rootNodeProperty.entrySet()) { final Element propertyElement = document.createElement(entry.getKey()); @@ -160,4 +160,26 @@ public class XmlFileUtils { throw new DataValidationException("Can't parse XML", "XML can't be parsed", exception); } } + + private static DocumentBuilderFactory getDocumentBuilderFactory() { + + if (isNewDocumentBuilderFactoryInstance) { + documentBuilderFactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, ""); + 
documentBuilderFactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_SCHEMA, ""); + isNewDocumentBuilderFactoryInstance = false; + } + + return documentBuilderFactory; + } + + private static TransformerFactory getTransformerFactory() { + + if (isNewTransformerFactoryInstance) { + transformerFactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, ""); + transformerFactory.setAttribute(XMLConstants.ACCESS_EXTERNAL_STYLESHEET, ""); + isNewTransformerFactoryInstance = false; + } + + return transformerFactory; + } }
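
Editorial note: a minimal usage sketch of the multi-xpath read added above (CpsDataService#getDataNodes). This snippet is illustrative only and is not part of the commit; the dataspace, anchor and xpath values are invented, and the generic type parameters (Collection<String> in, Collection<DataNode> out) are assumed, since the rendered diff strips angle-bracketed generics.

import java.util.Collection;
import java.util.List;

import org.onap.cps.api.CpsDataService;
import org.onap.cps.spi.FetchDescendantsOption;
import org.onap.cps.spi.model.DataNode;

public class GetDataNodesUsageSketch {

    private final CpsDataService cpsDataService;

    public GetDataNodesUsageSketch(final CpsDataService cpsDataService) {
        this.cpsDataService = cpsDataService;
    }

    public Collection<DataNode> readSelectedNodes() {
        // Fetch several nodes in one call instead of looping over getDataNode(...),
        // which is the pattern exercised by the 'finding multiple xpaths' performance test above.
        final Collection<String> xpaths = List.of(
                "/perf-parent-1/perf-test-child-1",
                "/perf-parent-1/perf-test-child-2");
        return cpsDataService.getDataNodes("PERF-DATASPACE", "PERF-ANCHOR", xpaths,
                FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS);
    }
}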
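
Editorial note: the XmlFileUtils change above stops creating a TransformerFactory per call and instead reuses one DocumentBuilderFactory and one TransformerFactory, applying the JAXP external-access restrictions only on first use. The sketch below shows the same idea in a standalone form; the class and method names are invented, and synchronized accessors are an assumption on my part (the committed code guards with plain boolean flags).

import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.TransformerFactory;

public final class HardenedXmlFactories {

    private static final DocumentBuilderFactory DOCUMENT_BUILDER_FACTORY = DocumentBuilderFactory.newInstance();
    private static final TransformerFactory TRANSFORMER_FACTORY = TransformerFactory.newInstance();
    private static boolean documentBuilderFactoryConfigured = false;
    private static boolean transformerFactoryConfigured = false;

    private HardenedXmlFactories() { }

    static synchronized DocumentBuilderFactory documentBuilderFactory() {
        if (!documentBuilderFactoryConfigured) {
            // Disallow external DTDs and schemas to mitigate XXE-style attacks
            DOCUMENT_BUILDER_FACTORY.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, "");
            DOCUMENT_BUILDER_FACTORY.setAttribute(XMLConstants.ACCESS_EXTERNAL_SCHEMA, "");
            documentBuilderFactoryConfigured = true;
        }
        return DOCUMENT_BUILDER_FACTORY;
    }

    static synchronized TransformerFactory transformerFactory() {
        if (!transformerFactoryConfigured) {
            // Restrict external DTDs and stylesheets for the transformer as well
            TRANSFORMER_FACTORY.setAttribute(XMLConstants.ACCESS_EXTERNAL_DTD, "");
            TRANSFORMER_FACTORY.setAttribute(XMLConstants.ACCESS_EXTERNAL_STYLESHEET, "");
            transformerFactoryConfigured = true;
        }
        return TRANSFORMER_FACTORY;
    }
}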