Support for Patch across multiple data nodes 19/134019/17
author <as00745003@techmahindra.com>
Wed, 5 Apr 2023 11:38:08 +0000 (17:08 +0530)
committer <as00745003@techmahindra.com>
Tue, 9 May 2023 10:34:03 +0000 (16:04 +0530)
- Added new method batchUpdateDataLeaves to update leaves on
  multiple data nodes in a single call (usage sketch below)
- Removed the singular update method (updateDataLeaves)
- Refactored code where the singular version was used
- Updated release notes

Issue-ID: CPS-1006
Signed-off-by: <as00745003@techmahindra.com>
Change-Id: If67280e2dd3ad566de9a8217489f168415e624bc
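
For illustration only (not part of the change): a minimal caller-side sketch of the behaviour this patch enables. The CpsDataService.updateNodeLeaves signature and the two-container payload are taken from the diffs below; the class name, dataspace and anchor names are hypothetical.

    import java.time.OffsetDateTime;
    import org.onap.cps.api.CpsDataService;

    class MultiNodePatchSketch {

        // Hypothetical helper: patches two sibling top-level containers in one call.
        // Before this change a payload with more than one top-level data node was
        // rejected with DataValidationException; it is now split into one leaf
        // update per xpath (see CpsDataServiceImpl.updateNodeLeaves below).
        static void patchTwoSiblingContainers(final CpsDataService cpsDataService) {
            final String jsonData =
                    "{\"first-container\":{\"a-leaf\":\"a-new-Value\"},"
                    + "\"last-container\":{\"x-leaf\":\"x-new-value\"}}";
            cpsDataService.updateNodeLeaves("my-dataspace", "my-anchor", "/",
                    jsonData, OffsetDateTime.now());
        }
    }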

cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java
cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
cps-service/src/main/java/org/onap/cps/api/CpsDataService.java
cps-service/src/main/java/org/onap/cps/api/impl/CpsDataServiceImpl.java
cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java
cps-service/src/test/groovy/org/onap/cps/api/impl/CpsDataServiceImplSpec.groovy
docs/release-notes.rst
integration-test/src/test/groovy/org/onap/cps/integration/base/FunctionalSpecBase.groovy
integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy

index 49e2dd2..b7ce98e 100644 (file)
@@ -1,6 +1,7 @@
 /*
  *  ============LICENSE_START=======================================================
  *  Copyright (C) 2022 Nordix Foundation
+ *  Modifications Copyright (C) 2023 TechMahindra Ltd.
  *  ================================================================================
  *  Licensed under the Apache License, Version 2.0 (the "License");
  *  you may not use this file except in compliance with the License.
@@ -40,9 +41,11 @@ public class FragmentEntityArranger {
     public static Collection<FragmentEntity> toFragmentEntityTrees(final AnchorEntity anchorEntity,
                                                       final Collection<FragmentExtract> fragmentExtracts) {
         final Map<Long, FragmentEntity> fragmentEntityPerId = new HashMap<>();
-        for (final FragmentExtract fragmentExtract : fragmentExtracts) {
-            final FragmentEntity fragmentEntity = toFragmentEntity(anchorEntity, fragmentExtract);
-            fragmentEntityPerId.put(fragmentEntity.getId(), fragmentEntity);
+        if (fragmentExtracts != null) {
+            for (final FragmentExtract fragmentExtract : fragmentExtracts) {
+                final FragmentEntity fragmentEntity = toFragmentEntity(anchorEntity, fragmentExtract);
+                fragmentEntityPerId.put(fragmentEntity.getId(), fragmentEntity);
+            }
         }
         return reuniteChildrenWithTheirParents(fragmentEntityPerId);
     }
index c26cd2f..3d2b87d 100644 (file)
@@ -450,14 +450,25 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
     }
 
     @Override
-    public void updateDataLeaves(final String dataspaceName, final String anchorName, final String xpath,
-                                 final Map<String, Serializable> updateLeaves) {
+    public void batchUpdateDataLeaves(final String dataspaceName, final String anchorName,
+                                        final Map<String, Map<String, Serializable>> updatedLeavesPerXPath) {
         final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName);
-        final FragmentEntity fragmentEntity = getFragmentEntity(anchorEntity, xpath);
-        final String currentLeavesAsString = fragmentEntity.getAttributes();
-        final String mergedLeaves = mergeLeaves(updateLeaves, currentLeavesAsString);
-        fragmentEntity.setAttributes(mergedLeaves);
-        fragmentRepository.save(fragmentEntity);
+
+        final Collection<String> xpathsOfUpdatedLeaves = updatedLeavesPerXPath.keySet();
+        final Collection<FragmentEntity> fragmentEntities = getFragmentEntities(anchorEntity, xpathsOfUpdatedLeaves,
+                FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS);
+
+        for (final FragmentEntity fragmentEntity : fragmentEntities) {
+            final Map<String, Serializable> updatedLeaves = updatedLeavesPerXPath.get(fragmentEntity.getXpath());
+            final String mergedLeaves = mergeLeaves(updatedLeaves, fragmentEntity.getAttributes());
+            fragmentEntity.setAttributes(mergedLeaves);
+        }
+
+        try {
+            fragmentRepository.saveAll(fragmentEntities);
+        } catch (final StaleStateException staleStateException) {
+            retryUpdateDataNodesIndividually(anchorEntity, fragmentEntities);
+        }
     }
 
     @Override
@@ -687,9 +698,13 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
     }
 
     private String mergeLeaves(final Map<String, Serializable> updateLeaves, final String currentLeavesAsString) {
-        final Map<String, Serializable> currentLeavesAsMap = currentLeavesAsString.isEmpty()
-            ? new HashMap<>() : jsonObjectMapper.convertJsonString(currentLeavesAsString, Map.class);
-        currentLeavesAsMap.putAll(updateLeaves);
+        Map<String, Serializable> currentLeavesAsMap = new HashMap<>();
+        if (currentLeavesAsString != null) {
+            currentLeavesAsMap = currentLeavesAsString.isEmpty()
+                    ? new HashMap<>() : jsonObjectMapper.convertJsonString(currentLeavesAsString, Map.class);
+            currentLeavesAsMap.putAll(updateLeaves);
+        }
+
         if (currentLeavesAsMap.isEmpty()) {
             return "";
         }
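
For illustration only (not part of the change): a standalone sketch of the leaf-merge behaviour used above, as pinned down by the removed 'update data node leaves' unit test further down. It uses plain Jackson instead of the project's JsonObjectMapper wrapper; the class and method names are hypothetical and the null handling is simplified.

    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.io.Serializable;
    import java.util.HashMap;
    import java.util.Map;

    class LeafMergeSketch {

        private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

        // Existing attributes (a JSON string) are overlaid with the updated leaves:
        // new leaves are added, existing leaves are overwritten, an empty update is a no-op.
        @SuppressWarnings("unchecked")
        static String merge(final Map<String, Serializable> updateLeaves,
                            final String currentLeavesAsString) throws Exception {
            Map<String, Serializable> currentLeavesAsMap = new HashMap<>();
            if (currentLeavesAsString != null && !currentLeavesAsString.isEmpty()) {
                currentLeavesAsMap = OBJECT_MAPPER.readValue(currentLeavesAsString, HashMap.class);
            }
            currentLeavesAsMap.putAll(updateLeaves);
            return currentLeavesAsMap.isEmpty() ? "" : OBJECT_MAPPER.writeValueAsString(currentLeavesAsMap);
        }

        public static void main(final String[] args) throws Exception {
            final Map<String, Serializable> newLeaf = new HashMap<>();
            newLeaf.put("new", "value");
            // Prints {"existing":"value","new":"value"} (key order may vary).
            System.out.println(merge(newLeaf, "{\"existing\":\"value\"}"));
        }
    }
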
index 67ccc80..080e348 100755 (executable)
@@ -319,36 +319,6 @@ class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
             'non-existing anchor'    | DATASPACE_NAME | 'NO ANCHOR'    || AnchorNotFoundException
     }
 
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Update data node leaves.'() {
-        when: 'update is performed for leaves'
-            objectUnderTest.updateDataLeaves(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES,
-                    '/parent-200/child-201', ['leaf-value': 'new'])
-        then: 'leaves are updated for selected data node'
-            def updatedFragment = fragmentRepository.getReferenceById(DATA_NODE_202_FRAGMENT_ID)
-            def updatedLeaves = getLeavesMap(updatedFragment)
-            assert updatedLeaves.size() == 1
-            assert updatedLeaves.'leaf-value' == 'new'
-        and: 'existing child entry remains as is'
-            def childFragment = updatedFragment.childFragments.iterator().next()
-            def childLeaves = getLeavesMap(childFragment)
-            assert childFragment.id == CHILD_OF_DATA_NODE_202_FRAGMENT_ID
-            assert childLeaves.'leaf-value' == 'original'
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Update data leaves error scenario: #scenario.'() {
-        when: 'attempt to update data node for #scenario'
-            objectUnderTest.updateDataLeaves(dataspaceName, anchorName, xpath, ['leaf-name': 'leaf-value'])
-        then: 'a #expectedException is thrown'
-            thrown(expectedException)
-        where: 'the following data is used'
-            scenario                 | dataspaceName  | anchorName                        | xpath                 || expectedException
-            'non-existing dataspace' | 'NO DATASPACE' | 'not relevant'                    | '/not relevant'       || DataspaceNotFoundException
-            'non-existing anchor'    | DATASPACE_NAME | 'NO ANCHOR'                       | '/not relevant'       || AnchorNotFoundException
-            'non-existing xpath'     | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NON-EXISTING-XPATH' || DataNodeNotFoundException
-    }
-
     @Sql([CLEAR_DATA, SET_DATA])
     def 'Update data nodes and descendants by removing descendants.'() {
         given: 'data nodes with leaves updated, no children'
index 8a58388..e8921b3 100644 (file)
@@ -38,6 +38,7 @@ import org.onap.cps.spi.utils.SessionManager
 import org.onap.cps.utils.JsonObjectMapper
 import org.springframework.dao.DataIntegrityViolationException
 import spock.lang.Specification
+import java.util.stream.Collectors
 
 class CpsDataPersistenceServiceSpec extends Specification {
 
@@ -68,6 +69,53 @@ class CpsDataPersistenceServiceSpec extends Specification {
             2 * mockFragmentRepository.save(_)
     }
 
+    def 'Handling of StaleStateException (caused by concurrent updates) during patch operation for data nodes.'() {
+        given: 'the system can update one datanode and has two more datanodes that throw an exception while updating'
+            def dataNodes = createDataNodesAndMockRepositoryMethodSupportingThem([
+                    '/node1': 'OK',
+                    '/node2': 'EXCEPTION',
+                    '/node3': 'EXCEPTION'])
+            def updatedLeavesPerXPath = dataNodes.stream()
+                    .collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves))
+        and: 'the batch update will therefore also fail'
+            mockFragmentRepository.saveAll(*_) >> { throw new StaleStateException("concurrent updates") }
+        when: 'attempt batch update data nodes'
+            objectUnderTest.batchUpdateDataLeaves('some-dataspace', 'some-anchor', updatedLeavesPerXPath)
+        then: 'concurrency exception is thrown'
+            def thrown = thrown(ConcurrencyException)
+            assert thrown.message == 'Concurrent Transactions'
+        and: 'it does not contain the successful datanode'
+            assert !thrown.details.contains('/node1')
+        and: 'it contains the failed datanodes'
+            assert thrown.details.contains('/node2')
+            assert thrown.details.contains('/node3')
+    }
+
+    def 'Batch update data node leaves and descendants: #scenario'(){
+        given: 'the fragment repository returns fragment entities related to the xpath inputs'
+            mockFragmentRepository.findExtractsWithDescendants(_, [] as Set, _) >> []
+            mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath'] as Set, _) >> [
+                    mockFragmentExtract(1, null, 123, '/test/xpath', "{\"id\":\"testId1\"}")
+            ]
+            mockFragmentRepository.findExtractsWithDescendants(123, ['/test/xpath1', '/test/xpath2'] as Set, _) >> [
+                    mockFragmentExtract(1, null, 123, '/test/xpath1', "{\"id\":\"testId1\"}"),
+                    mockFragmentExtract(2, null, 123, '/test/xpath2', "{\"id\":\"testId1\"}")
+            ]
+        when: 'batch update data leaves'
+            objectUnderTest.batchUpdateDataLeaves('dataspaceName', 'anchorName',
+                    dataNodes.stream().collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves)))
+        then: 'call fragment repository save all method'
+            1 * mockFragmentRepository.saveAll({fragmentEntities ->
+                assert fragmentEntities as List == expectedFragmentEntities
+                assert fragmentEntities.size() == expectedSize
+            })
+        where: 'the following data is used'
+            scenario                         | dataNodes                                                                                                                              | expectedSize || expectedFragmentEntities
+            'empty data node list'           | []                                                                                                                                     | 0            || []
+            'one data node in list'          | [new DataNode(xpath: '/test/xpath', leaves: ['id': 'testId'])]                                                                         | 1            || [new FragmentEntity(xpath: '/test/xpath', attributes: '{"id":"testId"}', anchor: anchorEntity)]
+            'multiple data nodes'            | [new DataNode(xpath: '/test/xpath1', leaves: ['id': 'newTestId1']), new DataNode(xpath: '/test/xpath2', leaves: ['id': 'newTestId2'])] | 2            || [new FragmentEntity(xpath: '/test/xpath2', attributes: '{"id":"newTestId2"}', anchor: anchorEntity), new FragmentEntity(xpath: '/test/xpath1', attributes: '{"id":"newTestId1"}', anchor: anchorEntity)]
+    }
+
     def 'Handling of StaleStateException (caused by concurrent updates) during update data nodes and descendants.'() {
         given: 'the system can update one datanode and has two more datanodes that throw an exception while updating'
             def dataNodes = createDataNodesAndMockRepositoryMethodSupportingThem([
@@ -81,7 +129,7 @@ class CpsDataPersistenceServiceSpec extends Specification {
         then: 'concurrency exception is thrown'
             def thrown = thrown(ConcurrencyException)
             assert thrown.message == 'Concurrent Transactions'
-        and: 'it does not contain the successfull datanode'
+        and: 'it does not contain the successful datanode'
             assert !thrown.details.contains('/node1')
         and: 'it contains the failed datanodes'
             assert thrown.details.contains('/node2')
@@ -157,26 +205,7 @@ class CpsDataPersistenceServiceSpec extends Specification {
             1 * mockSessionManager.lockAnchor('mySessionId', 'myDataspaceName', 'myAnchorName', 123L)
     }
 
-    def 'update data node leaves: #scenario'(){
-        given: 'A node exists for the given xpath'
-            mockFragmentRepository.getByAnchorAndXpath(_, '/some/xpath') >> new FragmentEntity(xpath: '/some/xpath', attributes:  existingAttributes)
-        when: 'the node leaves are updated'
-            objectUnderTest.updateDataLeaves('some-dataspace', 'some-anchor', '/some/xpath', newAttributes as Map<String, Serializable>)
-        then: 'the fragment entity saved has the original and new attributes'
-            1 * mockFragmentRepository.save({fragmentEntity -> {
-                assert fragmentEntity.getXpath() == '/some/xpath'
-                assert fragmentEntity.getAttributes() == mergedAttributes
-            }})
-        where: 'the following attributes combinations are used'
-            scenario                      | existingAttributes     | newAttributes         | mergedAttributes
-            'add new leaf'                | '{"existing":"value"}' | ["new":"value"]       | '{"existing":"value","new":"value"}'
-            'update existing leaf'        | '{"existing":"value"}' | ["existing":"value2"] | '{"existing":"value2"}'
-            'update nothing with nothing' | ''                     | []                    | ''
-            'update with nothing'         | '{"existing":"value"}' | []                    | '{"existing":"value"}'
-            'update with same value'      | '{"existing":"value"}' | ["existing":"value"]  | '{"existing":"value"}'
-    }
-
-    def 'update data node and descendants: #scenario'(){
+    def 'Replace data node and descendants: #scenario'(){
         given: 'the fragment repository returns fragment entities related to the xpath inputs'
             mockFragmentRepository.findExtractsWithDescendants(_, [] as Set, _) >> []
             mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath'] as Set, _) >> [
@@ -192,7 +221,7 @@ class CpsDataPersistenceServiceSpec extends Specification {
             'one data node in list'          | [new DataNode(xpath: '/test/xpath', leaves: ['id': 'testId'], childDataNodes: [])] || [new FragmentEntity(xpath: '/test/xpath', attributes: '{"id":"testId"}', anchor: anchorEntity, childFragments: [])]
     }
 
-    def 'update data nodes and descendants'() {
+    def 'Replace data nodes and descendants'() {
         given: 'the fragment repository returns fragment entities related to the xpath inputs'
             mockFragmentRepository.findExtractsWithDescendants(_, ['/test/xpath1', '/test/xpath2'] as Set, _) >> [
                 mockFragmentExtract(1, null, 123, '/test/xpath1', null),
index fc00868..6a2cac4 100644 (file)
@@ -138,9 +138,7 @@ public interface CpsDataService {
                                                        FetchDescendantsOption fetchDescendantsOption);
 
     /**
-     * Updates data node for given dataspace and anchor using xpath to parent node. This method can currently
-     * update only one top level data node. The method will throw DataValidationException when more than one top level
-     * data nodes are provided in jsonData
+     * Updates the leaves of multiple data nodes for given dataspace and anchor using xpath to parent node.
      *
      * @param dataspaceName   dataspace name
      * @param anchorName      anchor name
@@ -154,10 +152,10 @@ public interface CpsDataService {
     /**
      * Replaces an existing data node's content including descendants.
      *
-     * @param dataspaceName   dataspace name
-     * @param anchorName      anchor name
-     * @param parentNodeXpath xpath to parent node
-     * @param jsonData        json data
+     * @param dataspaceName     dataspace name
+     * @param anchorName        anchor name
+     * @param parentNodeXpath   xpath to parent node
+     * @param jsonData          json data
      * @param observedTimestamp observedTimestamp
      */
     void updateDataNodeAndDescendants(String dataspaceName, String anchorName, String parentNodeXpath, String jsonData,
index 51e31f0..99cda22 100755 (executable)
@@ -29,8 +29,10 @@ import static org.onap.cps.notification.Operation.DELETE;
 import static org.onap.cps.notification.Operation.UPDATE;
 
 import io.micrometer.core.annotation.Timed;
+import java.io.Serializable;
 import java.time.OffsetDateTime;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Map;
 import java.util.stream.Collectors;
 import lombok.RequiredArgsConstructor;
@@ -155,20 +157,16 @@ public class CpsDataServiceImpl implements CpsDataService {
 
     @Override
     @Timed(value = "cps.data.service.datanode.leaves.update",
-        description = "Time taken to get a batch of data nodes")
+        description = "Time taken to update a batch of leaf data nodes")
     public void updateNodeLeaves(final String dataspaceName, final String anchorName, final String parentNodeXpath,
         final String jsonData, final OffsetDateTime observedTimestamp) {
         cpsValidator.validateNameCharacters(dataspaceName, anchorName);
         final Anchor anchor = cpsAdminService.getAnchor(dataspaceName, anchorName);
         final Collection<DataNode> dataNodesInPatch = buildDataNodes(anchor, parentNodeXpath, jsonData,
                 ContentType.JSON);
-        if (dataNodesInPatch.size() > 1) {
-            throw new DataValidationException("Operation is not supported for multiple data nodes",
-                    "Number of data nodes present: " + dataNodesInPatch.size());
-        }
-        cpsDataPersistenceService.updateDataLeaves(dataspaceName, anchorName,
-                dataNodesInPatch.iterator().next().getXpath(),
-            dataNodesInPatch.iterator().next().getLeaves());
+        final Map<String, Map<String, Serializable>> xpathToUpdatedLeaves = dataNodesInPatch.stream()
+                .collect(Collectors.toMap(DataNode::getXpath, DataNode::getLeaves));
+        cpsDataPersistenceService.batchUpdateDataLeaves(dataspaceName, anchorName, xpathToUpdatedLeaves);
         processDataUpdatedEventAsync(anchor, parentNodeXpath, UPDATE, observedTimestamp);
     }
 
@@ -395,8 +393,8 @@ public class CpsDataServiceImpl implements CpsDataService {
         if (dataNodeUpdate == null) {
             return;
         }
-        cpsDataPersistenceService.updateDataLeaves(anchor.getDataspaceName(), anchor.getName(),
-            dataNodeUpdate.getXpath(), dataNodeUpdate.getLeaves());
+        cpsDataPersistenceService.batchUpdateDataLeaves(anchor.getDataspaceName(), anchor.getName(),
+                Collections.singletonMap(dataNodeUpdate.getXpath(), dataNodeUpdate.getLeaves()));
         final Collection<DataNode> childDataNodeUpdates = dataNodeUpdate.getChildDataNodes();
         for (final DataNode childDataNodeUpdate : childDataNodeUpdates) {
             processDataNodeUpdate(anchor, childDataNodeUpdate);
index d28a333..4fb25e9 100644 (file)
@@ -116,14 +116,14 @@ public interface CpsDataPersistenceService {
                                                        FetchDescendantsOption fetchDescendantsOption);
 
     /**
-     * Updates leaves for existing data node.
+     * Updates data leaves for multiple data nodes.
      *
-     * @param dataspaceName dataspace name
-     * @param anchorName    anchor name
-     * @param xpath         xpath
-     * @param leaves        the leaves as a map where key is a leaf name and a value is a leaf value
+     * @param dataspaceName              dataspace name
+     * @param anchorName                 anchor name
+     * @param updatedLeavesPerXPath      Map of xPaths to updated leaf nodes
      */
-    void updateDataLeaves(String dataspaceName, String anchorName, String xpath, Map<String, Serializable> leaves);
+    void batchUpdateDataLeaves(String dataspaceName, String anchorName,
+                               Map<String, Map<String, Serializable>> updatedLeavesPerXPath);
 
     /**
      * Replaces multiple existing data nodes' content including descendants in a batch operation.
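
For illustration only (not part of the change): building the nested map described by the new javadoc above and passing it to the SPI method added in this diff. The batchUpdateDataLeaves signature is from this change; dataspace, anchor, xpaths and leaf values are hypothetical.

    import java.io.Serializable;
    import java.util.HashMap;
    import java.util.Map;
    import org.onap.cps.spi.CpsDataPersistenceService;

    class BatchLeafUpdateSketch {

        // One entry per data node: the outer key is the node's xpath,
        // the inner map is leaf name to updated leaf value.
        static void updateTwoNodes(final CpsDataPersistenceService cpsDataPersistenceService) {
            final Map<String, Map<String, Serializable>> updatedLeavesPerXPath = new HashMap<>();
            updatedLeavesPerXPath.put("/first-container", new HashMap<>(Map.of("a-leaf", "a-new-Value")));
            updatedLeavesPerXPath.put("/last-container", new HashMap<>(Map.of("x-leaf", "x-new-value")));
            cpsDataPersistenceService.batchUpdateDataLeaves("my-dataspace", "my-anchor", updatedLeavesPerXPath);
        }
    }
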
index be397b9..e357d24 100644 (file)
@@ -215,15 +215,15 @@ class CpsDataServiceImplSpec extends Specification {
         when: 'update data method is invoked with json data #jsonData and parent node xpath #parentNodeXpath'
             objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, parentNodeXpath, jsonData, observedTimestamp)
         then: 'the persistence service method is invoked with correct parameters'
-            1 * mockCpsDataPersistenceService.updateDataLeaves(dataspaceName, anchorName, expectedNodeXpath, leaves)
+            1 * mockCpsDataPersistenceService.batchUpdateDataLeaves(dataspaceName, anchorName, {updatedLeavesPerXPath -> updatedLeavesPerXPath.keySet()[0] == expectedNodeXpath})
         and: 'the CpsValidator is called on the dataspaceName and AnchorName'
             1 * mockCpsValidator.validateNameCharacters(dataspaceName, anchorName)
         and: 'data updated event is sent to notification service'
             1 * mockNotificationService.processDataUpdatedEvent(anchor, parentNodeXpath, Operation.UPDATE, observedTimestamp)
         where: 'following parameters were used'
-            scenario         | parentNodeXpath | jsonData                        || expectedNodeXpath                   | leaves
-            'top level node' | '/'             | '{"test-tree": {"branch": []}}' || '/test-tree'                        | Collections.emptyMap()
-            'level 2 node'   | '/test-tree'    | '{"branch": [{"name":"Name"}]}' || '/test-tree/branch[@name=\'Name\']' | ['name': 'Name']
+            scenario         | parentNodeXpath | jsonData                        || expectedNodeXpath
+            'top level node' | '/'             | '{"test-tree": {"branch": []}}' || '/test-tree'
+            'level 2 node'   | '/test-tree'    | '{"branch": [{"name":"Name"}]}' || '/test-tree/branch[@name=\'Name\']'
     }
 
     def 'Update list-element data node with : #scenario.'() {
@@ -244,11 +244,21 @@ class CpsDataServiceImplSpec extends Specification {
         given: 'schema set for given dataspace and anchor refers multipleDataTree model'
             setupSchemaSetMocks('multipleDataTree.yang')
         and: 'json string with multiple data trees'
+            def parentNodeXpath = '/'
             def updatedJsonData = '{"first-container":{"a-leaf":"a-new-Value"},"last-container":{"x-leaf":"x-new-value"}}'
         when: 'update operation is performed on multiple data nodes'
-            objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, '/', updatedJsonData, observedTimestamp)
-        then: 'expected exception is thrown'
-            thrown(DataValidationException)
+            objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, parentNodeXpath, updatedJsonData, observedTimestamp)
+        then: 'the persistence service method is invoked with correct parameters'
+            1 * mockCpsDataPersistenceService.batchUpdateDataLeaves(dataspaceName, anchorName, {updatedLeavesPerXPath -> updatedLeavesPerXPath.keySet()[index] == expectedNodeXpath})
+        and: 'the CpsValidator is called on the dataspaceName and AnchorName'
+            1 * mockCpsValidator.validateNameCharacters(dataspaceName, anchorName)
+        and: 'data updated event is sent to notification service'
+            1 * mockNotificationService.processDataUpdatedEvent(anchor, parentNodeXpath, Operation.UPDATE, observedTimestamp)
+        where: 'the following parameters were used'
+            index | expectedNodeXpath
+            0     | '/first-container'
+            1     | '/last-container'
+
     }
 
     def 'Update Bookstore node leaves' () {
@@ -260,8 +270,9 @@ class CpsDataServiceImplSpec extends Specification {
             objectUnderTest.updateNodeLeavesAndExistingDescendantLeaves(dataspaceName, anchorName,
                 '/bookstore', jsonData, observedTimestamp)
         then: 'the persistence service method is invoked with correct parameters'
-            1 * mockCpsDataPersistenceService.updateDataLeaves(dataspaceName, anchorName,
-                "/bookstore/categories[@code='01']", ['name':'Romance', 'code': '01'])
+            1 * mockCpsDataPersistenceService.batchUpdateDataLeaves(dataspaceName, anchorName,
+                    {updatedDataNodesPerXPath -> updatedDataNodesPerXPath.keySet()
+                                                .iterator().next() == "/bookstore/categories[@code='01']"})
         and: 'the CpsValidator is called on the dataspaceName and AnchorName'
             1 * mockCpsValidator.validateNameCharacters(dataspaceName, anchorName)
         and: 'the data updated event is sent to the notification service'
index d662023..0b2d2a4 100755 (executable)
@@ -43,7 +43,7 @@ Bug Fixes
 
 Features
 --------
-    - None
+    - `CPS-1006 <https://jira.onap.org/browse/CPS-1006>`_ Extend CPS PATCH API to allow update of leaves for multiple data nodes
 
 Version: 3.3.1
 ==============
index b7a6030..f18a8e4 100644 (file)
@@ -27,6 +27,7 @@ class FunctionalSpecBase extends CpsIntegrationSpecBase {
     def static NUMBER_OF_ANCHORS_PER_DATASPACE_WITH_BOOKSTORE_DATA = 2
     def static BOOKSTORE_ANCHOR_1 = 'bookstoreAnchor1'
     def static BOOKSTORE_ANCHOR_2 = 'bookstoreAnchor2'
+    def static BOOKSTORE_ANCHOR_FOR_PATCH = 'bookstoreAnchor2'
 
     def static initialized = false
 
index f609ba0..e721414 100644 (file)
@@ -24,6 +24,11 @@ package org.onap.cps.integration.functional
 import org.onap.cps.api.CpsDataService
 import org.onap.cps.integration.base.FunctionalSpecBase
 import org.onap.cps.spi.FetchDescendantsOption
+import org.onap.cps.spi.exceptions.AnchorNotFoundException
+import org.onap.cps.spi.exceptions.DataValidationException
+import org.onap.cps.spi.exceptions.DataspaceNotFoundException
+
+import java.time.OffsetDateTime
 
 class CpsDataServiceIntegrationSpec extends FunctionalSpecBase {
 
@@ -55,4 +60,31 @@ class CpsDataServiceIntegrationSpec extends FunctionalSpecBase {
             assert result.anchorName.toSet() == [BOOKSTORE_ANCHOR_1].toSet()
     }
 
+    def 'Update multiple data node leaves.'() {
+        given: 'Updated json for bookstore data'
+            def jsonData =  "{'book-store:books':{'lang':'English/French','price':100,'title':'Matilda','authors':['RoaldDahl']}}"
+        when: 'update is performed for leaves'
+            objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_FOR_PATCH, "/bookstore/categories[@code='1']", jsonData, OffsetDateTime.now())
+        then: 'the updated data nodes are retrieved'
+            def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_FOR_PATCH, "/bookstore/categories[@code=1]/books[@title='Matilda']", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
+        and: 'the leaf values are updated as expected'
+            assert result.leaves['lang'] == ['English/French']
+            assert result.leaves['price'] == [100]
+    }
+
+    def 'Update multiple data leaves error scenario: #scenario.'() {
+        given: 'Updated json for bookstore data'
+            def jsonData =  "{'book-store:books':{'lang':'English/French','price':100,'title':'Matilda','authors':['RoaldDahl'],'pub_year':1988}}"
+        when: 'attempt to update data node for #scenario'
+            objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, xpath, jsonData, OffsetDateTime.now())
+        then: 'a #expectedException is thrown'
+            thrown(expectedException)
+        where: 'the following data is used'
+            scenario                 | dataspaceName                  | anchorName                 | xpath                 || expectedException
+            'invalid dataspace name' | 'INVALID DATAsPACE'            | 'not-relevant'             | '/not relevant'       || DataValidationException
+            'invalid anchor name'    | FUNCTIONAL_TEST_DATASPACE_1    | 'INVALID ANCHOR'           | '/not relevant'       || DataValidationException
+            'non-existing dataspace' | 'non-existing-dataspace'       | 'not-relevant'             | '/not relevant'       || DataspaceNotFoundException
+            'non-existing anchor'    | FUNCTIONAL_TEST_DATASPACE_1    | 'non-existing-anchor'      | '/not relevant'       || AnchorNotFoundException
+            'non-existing-xpath'     | FUNCTIONAL_TEST_DATASPACE_1    | BOOKSTORE_ANCHOR_FOR_PATCH | '/non-existing'       || DataValidationException
+    }
 }