Merge "Move integration test (DataService)"
author Toine Siebelink <toine.siebelink@est.tech>
Tue, 13 Jun 2023 10:56:29 +0000 (10:56 +0000)
committer Gerrit Code Review <gerrit@onap.org>
Tue, 13 Jun 2023 10:56:29 +0000 (10:56 +0000)
cps-ri/pom.xml
cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
cps-ri/src/main/java/org/onap/cps/spi/impl/CpsModulePersistenceServiceImpl.java
cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy [deleted file]
cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsModulePersistenceServiceSpec.groovy
integration-test/src/test/groovy/org/onap/cps/integration/base/FunctionalSpecBase.groovy
integration-test/src/test/groovy/org/onap/cps/integration/functional/CpsDataServiceIntegrationSpec.groovy
integration-test/src/test/resources/data/bookstore/bookstore.yang

index aa86a7f..66b89de 100644 (file)
@@ -33,8 +33,8 @@
     <artifactId>cps-ri</artifactId>
 
     <properties>
-        <minimum-coverage>0.53</minimum-coverage>
-        <!-- Additional coverage is provided by integration-test module -->
+        <minimum-coverage>0.34</minimum-coverage>
+        <!-- Additional coverage is provided by the integration-test module -->
     </properties>
 
     <dependencies>
index fdb0148..02f7230 100644 (file)
@@ -692,8 +692,7 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
     private String mergeLeaves(final Map<String, Serializable> updateLeaves, final String currentLeavesAsString) {
         Map<String, Serializable> currentLeavesAsMap = new HashMap<>();
         if (currentLeavesAsString != null) {
-            currentLeavesAsMap = currentLeavesAsString.isEmpty()
-                    ? new HashMap<>() : jsonObjectMapper.convertJsonString(currentLeavesAsString, Map.class);
+            currentLeavesAsMap = jsonObjectMapper.convertJsonString(currentLeavesAsString, Map.class);
             currentLeavesAsMap.putAll(updateLeaves);
         }
 
index e149a1f..1365120 100755 (executable)
@@ -340,15 +340,15 @@ public class CpsModulePersistenceServiceImpl implements CpsModulePersistenceServ
         if (optionalFileName.isPresent()) {
             return optionalFileName.get();
         }
-        return null;
+        return "no filename";
     }
 
     private String getDuplicatedChecksumFromException(final ConstraintViolationException exception) {
         final Matcher matcher = CHECKSUM_EXCEPTION_PATTERN.matcher(exception.getSQLException().getMessage());
-        if (matcher.find() && matcher.groupCount() == 1) {
+        if (matcher.find()) {
             return matcher.group(1);
         }
-        return null;
+        return "no checksum found";
     }
 
     private static ModuleReference toModuleReference(
diff --git a/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy b/cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceIntegrationSpec.groovy
deleted file mode 100644 (file)
index 6d6dfd2..0000000
+++ /dev/null
@@ -1,727 +0,0 @@
-/*
- *  ============LICENSE_START=======================================================
- *  Copyright (C) 2021-2023 Nordix Foundation
- *  Modifications Copyright (C) 2021 Pantheon.tech
- *  Modifications Copyright (C) 2021-2022 Bell Canada.
- *  Modifications Copyright (C) 2022-2023 TechMahindra Ltd.
- *  ================================================================================
- *  Licensed under the Apache License, Version 2.0 (the "License");
- *  you may not use this file except in compliance with the License.
- *  You may obtain a copy of the License at
- *
- *        http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- *
- *  SPDX-License-Identifier: Apache-2.0
- *  ============LICENSE_END=========================================================
- */
-
-package org.onap.cps.spi.impl
-
-import com.google.common.collect.ImmutableSet
-import org.onap.cps.spi.CpsDataPersistenceService
-import org.onap.cps.spi.entities.FragmentEntity
-import org.onap.cps.spi.exceptions.AlreadyDefinedExceptionBatch
-import org.onap.cps.spi.exceptions.AnchorNotFoundException
-import org.onap.cps.spi.exceptions.CpsAdminException
-import org.onap.cps.spi.exceptions.CpsPathException
-import org.onap.cps.spi.exceptions.DataNodeNotFoundException
-import org.onap.cps.spi.exceptions.DataspaceNotFoundException
-import org.onap.cps.spi.model.DataNode
-import org.onap.cps.spi.model.DataNodeBuilder
-import org.springframework.beans.factory.annotation.Autowired
-import org.springframework.test.context.jdbc.Sql
-
-import javax.validation.ConstraintViolationException
-
-import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS
-import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS
-
-class CpsDataPersistenceServiceIntegrationSpec extends CpsPersistenceSpecBase {
-
-    @Autowired
-    CpsDataPersistenceService objectUnderTest
-
-    static DataNodeBuilder dataNodeBuilder = new DataNodeBuilder()
-
-    static final String SET_DATA = '/data/fragment.sql'
-    static long ID_DATA_NODE_WITH_DESCENDANTS = 4001
-    static String XPATH_DATA_NODE_WITH_DESCENDANTS = '/parent-1'
-    static long DATA_NODE_202_FRAGMENT_ID = 4202L
-    static long CHILD_OF_DATA_NODE_202_FRAGMENT_ID = 4203L
-    static long LIST_DATA_NODE_PARENT201_FRAGMENT_ID = 4206L
-    static long LIST_DATA_NODE_PARENT203_FRAGMENT_ID = 4214L
-    static long LIST_DATA_NODE_PARENT202_FRAGMENT_ID = 4211L
-    static long PARENT_3_FRAGMENT_ID = 4003L
-
-    static Collection<DataNode> newDataNodes = [new DataNodeBuilder().build()]
-    static Collection<DataNode> existingDataNodes = [createDataNodeTree(XPATH_DATA_NODE_WITH_DESCENDANTS)]
-    static Collection<DataNode> existingChildDataNodes = [createDataNodeTree('/parent-1/child-1')]
-
-    def static deleteTestParentXPath = '/parent-200'
-    def static deleteTestChildXpath = "${deleteTestParentXPath}/child-with-slash[@key='a/b']"
-    def static deleteTestGrandChildXPath = "${deleteTestChildXpath}/grandChild"
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Get all datanodes with descendants.'() {
-        when: 'data nodes are retrieved by their xpath'
-            def dataNodes = objectUnderTest.getDataNodesForMultipleXpaths(DATASPACE_NAME, ANCHOR_NAME1, ['/parent-1'], INCLUDE_ALL_DESCENDANTS)
-        then: 'same data nodes are returned by getDataNodesForMultipleXpaths method'
-            assert objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_NAME1, '/parent-1', INCLUDE_ALL_DESCENDANTS) == dataNodes
-        and: 'the dataNodes have no prefix (to be addressed by CPS-1301)'
-            assert dataNodes[0].moduleNamePrefix == null
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Storing and Retrieving a new DataNode with descendants.'() {
-        when: 'a fragment with descendants is stored'
-            def parentXpath = '/parent-new'
-            def childXpath = '/parent-new/child-new'
-            def grandChildXpath = '/parent-new/child-new/grandchild-new'
-            def dataNodes = [createDataNodeTree(parentXpath, childXpath, grandChildXpath)]
-            objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME1, dataNodes)
-        then: 'it can be retrieved by its xpath'
-            def dataNode = objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, INCLUDE_ALL_DESCENDANTS)
-            assert dataNode[0].xpath == parentXpath
-        and: 'it has the correct child'
-            assert dataNode[0].childDataNodes.size() == 1
-            def childDataNode = dataNode[0].childDataNodes[0]
-            assert childDataNode.xpath == childXpath
-        and: 'it has the correct grandchild'
-            assert childDataNode.childDataNodes.size() == 1
-            def grandChildDataNode = childDataNode.childDataNodes[0]
-            assert grandChildDataNode.xpath == grandChildXpath
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Store data node for multiple anchors using the same schema.'() {
-        def xpath = '/parent-new'
-        given: 'a fragment is stored for an anchor'
-            objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME1, [createDataNodeTree(xpath)])
-        when: 'another fragment is stored for another anchor, using the same schema set'
-            objectUnderTest.storeDataNodes(DATASPACE_NAME, ANCHOR_NAME3, [createDataNodeTree(xpath)])
-        then: 'both fragments can be retrieved by their xpath'
-            def fragment1 = getFragmentByXpath(DATASPACE_NAME, ANCHOR_NAME1, xpath)
-            fragment1.anchor.name == ANCHOR_NAME1
-            fragment1.xpath == xpath
-            def fragment2 = getFragmentByXpath(DATASPACE_NAME, ANCHOR_NAME3, xpath)
-            fragment2.anchor.name == ANCHOR_NAME3
-            fragment2.xpath == xpath
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Store datanodes error scenario: #scenario.'() {
-        when: 'attempt to store a data node with #scenario'
-            objectUnderTest.storeDataNodes(dataspaceName, anchorName, dataNodes)
-        then: 'a #expectedException is thrown'
-            thrown(expectedException)
-        where: 'the following data is used'
-            scenario                    | dataspaceName  | anchorName     | dataNodes          || expectedException
-            'dataspace does not exist'  | 'unknown'      | 'not-relevant' | newDataNodes       || DataspaceNotFoundException
-            'schema set does not exist' | DATASPACE_NAME | 'unknown'      | newDataNodes       || AnchorNotFoundException
-            'anchor already exists'     | DATASPACE_NAME | ANCHOR_NAME1   | newDataNodes       || ConstraintViolationException
-            'datanode already exists'   | DATASPACE_NAME | ANCHOR_NAME1   | existingDataNodes  || AlreadyDefinedExceptionBatch
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Add children to a Fragment that already has a child.'() {
-        given: 'collection of new child data nodes'
-            def newChild1 = createDataNodeTree('/parent-1/child-2')
-            def newChild2 = createDataNodeTree('/parent-1/child-3')
-            def newChildrenCollection = [newChild1, newChild2]
-        when: 'the children are added to an existing parent with 1 child'
-            objectUnderTest.addChildDataNodes(DATASPACE_NAME, ANCHOR_NAME1, XPATH_DATA_NODE_WITH_DESCENDANTS, newChildrenCollection)
-        then: 'the parent now has 3 children'
-            def expectedExistingChildPath = '/parent-1/child-1'
-            def parentFragment = fragmentRepository.findById(ID_DATA_NODE_WITH_DESCENDANTS).orElseThrow()
-            parentFragment.childFragments.size() == 3
-        and: 'it still has the old child'
-            parentFragment.childFragments.find({ it.xpath == expectedExistingChildPath })
-        and: 'it has the new children'
-            parentFragment.childFragments.find({ it.xpath == newChildrenCollection[0].xpath })
-            parentFragment.childFragments.find({ it.xpath == newChildrenCollection[1].xpath })
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Add child error scenario: #scenario.'() {
-        when: 'attempt to add a child data node with #scenario'
-            objectUnderTest.addChildDataNodes(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, dataNodes)
-        then: 'a #expectedException is thrown'
-            thrown(expectedException)
-        where: 'the following data is used'
-            scenario                 | parentXpath                      | dataNodes               || expectedException
-            'parent does not exist'  | '/unknown'                       | newDataNodes            || DataNodeNotFoundException
-            'already existing child' | XPATH_DATA_NODE_WITH_DESCENDANTS | existingChildDataNodes  || AlreadyDefinedExceptionBatch
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Add collection of multiple new list elements including an element with a child datanode.'() {
-        given: 'two new child list elements for an existing parent'
-            def listElementXpaths = ['/parent-201/child-204[@key="NEW1"]', '/parent-201/child-204[@key="NEW2"]']
-            def listElements = toDataNodes(listElementXpaths)
-        and: 'a (grand)child data node for one of the new list elements'
-            def grandChild = buildDataNode('/parent-201/child-204[@key="NEW1"]/grand-child-204[@key2="NEW1-CHILD"]', [leave:'value'], [])
-            listElements[0].childDataNodes = [grandChild]
-        when: 'the new data nodes (list elements) are added to an existing parent node'
-            objectUnderTest.addMultipleLists(DATASPACE_NAME, ANCHOR_NAME3, '/parent-201', [listElements])
-        then: 'new entries are successfully persisted, parent node now contains 5 children (2 new + 3 existing before)'
-            def parentFragment = fragmentRepository.getReferenceById(LIST_DATA_NODE_PARENT201_FRAGMENT_ID)
-            def allChildXpaths = parentFragment.childFragments.collect { it.xpath }
-            assert allChildXpaths.size() == 5
-            assert allChildXpaths.containsAll(listElementXpaths)
-        and: 'the (grand)child node of the new list entry is also present'
-            def dataspaceEntity = dataspaceRepository.getByName(DATASPACE_NAME)
-            def anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, ANCHOR_NAME3)
-            def grandChildFragmentEntity = fragmentRepository.findByAnchorAndXpath(anchorEntity, grandChild.xpath)
-            assert grandChildFragmentEntity.isPresent()
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Add multiple lists with a mix of existing and new elements'() {
-        given: 'two new child list elements for an existing parent'
-            def existingDataNode = dataNodeBuilder.withXpath('/parent-207/child-001').withLeaves(['id': '001']).build()
-            def newDataNode1 = dataNodeBuilder.withXpath('/parent-207/child-new1').withLeaves(['id': 'new1']).build()
-            def newDataNode2 = dataNodeBuilder.withXpath('/parent-200/child-new2').withLeaves(['id': 'new2']).build()
-            def dataNodeList1 = [existingDataNode, newDataNode1]
-            def dataNodeList2 = [newDataNode2]
-        when: 'duplicate data node is requested to be added'
-            objectUnderTest.addMultipleLists(DATASPACE_NAME, ANCHOR_HAVING_SINGLE_TOP_LEVEL_FRAGMENT, '/', [dataNodeList1, dataNodeList2])
-        then: 'already defined batch exception is thrown'
-            def thrown = thrown(AlreadyDefinedExceptionBatch)
-        and: 'it only contains the xpath(s) of the duplicated elements'
-            assert thrown.alreadyDefinedXpaths.size() == 1
-            assert thrown.alreadyDefinedXpaths.contains('/parent-207/child-001')
-        and: 'it does NOT contain the xpaths of the new elements that were not combined with existing elements'
-            assert !thrown.alreadyDefinedXpaths.contains('/parent-207/child-new1')
-            assert !thrown.alreadyDefinedXpaths.contains('/parent-200/child-new2')
-        and: 'the new entity is inserted correctly'
-            def dataspaceEntity = dataspaceRepository.getByName(DATASPACE_NAME)
-            def anchorEntity = anchorRepository.getByDataspaceAndName(dataspaceEntity, ANCHOR_HAVING_SINGLE_TOP_LEVEL_FRAGMENT)
-            fragmentRepository.findByAnchorAndXpath(anchorEntity, '/parent-200/child-new2').isPresent()
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Add list element error scenario: #scenario.'() {
-        given: 'list element as a collection of data nodes'
-            def listElements = toDataNodes(listElementXpaths)
-        when: 'attempt to add list elements to parent node'
-            objectUnderTest.addListElements(DATASPACE_NAME, ANCHOR_NAME3, parentNodeXpath, listElements)
-        then: 'a #expectedException is thrown'
-            thrown(expectedException)
-        where: 'following parameters were used'
-            scenario                        | parentNodeXpath | listElementXpaths                   || expectedException
-            'parent node does not exist'    | '/unknown'      | ['irrelevant']                      || DataNodeNotFoundException
-            'data fragment already exists'  | '/parent-201'   | ["/parent-201/child-204[@key='A']"] || AlreadyDefinedExceptionBatch
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Get all data nodes by single xpath without descendants : #scenario'() {
-        when: 'data nodes are requested'
-            def result = objectUnderTest.getDataNodesForMultipleXpaths(DATASPACE_NAME, ANCHOR_WITH_MULTIPLE_TOP_LEVEL_FRAGMENTS,
-                [inputXPath], OMIT_DESCENDANTS)
-        then: 'data nodes under root are returned'
-            assert result.childDataNodes.size() == 2
-        and: 'no descendants of parent nodes are returned'
-            result.each {assert it.childDataNodes.size() == 0}
-        and: 'same data nodes are returned when V2 of get Data Nodes API is executed'
-            assert objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_WITH_MULTIPLE_TOP_LEVEL_FRAGMENTS,
-                inputXPath, OMIT_DESCENDANTS) == result
-        where: 'the following xpath is used'
-            scenario      | inputXPath
-            'root xpath'  | '/'
-            'empty xpath' | ''
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Cps Path query with syntax error throws a CPS Path Exception.'() {
-        when: 'trying to execute a query with a syntax (parsing) error'
-            objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, 'invalid-cps-path/child' , OMIT_DESCENDANTS)
-        then: 'exception is thrown'
-            def exceptionThrown = thrown(CpsPathException)
-            assert exceptionThrown.getDetails() == "failed to parse at line 1 due to extraneous input 'invalid-cps-path' expecting '/'"
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Get all data nodes by single xpath with all descendants : #scenario'() {
-        when: 'data nodes are requested with all descendants'
-            def result = objectUnderTest.getDataNodesForMultipleXpaths(DATASPACE_NAME, ANCHOR_WITH_MULTIPLE_TOP_LEVEL_FRAGMENTS,
-                [inputXPath], INCLUDE_ALL_DESCENDANTS)
-            def mappedResult = multipleTreesToFlatMapByXpath(new HashMap<>(), result)
-        then: 'data nodes are returned with all the descendants populated'
-            assert mappedResult.size() == 8
-            assert result.childDataNodes.size() == 2
-            assert mappedResult.get('/parent-208/child-001').childDataNodes.size() == 0
-            assert mappedResult.get('/parent-208/child-002').childDataNodes.size() == 1
-            assert mappedResult.get('/parent-209/child-001').childDataNodes.size() == 0
-            assert mappedResult.get('/parent-209/child-002').childDataNodes.size() == 1
-        and: 'same data nodes are returned when V2 of Get Data Nodes API is executed'
-            assert objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_WITH_MULTIPLE_TOP_LEVEL_FRAGMENTS,
-                inputXPath, INCLUDE_ALL_DESCENDANTS) == result
-        where: 'the following data is used'
-            scenario      | inputXPath
-            'root xpath'  | '/'
-            'empty xpath' | ''
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Get data nodes error scenario : #scenario.'() {
-        when: 'attempt to get data nodes with #scenario'
-            objectUnderTest.getDataNodes(dataspaceName, anchorName, xpath, OMIT_DESCENDANTS)
-        then: 'an #expectedException is thrown'
-            thrown(expectedException)
-        where: 'the following data is used'
-            scenario             | dataspaceName  | anchorName                        | xpath           || expectedException
-            'non existing xpath' | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | '/NO-XPATH'     || DataNodeNotFoundException
-            'invalid Xpath'      | DATASPACE_NAME | ANCHOR_FOR_DATA_NODES_WITH_LEAVES | 'INVALID XPATH' || CpsPathException
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Get data nodes for multiple xpaths.'() {
-        when: 'fetch #scenario.'
-            def results = objectUnderTest.getDataNodesForMultipleXpaths(DATASPACE_NAME, ANCHOR_NAME3, inputXpaths, OMIT_DESCENDANTS)
-        then: 'the expected number of data nodes are returned'
-            assert results.size() == expectedResultSize
-        where: 'following parameters were used'
-            scenario                               | inputXpaths                                     || expectedResultSize
-            '0 nodes'                              | []                                              || 0
-            '1 node'                               | ["/parent-200"]                                 || 1
-            '2 unique nodes'                       | ["/parent-200", "/parent-201"]                  || 2
-            '3 unique nodes'                       | ["/parent-200", "/parent-201", "/parent-202"]   || 3
-            '1 unique node with duplicate xpath'   | ["/parent-200", "/parent-200"]                  || 1
-            '2 unique nodes with duplicate xpath'  | ["/parent-200", "/parent-202", "/parent-200"]   || 2
-            'list element with key (single quote)' | ["/parent-201/child-204[@key='A']"]             || 1
-            'list element with key (double quote)' | ['/parent-201/child-204[@key="A"]']             || 1
-            'whole list (not implemented)'         | ["/parent-201/child-204"]                       || 0
-            'non-existing xpath'                   | ["/NO-XPATH"]                                   || 0
-            'existing and non-existing xpaths'     | ["/parent-200", "/NO-XPATH", "/parent-201"]     || 2
-            'invalid xpath'                        | ["INVALID XPATH"]                               || 0
-            'valid and invalid xpaths'             | ["/parent-200", "INVALID XPATH", "/parent-201"] || 2
-            'root xpath'                           | ["/"]                                           || 7
-            'empty (root) xpath'                   | [""]                                            || 7
-            'root and top-level xpaths'            | ["/", "/parent-200", "/parent-201"]             || 7
-            'root and child xpaths'                | ["/", "/parent-200/child-201"]                  || 7
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Get data nodes for collection of xpath error scenario : #scenario.'() {
-        when: 'attempt to get data nodes with #scenario'
-            objectUnderTest.getDataNodesForMultipleXpaths(dataspaceName, anchorName, ['/not-relevant'], OMIT_DESCENDANTS)
-        then: 'a #expectedException is thrown'
-            thrown(expectedException)
-        where: 'the following data is used'
-            scenario                 | dataspaceName  | anchorName     || expectedException
-            'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' || DataspaceNotFoundException
-            'non-existing anchor'    | DATASPACE_NAME | 'NO ANCHOR'    || AnchorNotFoundException
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Update data nodes and descendants by removing descendants.'() {
-        given: 'data nodes with leaves updated, no children'
-            def submittedDataNodes = [buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [])]
-        when: 'update data nodes and descendants is performed'
-            objectUnderTest.updateDataNodesAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNodes)
-        then: 'leaves have been updated for selected data node'
-            def updatedFragment = fragmentRepository.getReferenceById(DATA_NODE_202_FRAGMENT_ID)
-            def updatedLeaves = getLeavesMap(updatedFragment)
-            assert updatedLeaves.size() == 1
-            assert updatedLeaves.'leaf-value' == 'new'
-        and: 'updated entry has no children'
-            updatedFragment.childFragments.isEmpty()
-        and: 'previously attached child entry is removed from database'
-            fragmentRepository.findById(CHILD_OF_DATA_NODE_202_FRAGMENT_ID).isEmpty()
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Update data nodes and descendants with new descendants'() {
-        given: 'data nodes with leaves updated, having child with old content'
-            def submittedDataNodes = [buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [
-                  buildDataNode('/parent-200/child-201/grand-child', ['leaf-value': 'original'], [])
-            ])]
-        when: 'update is performed including descendants'
-            objectUnderTest.updateDataNodesAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNodes)
-        then: 'leaves have been updated for selected data node'
-            def updatedFragment = fragmentRepository.getReferenceById(DATA_NODE_202_FRAGMENT_ID)
-            def updatedLeaves = getLeavesMap(updatedFragment)
-            assert updatedLeaves.size() == 1
-            assert updatedLeaves.'leaf-value' == 'new'
-        and: 'existing child entry is not updated as content is same'
-            def childFragment = updatedFragment.childFragments.iterator().next()
-            childFragment.xpath == '/parent-200/child-201/grand-child'
-            def childLeaves = getLeavesMap(childFragment)
-            assert childLeaves.'leaf-value' == 'original'
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Update data nodes and descendants with same descendants but changed leaf value.'() {
-        given: 'data nodes with leaves updated, having child with old content'
-            def submittedDataNodes = [buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [
-                    buildDataNode('/parent-200/child-201/grand-child', ['leaf-value': 'new'], [])
-            ])]
-        when: 'update is performed including descendants'
-            objectUnderTest.updateDataNodesAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNodes)
-        then: 'leaves have been updated for selected data node'
-            def updatedFragment = fragmentRepository.getReferenceById(DATA_NODE_202_FRAGMENT_ID)
-            def updatedLeaves = getLeavesMap(updatedFragment)
-            assert updatedLeaves.size() == 1
-            assert updatedLeaves.'leaf-value' == 'new'
-        and: 'existing child entry is updated with the new content'
-            def childFragment = updatedFragment.childFragments.iterator().next()
-            childFragment.xpath == '/parent-200/child-201/grand-child'
-            def childLeaves = getLeavesMap(childFragment)
-            assert childLeaves.'leaf-value' == 'new'
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Update data nodes and descendants with different descendants xpath'() {
-        given: 'data nodes with leaves updated, having child with old content'
-            def submittedDataNodes = [buildDataNode('/parent-200/child-201', ['leaf-value': 'new'], [
-                    buildDataNode('/parent-200/child-201/grand-child-new', ['leaf-value': 'new'], [])
-            ])]
-        when: 'update is performed including descendants'
-            objectUnderTest.updateDataNodesAndDescendants(DATASPACE_NAME, ANCHOR_FOR_DATA_NODES_WITH_LEAVES, submittedDataNodes)
-        then: 'leaves have been updated for selected data node'
-            def updatedFragment = fragmentRepository.getReferenceById(DATA_NODE_202_FRAGMENT_ID)
-            def updatedLeaves = getLeavesMap(updatedFragment)
-            assert updatedLeaves.size() == 1
-            assert updatedLeaves.'leaf-value' == 'new'
-        and: 'previously attached child entry is removed from database'
-            fragmentRepository.findById(CHILD_OF_DATA_NODE_202_FRAGMENT_ID).isEmpty()
-        and: 'new child entry is persisted'
-            def childFragment = updatedFragment.childFragments.iterator().next()
-            childFragment.xpath == '/parent-200/child-201/grand-child-new'
-            def childLeaves = getLeavesMap(childFragment)
-            assert childLeaves.'leaf-value' == 'new'
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Update data nodes and descendants error scenario: #scenario.'() {
-        given: 'data nodes collection'
-            def submittedDataNodes = [buildDataNode(xpath, ['leaf-name': 'leaf-value'], [])]
-        when: 'attempt to update data node for #scenario'
-            objectUnderTest.updateDataNodesAndDescendants(dataspaceName, anchorName, submittedDataNodes)
-        then: 'a #expectedException is thrown'
-            thrown(expectedException)
-        where: 'the following data is used'
-            scenario                 | dataspaceName  | anchorName                        | xpath                 || expectedException
-            'non-existing dataspace' | 'NO DATASPACE' | 'not relevant'                    | '/not relevant'       || DataspaceNotFoundException
-            'non-existing anchor'    | DATASPACE_NAME | 'NO ANCHOR'                       | '/not relevant'       || AnchorNotFoundException
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Update existing list with #scenario.'() {
-        given: 'a parent having a list of data nodes containing: #originalKeys (each list element has a child too)'
-            def parentXpath = '/parent-3'
-            if (originalKeys.size() > 0) {
-                def originalListEntriesAsDataNodes = createChildListAllHavingAttributeValue(parentXpath, 'original value', originalKeys, true)
-                objectUnderTest.addListElements(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, originalListEntriesAsDataNodes)
-            }
-        and: 'each original list element has one child'
-            def originalParentFragment = fragmentRepository.getReferenceById(PARENT_3_FRAGMENT_ID)
-            originalParentFragment.childFragments.each {assert it.childFragments.size() == 1 }
-        when: 'it is updated with #scenario'
-            def replacementListEntriesAsDataNodes = createChildListAllHavingAttributeValue(parentXpath, 'new value', replacementKeys, false)
-            objectUnderTest.replaceListContent(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, replacementListEntriesAsDataNodes)
-        then: 'the result list ONLY contains the expected replacement elements'
-            def parentFragment = fragmentRepository.getReferenceById(PARENT_3_FRAGMENT_ID)
-            def allChildXpaths = parentFragment.childFragments.collect { it.xpath }
-            def expectedListEntriesAfterUpdateAsXpaths = keysToXpaths(parentXpath, replacementKeys)
-            assert allChildXpaths.size() == replacementKeys.size()
-            assert allChildXpaths.containsAll(expectedListEntriesAfterUpdateAsXpaths)
-        and: 'all the list elements have the new values'
-            assert parentFragment.childFragments.stream().allMatch(childFragment -> childFragment.attributes.contains('new value'))
-        and: 'there are no more grandchildren as none of the replacement list entries had a child'
-            parentFragment.childFragments.each {assert it.childFragments.size() == 0 }
-        where: 'the following replacement lists are applied'
-            scenario                                          | originalKeys | replacementKeys
-            'one new entry only'                              | []           | ['NEW']
-            'multiple new entries'                            | []           | ['NEW1', 'NEW2']
-            'new entries only (existing entries are deleted)' | ['A', 'B']   | ['NEW1', 'NEW2']
-            'one existing and one new entry'                  | ['A', 'B']   | ['A', 'NEW']
-            'one existing entry only'                         | ['A', 'B']   | ['A']
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Replacing existing list element with attributes and (grand)child.'() {
-        given: 'a parent with list elements A and B with attribute and grandchild tagged as "org"'
-            def parentXpath = '/parent-3'
-            def originalListEntriesAsDataNodes = createChildListAllHavingAttributeValue(parentXpath, 'org', ['A','B'], true)
-            objectUnderTest.addListElements(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, originalListEntriesAsDataNodes)
-        when: 'A is replaced with an entry with attribute and grandchild tagged as "new" (B is not in replacement list)'
-            def replacementListEntriesAsDataNodes = createChildListAllHavingAttributeValue(parentXpath, 'new', ['A'], true)
-            objectUnderTest.replaceListContent(DATASPACE_NAME, ANCHOR_NAME1, parentXpath, replacementListEntriesAsDataNodes)
-        then: 'The updated fragment has a child-list with ONLY element "A"'
-            def parentFragment = fragmentRepository.getReferenceById(PARENT_3_FRAGMENT_ID)
-            parentFragment.childFragments.size() == 1
-            def childListElementA = parentFragment.childFragments[0]
-            childListElementA.xpath == "/parent-3/child-list[@key='A']"
-        and: 'element "A" has an attribute with the "new" (tag) value'
-            childListElementA.attributes == '{"attr1": "new"}'
-        and: 'element "A" has a only one (grand)child'
-            childListElementA.childFragments.size() == 1
-        and: 'the grandchild is the new grandchild (tag)'
-            def grandChild = childListElementA.childFragments[0]
-            grandChild.xpath == "/parent-3/child-list[@key='A']/new-grand-child"
-        and: 'the grandchild has an attribute with the "new" (tag) value'
-            grandChild.attributes == '{"attr1": "new"}'
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Replace list element for a parent (parent-1) with one existing (non-list) child'() {
-        when: 'a list element is added under the parent'
-            def replacementListEntriesAsDataNodes = createChildListAllHavingAttributeValue(XPATH_DATA_NODE_WITH_DESCENDANTS, 'new', ['A','B'], false)
-            objectUnderTest.replaceListContent(DATASPACE_NAME, ANCHOR_NAME1, XPATH_DATA_NODE_WITH_DESCENDANTS, replacementListEntriesAsDataNodes)
-        then: 'the parent will have 3 children after the replacement'
-            def parentFragment = fragmentRepository.getReferenceById(ID_DATA_NODE_WITH_DESCENDANTS)
-            parentFragment.childFragments.size() == 3
-            def xpaths = parentFragment.childFragments.collect {it.xpath}
-        and: 'one of the children is the original child fragment'
-            xpaths.contains('/parent-1/child-1')
-        and: 'it has the two new list elements'
-            xpaths.containsAll("/parent-1/child-list[@key='A']", "/parent-1/child-list[@key='B']")
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Replace list content using unknown parent'() {
-        given: 'list element as a collection of data nodes'
-            def listElementCollection = toDataNodes(['irrelevant'])
-        when: 'attempt to replace list elements under unknown parent node'
-            objectUnderTest.replaceListContent(DATASPACE_NAME, ANCHOR_NAME3, '/unknown', listElementCollection)
-        then: 'a datanode not found exception is thrown'
-            thrown(DataNodeNotFoundException)
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Replace list content with empty collection is not supported'() {
-        when: 'attempt to replace list elements with empty collection'
-            objectUnderTest.replaceListContent(DATASPACE_NAME, ANCHOR_NAME3, '/parent-203', [])
-        then: 'a CPS admin exception is thrown'
-            def thrown = thrown(CpsAdminException)
-            assert thrown.message == 'Invalid list replacement'
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Delete list scenario: #scenario.'() {
-        when: 'deleting list is executed for: #scenario.'
-            objectUnderTest.deleteListDataNode(DATASPACE_NAME, ANCHOR_NAME3, targetXpaths)
-        and: 'remaining children are fetched'
-            def parentFragment = fragmentRepository.getReferenceById(parentFragmentId)
-            def remainingChildXpaths = parentFragment.childFragments.collect { it.xpath }
-        then: 'only the expected children remain'
-            assert remainingChildXpaths.size() == expectedRemainingChildXpaths.size()
-            assert remainingChildXpaths.containsAll(expectedRemainingChildXpaths)
-        where: 'following parameters were used'
-            scenario                          | targetXpaths                                                 | parentFragmentId                     || expectedRemainingChildXpaths
-            'list element with key'           | '/parent-203/child-204[@key="A"]'                            | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ["/parent-203/child-203", "/parent-203/child-204[@key='B']"]
-            'list element with combined keys' | '/parent-202/child-205[@key="A" and @key2="B"]'              | LIST_DATA_NODE_PARENT202_FRAGMENT_ID || ["/parent-202/child-206[@key='A']"]
-            'whole list'                      | '/parent-203/child-204'                                      | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ['/parent-203/child-203']
-            'list element under list element' | '/parent-203/child-204[@key="B"]/grand-child-204[@key2="Y"]' | LIST_DATA_NODE_PARENT203_FRAGMENT_ID || ["/parent-203/child-203", "/parent-203/child-204[@key='A']", "/parent-203/child-204[@key='B']"]
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Delete multiple data nodes using scenario: #scenario.'() {
-        when: 'deleting nodes is executed for: #scenario.'
-            objectUnderTest.deleteDataNodes(DATASPACE_NAME, ANCHOR_NAME3, targetXpaths)
-        and: 'remaining children are fetched'
-            def parentFragment = fragmentRepository.getReferenceById(LIST_DATA_NODE_PARENT203_FRAGMENT_ID)
-            def remainingChildXpaths = parentFragment.childFragments.collect { it.xpath }
-        then: 'only the expected children remain'
-            assert remainingChildXpaths.size() == expectedRemainingChildXpaths.size()
-            assert remainingChildXpaths.containsAll(expectedRemainingChildXpaths)
-        where: 'following parameters were used'
-            scenario                          | targetXpaths                                                           || expectedRemainingChildXpaths
-            'delete nothing'                  | []                                                                     || ["/parent-203/child-203", "/parent-203/child-204[@key='A']", "/parent-203/child-204[@key='B']"]
-            'datanode'                        | ['/parent-203/child-203']                                              || ["/parent-203/child-204[@key='A']", "/parent-203/child-204[@key='B']"]
-            '1 list element'                  | ['/parent-203/child-204[@key="A"]']                                    || ["/parent-203/child-203", "/parent-203/child-204[@key='B']"]
-            '2 list elements'                 | ['/parent-203/child-204[@key="A"]', '/parent-203/child-204[@key="B"]'] || ["/parent-203/child-203"]
-            'whole list'                      | ['/parent-203/child-204']                                              || ['/parent-203/child-203']
-            'list and element in same list'   | ['/parent-203/child-204', '/parent-203/child-204[@key="A"]']           || ['/parent-203/child-203']
-            'list element under list element' | ['/parent-203/child-204[@key="B"]/grand-child-204[@key2="Y"]']         || ["/parent-203/child-203", "/parent-203/child-204[@key='A']", "/parent-203/child-204[@key='B']"]
-            'invalid xpath'                   | ['INVALID XPATH', '/parent-203/child-204']                             || ['/parent-203/child-203']
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Delete multiple data nodes error scenario: #scenario.'() {
-        when: 'deleting nodes is executed for: #scenario.'
-            objectUnderTest.deleteDataNodes(dataspaceName, anchorName, targetXpaths)
-        then: 'a #expectedException is thrown'
-            thrown(expectedException)
-        where: 'the following data is used'
-            scenario                 | dataspaceName  | anchorName     | targetXpaths            || expectedException
-            'non-existing dataspace' | 'NO DATASPACE' | 'not relevant' | ['/not relevant']       || DataspaceNotFoundException
-            'non-existing anchor'    | DATASPACE_NAME | 'NO ANCHOR'    | ['/not relevant']       || AnchorNotFoundException
-            'non-existing datanode'  | DATASPACE_NAME | ANCHOR_NAME3   | ['/NON-EXISTING-XPATH'] || DataNodeNotFoundException
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Delete data nodes with "/"-token in list key value: #scenario. (CPS-1409)'() {
-        given: 'a data node with list-element child with "/" in index value (and grandchild)'
-            def grandChild = new DataNodeBuilder().withXpath(deleteTestGrandChildXPath).build()
-            def child = new DataNodeBuilder().withXpath(deleteTestChildXpath).withChildDataNodes([grandChild]).build()
-            objectUnderTest.addChildDataNodes(DATASPACE_NAME, ANCHOR_NAME3, deleteTestParentXPath, [child])
-        and: 'number of children before delete is stored'
-            def numberOfChildrenBeforeDelete = objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_NAME3, pathToParentOfDeletedNode, INCLUDE_ALL_DESCENDANTS)[0].childDataNodes.size()
-        when: 'target node is deleted'
-            objectUnderTest.deleteDataNode(DATASPACE_NAME, ANCHOR_NAME3, deleteTarget)
-        then: 'one child has been deleted'
-            def numberOfChildrenAfterDelete = objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_NAME3, pathToParentOfDeletedNode, INCLUDE_ALL_DESCENDANTS)[0].childDataNodes.size()
-            assert numberOfChildrenAfterDelete == numberOfChildrenBeforeDelete - 1
-        where:
-            scenario                | deleteTarget              | pathToParentOfDeletedNode
-            'list element with /'   | deleteTestChildXpath      | deleteTestParentXPath
-            'child of list element' | deleteTestGrandChildXPath | deleteTestChildXpath
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Delete list error scenario: #scenario.'() {
-        when: 'attempting to delete scenario: #scenario.'
-            objectUnderTest.deleteListDataNode(DATASPACE_NAME, ANCHOR_NAME3, targetXpaths)
-        then: 'a DataNodeNotFoundException is thrown'
-            thrown(DataNodeNotFoundException)
-        where: 'following parameters were used'
-            scenario                                   | targetXpaths
-            'whole list, parent node does not exist'   | '/unknown/some-child'
-            'list element, parent node does not exist' | '/unknown/child-204[@key="A"]'
-            'whole list does not exist'                | '/parent-200/unknown'
-            'list element, list does not exist'        | '/parent-200/unknown[@key="C"]'
-            'list element, element does not exist'     | '/parent-203/child-204[@key="C"]'
-            'valid datanode but not a list'            | '/parent-200/child-202'
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Delete data node by xpath #scenario.'() {
-        given: 'a valid data node'
-            def dataNode
-        and: 'data nodes are deleted'
-            objectUnderTest.deleteDataNode(DATASPACE_NAME, ANCHOR_NAME3, xpathForDeletion)
-        when: 'verify data nodes are removed'
-            objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_NAME3, xpathForDeletion, INCLUDE_ALL_DESCENDANTS)
-        then:
-            thrown(DataNodeNotFoundException)
-        and: 'some related object is not deleted'
-            if (xpathSurvivor!=null) {
-                dataNode = objectUnderTest.getDataNodes(DATASPACE_NAME, ANCHOR_NAME3, xpathSurvivor, INCLUDE_ALL_DESCENDANTS)
-                assert dataNode[0].xpath == xpathSurvivor
-            }
-        where: 'following parameters were used'
-            scenario                               | xpathForDeletion                                  || xpathSurvivor
-            'child data node, parent still exists' | '/parent-206/child-206'                           || '/parent-206'
-            'list element, sibling still exists'   | '/parent-206/child-206/grand-child-206[@key="A"]' || "/parent-206/child-206/grand-child-206[@key='X']"
-            'container node'                       | '/parent-206'                                     || null
-            'container list node'                  | '/parent-206[@key="A"]'                           || "/parent-206[@key='B']"
-            'root node with xpath /'               | '/'                                               || null
-            'root node with xpath passed as blank' | ''                                                || null
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Delete data node error scenario: #scenario.'() {
-        when: 'data node is deleted'
-            objectUnderTest.deleteDataNode(DATASPACE_NAME, ANCHOR_NAME3, datanodeXpath)
-        then: 'a #expectedException is thrown'
-            thrown(expectedException)
-        where: 'the following parameters were used'
-            scenario                                   | datanodeXpath                                    | expectedException
-            'valid data node, non existent child node' | '/parent-203/child-non-existent'                 | DataNodeNotFoundException
-            'invalid list element'                     | '/parent-206/child-206/grand-child-206@key="A"]' | CpsPathException
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Delete data node for an anchor.'() {
-        given: 'a data-node exists for an anchor'
-            assert fragmentsExistInDB(3003)
-        when: 'data nodes are deleted '
-            objectUnderTest.deleteDataNodes(DATASPACE_NAME, ANCHOR_NAME3)
-        then: 'all data-nodes are deleted successfully'
-            assert !fragmentsExistInDB(3003)
-    }
-
-    @Sql([CLEAR_DATA, SET_DATA])
-    def 'Delete data node for multiple anchors.'() {
-        given: 'a data-node exists for an anchor'
-            assert fragmentsExistInDB(3001)
-            assert fragmentsExistInDB(3003)
-        when: 'data nodes are deleted '
-            objectUnderTest.deleteDataNodes(DATASPACE_NAME, ['ANCHOR-001', 'ANCHOR-003'])
-        then: 'all data-nodes are deleted successfully'
-            assert !fragmentsExistInDB(3001)
-            assert !fragmentsExistInDB(3003)
-    }
-
-    def fragmentsExistInDB(anchorId) {
-        fragmentRepository.existsByAnchorId(anchorId)
-    }
-
-    static Collection<DataNode> toDataNodes(xpaths) {
-        return xpaths.collect { new DataNodeBuilder().withXpath(it).build() }
-    }
-
-
-    static DataNode buildDataNode(xpath, leaves, childDataNodes) {
-        return dataNodeBuilder.withXpath(xpath).withLeaves(leaves).withChildDataNodes(childDataNodes).build()
-    }
-
-    Map<String, Object> getLeavesMap(FragmentEntity fragmentEntity) {
-        return jsonObjectMapper.convertJsonString(fragmentEntity.attributes, Map<String, Object>.class)
-    }
-
-    def static treeToFlatMapByXpath(Map<String, DataNode> flatMap, DataNode dataNodeTree) {
-        flatMap.put(dataNodeTree.xpath, dataNodeTree)
-        dataNodeTree.childDataNodes
-                .forEach(childDataNode -> treeToFlatMapByXpath(flatMap, childDataNode))
-        return flatMap
-    }
-
-    def static multipleTreesToFlatMapByXpath(Map<String, DataNode> flatMap, Collection<DataNode> dataNodeTrees) {
-        for (DataNode dataNodeTree: dataNodeTrees){
-            flatMap.put(dataNodeTree.xpath, dataNodeTree)
-            dataNodeTree.childDataNodes
-                .forEach(childDataNode -> multipleTreesToFlatMapByXpath(flatMap, [childDataNode]))
-        }
-        return flatMap
-    }
-
-    def keysToXpaths(parent, Collection keys) {
-        return keys.collect { "${parent}/child-list[@key='${it}']".toString() }
-    }
-
-    def static createDataNodeTree(String... xpaths) {
-        def dataNodeBuilder = new DataNodeBuilder().withXpath(xpaths[0])
-        if (xpaths.length > 1) {
-            def xPathsDescendant = Arrays.copyOfRange(xpaths, 1, xpaths.length)
-            def childDataNode = createDataNodeTree(xPathsDescendant)
-            dataNodeBuilder.withChildDataNodes(ImmutableSet.of(childDataNode))
-        }
-        dataNodeBuilder.build()
-    }
-
-    def getFragmentByXpath(dataspaceName, anchorName, xpath) {
-        def dataspace = dataspaceRepository.getByName(dataspaceName)
-        def anchor = anchorRepository.getByDataspaceAndName(dataspace, anchorName)
-        return fragmentRepository.findByAnchorAndXpath(anchor, xpath).orElseThrow()
-    }
-
-    def createChildListAllHavingAttributeValue(parentXpath, tag, Collection keys, boolean addGrandChild) {
-        def listElementAsDataNodes = keysToXpaths(parentXpath, keys).collect {
-                new DataNodeBuilder()
-                    .withXpath(it)
-                    .withLeaves([attr1: tag])
-                    .build()
-        }
-        if (addGrandChild) {
-            listElementAsDataNodes.each {it.childDataNodes = [createGrandChild(it.xpath, tag)]}
-        }
-        return listElementAsDataNodes
-    }
-
-    def createGrandChild(parentXPath, tag) {
-        new DataNodeBuilder()
-            .withXpath("${parentXPath}/${tag}-grand-child")
-            .withLeaves([attr1: tag])
-            .build()
-    }
-
-}
index 811c329..5e42ce0 100644 (file)
@@ -63,19 +63,16 @@ class CpsModulePersistenceServiceSpec extends Specification {
             '}'
 
     // Scenario data
-    @Shared
-    yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539'
-    @Shared
-    yangResourceChecksumDbConstraint = 'yang_resource_checksum_key'
-    @Shared
-    sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum)
-    @Shared
-    checksumIntegrityException =
-            new DataIntegrityViolationException(
+    static yangResourceChecksum = 'b13faef573ed1374139d02c40d8ce09c80ea1dc70e63e464c1ed61568d48d539'
+    static yangResourceChecksumDbConstraint = 'yang_resource_checksum_key'
+    static sqlExceptionMessage = String.format('(checksum)=(%s)', yangResourceChecksum)
+    static checksumIntegrityException =  new DataIntegrityViolationException(
                     "checksum integrity exception",
                     new ConstraintViolationException('', new SQLException(sqlExceptionMessage), yangResourceChecksumDbConstraint))
-    @Shared
-    anotherIntegrityException = new DataIntegrityViolationException("another integrity exception")
+    static checksumIntegrityExceptionWithoutChecksum =  new DataIntegrityViolationException(
+                    "checksum integrity exception",
+                    new ConstraintViolationException('', new SQLException('no checksum'), yangResourceChecksumDbConstraint))
+    static anotherIntegrityException = new DataIntegrityViolationException("another integrity exception")
 
     def setup() {
         objectUnderTest = new CpsModulePersistenceServiceImpl(yangResourceRepositoryMock, schemaSetRepositoryMock,
@@ -92,11 +89,12 @@ class CpsModulePersistenceServiceSpec extends Specification {
             objectUnderTest.storeSchemaSet('my-dataspace', 'my-schema-set', newYangResourcesNameToContentMap)
         then: 'an #expectedThrownException is thrown'
             def e = thrown(expectedThrownException)
-            e.getMessage().contains(expectedThrownExceptionMessage)
+            assert e.getMessage().contains(expectedThrownExceptionMessage)
         where: 'the following data is used'
-            scenario                | dbException                || expectedThrownException | expectedThrownExceptionMessage
-            'checksum data failure' | checksumIntegrityException || DuplicatedYangResourceException | yangResourceChecksum
-            'other data failure'    | anotherIntegrityException  || DataIntegrityViolationException | 'another integrity exception'
+            scenario                            | dbException                               || expectedThrownException         | expectedThrownExceptionMessage
+            'checksum data failure'             | checksumIntegrityException                || DuplicatedYangResourceException | yangResourceChecksum
+            'checksum failure without checksum' | checksumIntegrityExceptionWithoutChecksum || DuplicatedYangResourceException | 'no checksum found'
+            'other data failure'                | anotherIntegrityException                 || DataIntegrityViolationException | 'another integrity exception'
     }
 
 }
index f18a8e4..89a5e40 100644 (file)
@@ -20,6 +20,8 @@
 
 package org.onap.cps.integration.base
 
+import java.time.OffsetDateTime
+
 class FunctionalSpecBase extends CpsIntegrationSpecBase {
 
     def static FUNCTIONAL_TEST_DATASPACE_1 = 'functionalTestDataspace1'
@@ -27,9 +29,9 @@ class FunctionalSpecBase extends CpsIntegrationSpecBase {
     def static NUMBER_OF_ANCHORS_PER_DATASPACE_WITH_BOOKSTORE_DATA = 2
     def static BOOKSTORE_ANCHOR_1 = 'bookstoreAnchor1'
     def static BOOKSTORE_ANCHOR_2 = 'bookstoreAnchor2'
-    def static BOOKSTORE_ANCHOR_FOR_PATCH = 'bookstoreAnchor2'
 
     def static initialized = false
+    def static bookstoreJsonData = readResourceDataFile('bookstore/bookstoreData.json')
 
     def setup() {
         if (!initialized) {
@@ -48,9 +50,15 @@ class FunctionalSpecBase extends CpsIntegrationSpecBase {
     }
 
     def addBookstoreData() {
-        def bookstoreJsonData = readResourceDataFile('bookstore/bookstoreData.json')
         addAnchorsWithData(NUMBER_OF_ANCHORS_PER_DATASPACE_WITH_BOOKSTORE_DATA, FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_SCHEMA_SET, 'bookstoreAnchor', bookstoreJsonData)
         addAnchorsWithData(NUMBER_OF_ANCHORS_PER_DATASPACE_WITH_BOOKSTORE_DATA, FUNCTIONAL_TEST_DATASPACE_2, BOOKSTORE_SCHEMA_SET, 'bookstoreAnchor', bookstoreJsonData)
     }
 
+    def restoreBookstoreDataAnchor(anchorNumber) {
+        def anchorName = 'bookstoreAnchor' + anchorNumber
+        cpsAdminService.deleteAnchor(FUNCTIONAL_TEST_DATASPACE_1, anchorName)
+        cpsAdminService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_SCHEMA_SET, anchorName)
+        cpsDataService.saveData(FUNCTIONAL_TEST_DATASPACE_1, anchorName, bookstoreJsonData, OffsetDateTime.now())
+    }
+
 }
index bf86e13..2efbcb2 100644 (file)
@@ -24,120 +24,342 @@ package org.onap.cps.integration.functional
 import org.onap.cps.api.CpsDataService
 import org.onap.cps.integration.base.FunctionalSpecBase
 import org.onap.cps.spi.FetchDescendantsOption
+import org.onap.cps.spi.exceptions.AlreadyDefinedExceptionBatch
 import org.onap.cps.spi.exceptions.AnchorNotFoundException
+import org.onap.cps.spi.exceptions.CpsAdminException
+import org.onap.cps.spi.exceptions.CpsPathException
+import org.onap.cps.spi.exceptions.DataNodeNotFoundException
+import org.onap.cps.spi.exceptions.DataNodeNotFoundExceptionBatch
 import org.onap.cps.spi.exceptions.DataValidationException
 import org.onap.cps.spi.exceptions.DataspaceNotFoundException
 
 import java.time.OffsetDateTime
 
-import java.time.OffsetDateTime
+import static org.onap.cps.spi.FetchDescendantsOption.DIRECT_CHILDREN_ONLY
+import static org.onap.cps.spi.FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS
+import static org.onap.cps.spi.FetchDescendantsOption.OMIT_DESCENDANTS
 
 class CpsDataServiceIntegrationSpec extends FunctionalSpecBase {
 
     CpsDataService objectUnderTest
     def originalCountBookstoreChildNodes
+    def now = OffsetDateTime.now()
 
     def setup() {
         objectUnderTest = cpsDataService
-        originalCountBookstoreChildNodes = countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
-    }
-
-    def 'Read bookstore top-level container(s) using #fetchDescendantsOption.'() {
-        when: 'get data nodes for bookstore container'
-            def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', fetchDescendantsOption)
-        then: 'the tree consists of #expectNumberOfDataNodes data nodes'
-            assert countDataNodesInTree(result) == expectNumberOfDataNodes
-        and: 'the top level data node has the expected attribute and value'
-            assert result.leaves['bookstore-name'] == ['Easons']
-        and: 'they are from the correct dataspace'
-            assert result.dataspace == [FUNCTIONAL_TEST_DATASPACE_1]
-        and: 'they are from the correct anchor'
-            assert result.anchorName == [BOOKSTORE_ANCHOR_1]
-        where: 'the following option is used'
-            fetchDescendantsOption                         || expectNumberOfDataNodes
-            FetchDescendantsOption.OMIT_DESCENDANTS        || 1
-            FetchDescendantsOption.DIRECT_CHILDREN_ONLY    || 6
-            FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS || 17
-            new FetchDescendantsOption(2)                  || 17
-    }
-
-    def 'Add and Delete a (container) datanode.'() {
-        given: 'new (webinfo) datanode'
-            def json = '{"webinfo": {"domain-name":"ourbookstore.com" ,"contact-email":"info@ourbookstore.com" }}'
-        when: 'the new datanode is saved'
-            objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, OffsetDateTime.now())
-        then: 'it can be retrieved by its xpath'
-            def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', FetchDescendantsOption.DIRECT_CHILDREN_ONLY)
+        originalCountBookstoreChildNodes = countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+    }
+
+    def 'Read bookstore top-level container(s) using #fetchDescendantsOption.'() {
+        when: 'get data nodes for bookstore container'
+            def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', fetchDescendantsOption)
+        then: 'the tree consists of #expectNumberOfDataNodes data nodes'
+            assert countDataNodesInTree(result) == expectNumberOfDataNodes
+        and: 'the top level data node has the expected attribute and value'
+            assert result.leaves['bookstore-name'] == ['Easons']
+        and: 'they are from the correct dataspace'
+            assert result.dataspace == [FUNCTIONAL_TEST_DATASPACE_1]
+        and: 'they are from the correct anchor'
+            assert result.anchorName == [BOOKSTORE_ANCHOR_1]
+        where: 'the following option is used'
+            fetchDescendantsOption        || expectNumberOfDataNodes
+            OMIT_DESCENDANTS              || 1
+            DIRECT_CHILDREN_ONLY          || 6
+            INCLUDE_ALL_DESCENDANTS       || 17
+            new FetchDescendantsOption(2) || 17
+    }
+
+    def 'Read bookstore top-level container(s) using "root" path variations.'() {
+        when: 'get data nodes for bookstore container'
+            def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, root, OMIT_DESCENDANTS)
+        then: 'the tree consists of one data node'
+            assert countDataNodesInTree(result) == 1
+        and: 'the top level data node has the expected attribute and value'
+            assert result.leaves['bookstore-name'] == ['Easons']
+        where: 'the following variations of "root" are used'
+            root << [ '/', '' ]
+    }
+
+    def 'Read data nodes with error: #cpsPath'() {
+        when: 'attempt to get data nodes using invalid path'
+            objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, cpsPath, DIRECT_CHILDREN_ONLY)
+        then: 'a #expectedException is thrown'
+            thrown(expectedException)
+        where:
+            cpsPath              || expectedException
+            'invalid path'       || CpsPathException
+            '/non-existing-path' || DataNodeNotFoundException
+    }
+
+    def 'Read (multiple) data nodes (batch) with #cpsPath'() {
+        when: 'attempt to get data nodes using invalid path'
+            objectUnderTest.getDataNodesForMultipleXpaths(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ cpsPath ], DIRECT_CHILDREN_ONLY)
+        then: 'no exception is thrown'
+            noExceptionThrown()
+        where:
+            cpsPath << [ 'invalid path', '/non-existing-path' ]
+    }
+
+    def 'Delete root data node.'() {
+        when: 'the "root" is deleted'
+            objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, [ '/' ], now)
+        and: 'attempt to get the top level data node'
+            objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY)
+        then: 'a datanode not found exception is thrown'
+            thrown(DataNodeNotFoundException)
+        cleanup:
+            restoreBookstoreDataAnchor(1)
+    }
+
+    def 'Add and Delete a (container) data node.'() {
+        given: 'new (webinfo) datanode'
+            def json = '{"webinfo": {"domain-name":"ourbookstore.com" ,"contact-email":"info@ourbookstore.com" }}'
+        when: 'the new datanode is saved'
+            objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now)
+        then: 'it can be retrieved by its xpath'
+            def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', DIRECT_CHILDREN_ONLY)
             assert result.size() == 1
             assert result[0].xpath == '/bookstore/webinfo'
         and: 'there is now one extra datanode'
-            assert originalCountBookstoreChildNodes + 1 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+            assert originalCountBookstoreChildNodes + 1 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
         when: 'the new datanode is deleted'
-            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', OffsetDateTime.now())
-        then: 'the original number of datanodes is restored'
-            assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', now)
+        then: 'the original number of data nodes is restored'
+            assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
     }
 
-    def 'Add and Delete list (element) datanodes.'() {
-        given: 'two new (categories) datanodes'
+    def 'Attempt to create a top level data node using root.'() {
+        given: 'a new anchor'
+            cpsAdminService.createAnchor(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_SCHEMA_SET, 'newAnchor1');
+        when: 'attempt to save new top level datanode'
+            def json = '{"bookstore": {"bookstore-name": "New Store"} }'
+            objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, 'newAnchor1' , '/', json, now)
+        then: 'since there is no data, a data node not found exception is thrown'
+            thrown(DataNodeNotFoundException)
+    }
+
+    def 'Attempt to save top level data node that already exists'() {
+        when: 'attempt to save already existing top level node'
+            def json = '{"bookstore": {} }'
+            objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, json, now)
+        then: 'an already defined (batch) exception is thrown for the existing path'
+            def exceptionThrown = thrown(AlreadyDefinedExceptionBatch)
+            exceptionThrown.alreadyDefinedXpaths == [ '/bookstore' ] as Set
+        cleanup:
+            restoreBookstoreDataAnchor(1)
+    }
+
+    def 'Delete a single datanode with invalid path.'() {
+        when: 'attempt to delete a single datanode with invalid path'
+            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/invalid path', now)
+        then: 'a cps path parser exception is thrown'
+            thrown(CpsPathException)
+    }
+
+    def 'Delete multiple data nodes with invalid path.'() {
+        when: 'attempt to delete datanode collection with invalid path'
+            objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, ['/invalid path'], now)
+        then: 'the error is silently ignored'
+            noExceptionThrown()
+    }
+
+    def 'Delete single data node with non-existing path.'() {
+        when: 'attempt to delete a single datanode with a non-existing path'
+            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/does/not/exist', now)
+        then: 'a datanode not found exception is thrown'
+            thrown(DataNodeNotFoundException)
+    }
+
+    def 'Delete multiple data nodes with non-existing path(s).'() {
+        when: 'attempt to delete a collection of datanodes with a non-existing path'
+            objectUnderTest.deleteDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, ['/does/not/exist'], now)
+        then: 'a datanode not found (batch) exception is thrown'
+            thrown(DataNodeNotFoundExceptionBatch)
+    }
+
+    def 'Add and Delete list (element) data nodes.'() {
+        given: 'two new (categories) data nodes'
             def json = '{"categories": [ {"code":"new1"}, {"code":"new2" } ] }'
         when: 'the new list elements are saved'
-            objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, OffsetDateTime.now())
+            objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now)
         then: 'they can be retrieved by their xpaths'
-            objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', FetchDescendantsOption.DIRECT_CHILDREN_ONLY).size() == 1
-            objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', FetchDescendantsOption.DIRECT_CHILDREN_ONLY).size() == 1
-        and: 'there are now two extra datanodes'
-            assert originalCountBookstoreChildNodes + 2 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+            objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', DIRECT_CHILDREN_ONLY).size() == 1
+            objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', DIRECT_CHILDREN_ONLY).size() == 1
+        and: 'there are now two extra data nodes'
+            assert originalCountBookstoreChildNodes + 2 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
         when: 'the new elements are deleted'
-            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', OffsetDateTime.now())
-            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', OffsetDateTime.now())
-        then: 'the original number of datanodes is restored'
-            assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now)
+            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', now)
+        then: 'the original number of data nodes is restored'
+            assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
     }
 
-    def 'Add and Delete a batch of lists (element) datanodes.'() {
-        given: 'two new (categories) datanodes in two separate batches'
+    def 'Add list (element) data nodes that already exist.'() {
+        given: 'two (categories) data nodes, one new and one existing'
+            def json = '{"categories": [ {"code":"1"}, {"code":"new1"} ] }'
+        when: 'attempt to save the list element'
+            objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now)
+        then: 'an already defined (batch) exception is thrown for the existing path'
+            def exceptionThrown = thrown(AlreadyDefinedExceptionBatch)
+            exceptionThrown.alreadyDefinedXpaths == [ '/bookstore/categories[@code=\'1\']' ] as Set
+        and: 'there is now one extra data node'
+            assert originalCountBookstoreChildNodes + 1 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+        cleanup:
+            restoreBookstoreDataAnchor(1)
+    }
+
+    def 'Add and Delete list (element) data nodes using list-specific method.'() {
+        given: 'a new (categories) data node'
+            def json = '{"categories": [ {"code":"new1"} ] }'
+        and: 'the new list element is saved'
+            objectUnderTest.saveListElements(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now)
+        when: 'the new element is deleted'
+            objectUnderTest.deleteListOrListElement(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now)
+        then: 'the original number of data nodes is restored'
+            assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+    }
+
+    def 'Add and Delete a batch of lists (element) data nodes.'() {
+        given: 'two new (categories) data nodes in two separate batches'
             def json1 = '{"categories": [ {"code":"new1"} ] }'
-            def json2 = '{"categories": [ {"code":"new2"} ] }'
+            def json2 = '{"categories": [ {"code":"new2"} ] } '
         when: 'the batches of new list element(s) are saved'
-            objectUnderTest.saveListElementsBatch(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', [json1, json2], OffsetDateTime.now())
+            objectUnderTest.saveListElementsBatch(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', [json1, json2], now)
         then: 'they can be retrieved by their xpaths'
-            objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', FetchDescendantsOption.DIRECT_CHILDREN_ONLY).size() == 1
-            objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', FetchDescendantsOption.DIRECT_CHILDREN_ONLY).size() == 1
-        and: 'there are now two extra datanodes'
-            assert originalCountBookstoreChildNodes + 2 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+            assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', DIRECT_CHILDREN_ONLY).size() == 1
+            assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', DIRECT_CHILDREN_ONLY).size() == 1
+        and: 'there are now two extra data nodes'
+            assert originalCountBookstoreChildNodes + 2 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
         when: 'the new elements are deleted'
-            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', OffsetDateTime.now())
-            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', OffsetDateTime.now())
-        then: 'the original number of datanodes is restored'
-            assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', FetchDescendantsOption.DIRECT_CHILDREN_ONLY))
+            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new1"]', now)
+            objectUnderTest.deleteDataNode(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new2"]', now)
+        then: 'the original number of data nodes is restored'
+            assert originalCountBookstoreChildNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+    }
+
+    def 'Add a batch of lists (element) data nodes with partial success.'() {
+        given: 'one new and one existing (categories) data node in two separate batches'
+            def jsonNewElement = '{"categories": [ {"code":"new1"} ] }'
+            def jsonExistingElement = '{"categories": [ {"code":"1"} ] } '
+        when: 'the batches of new list element(s) are saved'
+            objectUnderTest.saveListElementsBatch(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', [jsonNewElement, jsonExistingElement], now)
+        then: 'an already defined (batch) exception is thrown for the existing path'
+            def exceptionThrown = thrown(AlreadyDefinedExceptionBatch)
+            assert exceptionThrown.alreadyDefinedXpaths ==  [ '/bookstore/categories[@code=\'1\']' ] as Set
+        and: 'there is now one extra data node'
+            assert originalCountBookstoreChildNodes + 1 == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', DIRECT_CHILDREN_ONLY))
+        cleanup:
+            restoreBookstoreDataAnchor(1)
+    }
+
+    def 'Attempt to add empty lists.'() {
+        when: 'attempt to replace the list content with an empty collection'
+            objectUnderTest.replaceListContent(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', [ ], now)
+        then: 'an admin exception is thrown'
+            thrown(CpsAdminException)
+    }
+
+    def 'Add child error scenario: #scenario.'() {
+        when: 'attempt to add a child data node with #scenario'
+            objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, parentXpath, json, now)
+        then: 'a #expectedException is thrown'
+            thrown(expectedException)
+        where: 'the following data is used'
+            scenario                 | parentXpath                              | json                                || expectedException
+            'parent does not exist'  | '/bookstore/categories[@code="unknown"]' | '{"books": [ {"title":"new"} ] } '  || DataNodeNotFoundException
+            'already existing child' | '/bookstore'                             | '{"categories": [ {"code":"1"} ] }' || AlreadyDefinedExceptionBatch
+    }
+
+    def 'Add multiple child data nodes with partial success.'() {
+        given: 'one existing and one new list element'
+            def json = '{"categories": [ {"code":"1"}, {"code":"new"} ] }'
+        when: 'attempt to add the elements'
+            objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', json, now)
+        then: 'an already defined (batch) exception is thrown for the existing path'
+            def exceptionThrown = thrown(AlreadyDefinedExceptionBatch)
+            assert exceptionThrown.alreadyDefinedXpaths == [ "/bookstore/categories[@code='1']" ] as Set
+        and: 'the new data node has been added i.e. can be retrieved'
+            assert objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="new"]', DIRECT_CHILDREN_ONLY).size() == 1
+    }
+
+    def 'Replace list content #scenario.'() {
+        given: 'the bookstore categories 1 and 2 exist and have at least 1 child each '
+            assert countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="1"]', DIRECT_CHILDREN_ONLY)) > 1
+            assert countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="2"]', DIRECT_CHILDREN_ONLY)) > 1
+        when: 'the categories list is replaced with just category #categoryCode and the given child data'
+            def json = '{"categories": [ {"code":"' +categoryCode + '"' + childJson + '} ] }'
+            objectUnderTest.replaceListContent(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', json, now)
+        then: 'the replaced category can be retrieved and has the expected number of data nodes'
+            assert expectedNumberOfDataNodes == countDataNodesInTree(objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="' +categoryCode + '"]', DIRECT_CHILDREN_ONLY))
+        when: 'attempt to retrieve a category (code) not in the new list'
+            objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/categories[@code="2"]', DIRECT_CHILDREN_ONLY)
+        then: 'a datanode not found exception occurs'
+            thrown(DataNodeNotFoundException)
+        cleanup:
+            restoreBookstoreDataAnchor(1)
+        where: 'the following data is used'
+            scenario                        | categoryCode | childJson                                 || expectedNumberOfDataNodes
+            'existing code, no children'    | '1'          | ''                                        || 1
+            'existing code, new child'      | '1'          | ', "books" : [ { "title": "New Book" } ]' || 2
+            'existing code, existing child' | '1'          | ', "books" : [ { "title": "Matilda" } ]'  || 2
+            'new code, new child'           | 'new'        | ', "books" : [ { "title": "New Book" } ]' || 2
     }
 
     def 'Update multiple data node leaves.'() {
         given: 'Updated json for bookstore data'
-            def jsonData =  "{'book-store:books':{'lang':'English/French','price':100,'title':'Matilda','authors':['RoaldDahl']}}"
+            def jsonData =  "{'book-store:books':{'lang':'English/French','price':100,'title':'Matilda'}}"
         when: 'update is performed for leaves'
-            objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_FOR_PATCH, "/bookstore/categories[@code='1']", jsonData, OffsetDateTime.now())
+            objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code='1']", jsonData, now)
         then: 'the updated data nodes are retrieved'
-            def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_FOR_PATCH, "/bookstore/categories[@code=1]/books[@title='Matilda']", FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
+            def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_2, "/bookstore/categories[@code=1]/books[@title='Matilda']", INCLUDE_ALL_DESCENDANTS)
         and: 'the leaf values are updated as expected'
             assert result.leaves['lang'] == ['English/French']
             assert result.leaves['price'] == [100]
+        cleanup:
+            restoreBookstoreDataAnchor(2)
+    }
+
+    def 'Update data node leaves for node that has no leaves (yet).'() {
+        given: 'new (webinfo) datanode without leaves'
+            def json = '{"webinfo": {} }'
+            objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now)
+        when: 'update is performed to add a leaf'
+            def updatedJson = '{"webinfo": {"domain-name":"new leaf data"}}'
+            objectUnderTest.updateNodeLeaves(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore", updatedJson, now)
+        then: 'the updated data nodes are retrieved'
+            def result = cpsDataService.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, "/bookstore/webinfo", INCLUDE_ALL_DESCENDANTS)
+        and: 'the leaf value is updated as expected'
+            assert result.leaves['domain-name'] == ['new leaf data']
+        cleanup:
+            restoreBookstoreDataAnchor(1)
     }
 
     def 'Update multiple data leaves error scenario: #scenario.'() {
-        given: 'Updated json for bookstore data'
-            def jsonData =  "{'book-store:books':{'lang':'English/French','price':100,'title':'Matilda','authors':['RoaldDahl'],'pub_year':1988}}"
         when: 'attempt to update data node for #scenario'
-            objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, xpath, jsonData, OffsetDateTime.now())
+            objectUnderTest.updateNodeLeaves(dataspaceName, anchorName, xpath, 'irrelevant json data', now)
         then: 'a #expectedException is thrown'
             thrown(expectedException)
         where: 'the following data is used'
-            scenario                 | dataspaceName                  | anchorName                 | xpath                 || expectedException
-            'invalid dataspace name' | 'INVALID DATAsPACE'            | 'not-relevant'             | '/not relevant'       || DataValidationException
-            'invalid anchor name'    | FUNCTIONAL_TEST_DATASPACE_1    | 'INVALID ANCHOR'           | '/not relevant'       || DataValidationException
-            'non-existing dataspace' | 'non-existing-dataspace'       | 'not-relevant'             | '/not relevant'       || DataspaceNotFoundException
-            'non-existing anchor'    | FUNCTIONAL_TEST_DATASPACE_1    | 'non-existing-anchor'      | '/not relevant'       || AnchorNotFoundException
-            'non-existing-xpath'     | FUNCTIONAL_TEST_DATASPACE_1    | BOOKSTORE_ANCHOR_FOR_PATCH | '/non-existing'       || DataValidationException
+            scenario                 | dataspaceName               | anchorName                 | xpath           || expectedException
+            'invalid dataspace name' | 'Invalid Dataspace'         | 'not-relevant'             | '/not relevant' || DataValidationException
+            'invalid anchor name'    | FUNCTIONAL_TEST_DATASPACE_1 | 'INVALID ANCHOR'           | '/not relevant' || DataValidationException
+            'non-existing dataspace' | 'non-existing-dataspace'    | 'not-relevant'             | '/not relevant' || DataspaceNotFoundException
+            'non-existing anchor'    | FUNCTIONAL_TEST_DATASPACE_1 | 'non-existing-anchor'      | '/not relevant' || AnchorNotFoundException
+            'non-existing-xpath'     | FUNCTIONAL_TEST_DATASPACE_1 | BOOKSTORE_ANCHOR_1         | '/non-existing' || DataValidationException
+    }
+
+    def 'Update data nodes and descendants.'() {
+        given: 'some web info for the bookstore'
+            def json = '{"webinfo": {"domain-name":"ourbookstore.com" ,"contact-email":"info@ourbookstore.com" }}'
+            objectUnderTest.saveData(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1 , '/bookstore', json, now)
+        when: 'the webinfo (container) is updated'
+            json = '{"webinfo": {"domain-name":"newdomain.com" ,"contact-email":"info@newdomain.com" }}'
+            objectUnderTest.updateDataNodeAndDescendants(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore', json, now)
+        then: 'webinfo has been updated with the new details'
+            def result = objectUnderTest.getDataNodes(FUNCTIONAL_TEST_DATASPACE_1, BOOKSTORE_ANCHOR_1, '/bookstore/webinfo', DIRECT_CHILDREN_ONLY)
+            result.leaves.'domain-name'[0] == 'newdomain.com'
+            result.leaves.'contact-email'[0] == 'info@newdomain.com'
+        cleanup:
+            restoreBookstoreDataAnchor(1)
     }
 }
index f3219a0..e592a9c 100644 (file)
@@ -30,6 +30,8 @@ module stores {
             }
         }
 
+        container container-without-leaves { }
+
         container premises {
             list addresses {
                 key "house-number street";