Merge "Add event to mapper as well"
author:    Luke Gleeson <luke.gleeson@est.tech>
           Wed, 22 Mar 2023 10:27:39 +0000
committer: Gerrit Code Review <gerrit@onap.org>
           Wed, 22 Mar 2023 10:27:39 +0000
25 files changed:
cps-rest/docs/openapi/components.yml
cps-rest/docs/openapi/cpsQueryV2.yml
cps-rest/docs/openapi/openapi.yml
cps-rest/src/main/java/org/onap/cps/rest/controller/QueryRestController.java
cps-rest/src/test/groovy/org/onap/cps/rest/controller/QueryRestControllerSpec.groovy
cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntityArranger.java
cps-ri/src/main/java/org/onap/cps/spi/impl/CpsDataPersistenceServiceImpl.java
cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentQueryBuilder.java
cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepository.java
cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQuery.java
cps-ri/src/main/java/org/onap/cps/spi/repository/FragmentRepositoryCpsPathQueryImpl.java
cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceQueryDataNodeSpec.groovy
cps-ri/src/test/groovy/org/onap/cps/spi/impl/CpsDataPersistenceServiceSpec.groovy
cps-ri/src/test/resources/data/cps-path-query.sql
cps-service/src/main/java/org/onap/cps/api/CpsQueryService.java
cps-service/src/main/java/org/onap/cps/api/impl/CpsQueryServiceImpl.java
cps-service/src/main/java/org/onap/cps/spi/CpsDataPersistenceService.java
cps-service/src/main/java/org/onap/cps/spi/model/DataNodeBuilder.java
cps-service/src/main/java/org/onap/cps/utils/DataMapUtils.java
cps-service/src/test/groovy/org/onap/cps/api/impl/CpsQueryServiceImplSpec.groovy
cps-service/src/test/groovy/org/onap/cps/utils/DataMapUtilsSpec.groovy
docs/api/swagger/cps/openapi.yaml
test-tools/generate-metrics-report.sh [moved from generate-metrics-report.sh with 100% similarity]
test-tools/subtract-metrics-reports.py [new file with mode: 0755]
test-tools/test-deregistration.sh [new file with mode: 0755]

index 2a8ba3a..da43743 100644 (file)
@@ -118,6 +118,26 @@ components:
               </categories>
             </bookstore>
           </stores>
+    dataSampleAcrossAnchors:
+      value:
+        - anchorName: bookstore1
+          dataNode:
+            test:bookstore:
+              bookstore-name: Chapters
+              categories:
+                - code: 01
+                  name: SciFi
+                - code: 02
+                  name: kids
+        - anchorName: bookstore2
+          dataNode:
+            test:bookstore:
+              bookstore-name: Chapters
+              categories:
+                - code: 01
+                  name: SciFi
+                - code: 02
+                  name: kids
 
   parameters:
     dataspaceNameInQuery:
index 5bfd1bb..9beb0e3 100644 (file)
@@ -47,3 +47,34 @@ nodesByDataspaceAndAnchorAndCpsPath:
       '500':
         $ref: 'components.yml#/components/responses/InternalServerError'
     x-codegen-request-body-name: xpath
+
+nodesByDataspaceAndCpsPath:
+  get:
+    description: Query data nodes for the given dataspace across anchors using CPS path
+    tags:
+      - cps-query
+    summary: Query data nodes
+    operationId: getNodesByDataspaceAndCpsPath
+    parameters:
+      - $ref: 'components.yml#/components/parameters/dataspaceNameInPath'
+      - $ref: 'components.yml#/components/parameters/cpsPathInQuery'
+      - $ref: 'components.yml#/components/parameters/descendantsInQuery'
+    responses:
+      '200':
+        description: OK
+        content:
+          application/json:
+            schema:
+              type: object
+            examples:
+              dataSample:
+                $ref: 'components.yml#/components/examples/dataSampleAcrossAnchors'
+      '400':
+        $ref: 'components.yml#/components/responses/BadRequest'
+      '401':
+        $ref: 'components.yml#/components/responses/Unauthorized'
+      '403':
+        $ref: 'components.yml#/components/responses/Forbidden'
+      '500':
+        $ref: 'components.yml#/components/responses/InternalServerError'
+    x-codegen-request-body-name: xpath
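A minimal Java client sketch for this new endpoint is shown below for orientation. The host, port, /cps/api base path and basic-auth credentials are assumptions for a local deployment and are not defined by this change; the response is expected to be a JSON array shaped like the dataSampleAcrossAnchors example above, one entry per matching anchor.

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class QueryAcrossAnchorsClientSketch {

    public static void main(final String[] args) throws Exception {
        // Assumed local CPS instance; adjust host, port, base path and credentials as needed.
        final String url = "http://localhost:8080/cps/api/v2/dataspaces/my-dataspace/nodes/query"
                + "?cps-path=" + URLEncoder.encode("//categories[@code=1]", StandardCharsets.UTF_8)
                + "&descendants=all";
        final String credentials = Base64.getEncoder()
                .encodeToString("cpsuser:cpsr0cks!".getBytes(StandardCharsets.UTF_8));
        final HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(url))
                .header("Authorization", "Basic " + credentials)
                .GET()
                .build();
        final HttpResponse<String> response =
                HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
        // One array entry per matching anchor, each carrying "anchorName" and "dataNode".
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}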
index 0ac825a..d5ba97a 100644 (file)
@@ -110,5 +110,8 @@ paths:
   /v2/dataspaces/{dataspace-name}/anchors/{anchor-name}/nodes/query:
     $ref: 'cpsQueryV2.yml#/nodesByDataspaceAndAnchorAndCpsPath'
 
+  /v2/dataspaces/{dataspace-name}/nodes/query:
+    $ref: 'cpsQueryV2.yml#/nodesByDataspaceAndCpsPath'
+
 security:
   - basicAuth: []
index 5f4283d..1fc13fc 100644 (file)
@@ -71,6 +71,25 @@ public class QueryRestController implements CpsQueryApi {
                 fetchDescendantsOption);
     }
 
+    @Override
+    public ResponseEntity<Object> getNodesByDataspaceAndCpsPath(final String dataspaceName,
+        final String cpsPath, final String fetchDescendantsOptionAsString) {
+        final FetchDescendantsOption fetchDescendantsOption =
+                FetchDescendantsOption.getFetchDescendantsOption(fetchDescendantsOptionAsString);
+        final Collection<DataNode> dataNodes =
+                cpsQueryService.queryDataNodesAcrossAnchors(dataspaceName, cpsPath, fetchDescendantsOption);
+        final List<Map<String, Object>> dataMaps = new ArrayList<>(dataNodes.size());
+        String prefix = null;
+        for (final DataNode dataNode : dataNodes) {
+            if (prefix == null) {
+                prefix = prefixResolver.getPrefix(dataspaceName, dataNode.getAnchorName(), dataNode.getXpath());
+            }
+            final Map<String, Object> dataMap = DataMapUtils.toDataMapWithIdentifierAndAnchor(dataNode, prefix);
+            dataMaps.add(dataMap);
+        }
+        return new ResponseEntity<>(jsonObjectMapper.asJsonString(dataMaps), HttpStatus.OK);
+    }
+
     private ResponseEntity<Object> executeNodesByDataspaceQueryAndCreateResponse(final String dataspaceName,
              final String anchorName, final String cpsPath, final FetchDescendantsOption fetchDescendantsOption) {
         final Collection<DataNode> dataNodes =
index b881c38..c4bb23c 100644 (file)
@@ -110,4 +110,34 @@ class QueryRestControllerSpec extends Specification {
             assert response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement1","leaveListElement2"]}}')
     }
 
+    def 'Query data nodes by cps path for the given dataspace across all anchors with #scenario.'() {
+        given: 'service method returns a list containing a data node'
+            def dataNode1 = new DataNodeBuilder().withXpath('/xpath')
+                .withAnchor('my_anchor')
+                .withLeaves([leaf: 'value', leafList: ['leaveListElement1', 'leaveListElement2']]).build()
+            def dataNode2 = new DataNodeBuilder().withXpath('/xpath')
+                .withAnchor('my_anchor_2')
+                .withLeaves([leaf: 'value', leafList: ['leaveListElement3', 'leaveListElement4']]).build()
+            def dataspaceName = 'my_dataspace'
+            def cpsPath = 'some/cps/path'
+            mockCpsQueryService.queryDataNodesAcrossAnchors(dataspaceName, cpsPath, expectedCpsDataServiceOption) >> [dataNode1, dataNode2]
+        and: 'the query endpoint'
+            def dataNodeEndpoint = "$basePath/v2/dataspaces/$dataspaceName/nodes/query"
+        when: 'query data nodes API is invoked'
+            def response =
+                mvc.perform(
+                        get(dataNodeEndpoint)
+                                .param('cps-path', cpsPath)
+                                .param('descendants', includeDescendantsOptionString))
+                        .andReturn().response
+        then: 'the response contains the data nodes in json format'
+            response.status == HttpStatus.OK.value()
+            response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement1","leaveListElement2"]}}')
+            response.getContentAsString().contains('{"xpath":{"leaf":"value","leafList":["leaveListElement3","leaveListElement4"]}}')
+        where: 'the following options for include descendants are provided in the request'
+            scenario                    | includeDescendantsOptionString || expectedCpsDataServiceOption
+            'no descendants by default' | ''                             || OMIT_DESCENDANTS
+            'no descendant explicitly'  | 'none'                         || OMIT_DESCENDANTS
+            'descendants'               | 'all'                          || INCLUDE_ALL_DESCENDANTS
+    }
 }
index 6b1162d..55d3c7e 100644 (file)
@@ -50,13 +50,15 @@ public class FragmentEntityArranger {
     private static FragmentEntity toFragmentEntity(final AnchorEntity anchorEntity,
                                                    final FragmentExtract fragmentExtract) {
         final FragmentEntity fragmentEntity = new FragmentEntity();
-        fragmentEntity.setAnchor(anchorEntity);
+        if (anchorEntity != null) {
+            fragmentEntity.setAnchor(anchorEntity);
+            fragmentEntity.setDataspace(anchorEntity.getDataspace());
+        }
         fragmentEntity.setId(fragmentExtract.getId());
         fragmentEntity.setXpath(fragmentExtract.getXpath());
         fragmentEntity.setAttributes(fragmentExtract.getAttributes());
         fragmentEntity.setParentId(fragmentExtract.getParentId());
         fragmentEntity.setChildFragments(new HashSet<>());
-        fragmentEntity.setDataspace(anchorEntity.getDataspace());
         return fragmentEntity;
     }
 
index 4756991..aa631d1 100644 (file)
@@ -23,6 +23,7 @@
 
 package org.onap.cps.spi.impl;
 
+import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.ImmutableSet.Builder;
 import io.micrometer.core.annotation.Timed;
@@ -82,6 +83,8 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
     private final SessionManager sessionManager;
 
     private static final String REG_EX_FOR_OPTIONAL_LIST_INDEX = "(\\[@[\\s\\S]+?])?)";
+    private static final String QUERY_ACROSS_ANCHORS = null;
+    private static final AnchorEntity ALL_ANCHORS = null;
 
     @Override
     public void addChildDataNode(final String dataspaceName, final String anchorName, final String parentNodeXpath,
@@ -313,7 +316,8 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
             description = "Time taken to query data nodes")
     public List<DataNode> queryDataNodes(final String dataspaceName, final String anchorName, final String cpsPath,
                                          final FetchDescendantsOption fetchDescendantsOption) {
-        final AnchorEntity anchorEntity = getAnchorEntity(dataspaceName, anchorName);
+        final AnchorEntity anchorEntity = (Strings.isNullOrEmpty(anchorName)) ? ALL_ANCHORS
+                : getAnchorEntity(dataspaceName, anchorName);
         final CpsPathQuery cpsPathQuery;
         try {
             cpsPathQuery = CpsPathUtil.getCpsPathQuery(cpsPath);
@@ -325,14 +329,22 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
         if (canUseRegexQuickFind(fetchDescendantsOption, cpsPathQuery)) {
             return getDataNodesUsingRegexQuickFind(fetchDescendantsOption, anchorEntity, cpsPathQuery);
         }
-        fragmentEntities = fragmentRepository.findByAnchorAndCpsPath(anchorEntity.getId(), cpsPathQuery);
+        fragmentEntities = (anchorEntity == ALL_ANCHORS) ? fragmentRepository.findByCpsPath(cpsPathQuery)
+                : fragmentRepository.findByAnchorAndCpsPath(anchorEntity.getId(), cpsPathQuery);
         if (cpsPathQuery.hasAncestorAxis()) {
             final Collection<String> ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery);
-            fragmentEntities = getFragmentEntities(anchorEntity, ancestorXpaths, fetchDescendantsOption);
+            fragmentEntities = (anchorEntity == ALL_ANCHORS) ? getAncestorFragmentEntitiesAcrossAnchors(cpsPathQuery,
+                    fragmentEntities) : getFragmentEntities(anchorEntity, ancestorXpaths, fetchDescendantsOption);
         }
         return createDataNodesFromProxiedFragmentEntities(fetchDescendantsOption, anchorEntity, fragmentEntities);
     }
 
+    @Override
+    public List<DataNode> queryDataNodesAcrossAnchors(final String dataspaceName, final String cpsPath,
+                                         final FetchDescendantsOption fetchDescendantsOption) {
+        return queryDataNodes(dataspaceName, QUERY_ACROSS_ANCHORS, cpsPath, fetchDescendantsOption);
+    }
+
     private static boolean canUseRegexQuickFind(final FetchDescendantsOption fetchDescendantsOption,
                                                 final CpsPathQuery cpsPathQuery) {
         return fetchDescendantsOption.equals(FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS)
@@ -345,16 +357,24 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
                                                            final CpsPathQuery cpsPathQuery) {
         Collection<FragmentEntity> fragmentEntities;
         final String xpathRegex = FragmentQueryBuilder.getXpathSqlRegex(cpsPathQuery, true);
-        final List<FragmentExtract> fragmentExtracts =
+        final List<FragmentExtract> fragmentExtracts = (anchorEntity == ALL_ANCHORS)
+                ? fragmentRepository.quickFindWithDescendantsAcrossAnchor(xpathRegex) :
             fragmentRepository.quickFindWithDescendants(anchorEntity.getId(), xpathRegex);
         fragmentEntities = FragmentEntityArranger.toFragmentEntityTrees(anchorEntity, fragmentExtracts);
         if (cpsPathQuery.hasAncestorAxis()) {
             final Collection<String> ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery);
-            fragmentEntities = getFragmentEntities(anchorEntity, ancestorXpaths, fetchDescendantsOption);
+            fragmentEntities = (anchorEntity == ALL_ANCHORS) ? getAncestorFragmentEntitiesAcrossAnchors(cpsPathQuery,
+                    fragmentEntities) : getFragmentEntities(anchorEntity, ancestorXpaths, fetchDescendantsOption);
         }
         return createDataNodesFromFragmentEntities(fetchDescendantsOption, fragmentEntities);
     }
 
+    private Collection<FragmentEntity> getAncestorFragmentEntitiesAcrossAnchors(final CpsPathQuery cpsPathQuery,
+        final Collection<FragmentEntity> fragmentEntities) {
+        final Collection<String> ancestorXpaths = processAncestorXpath(fragmentEntities, cpsPathQuery);
+        return ancestorXpaths.isEmpty() ? Collections.emptyList() : fragmentRepository.findAllByXpathIn(ancestorXpaths);
+    }
+
     private List<DataNode> createDataNodesFromProxiedFragmentEntities(
                                             final FetchDescendantsOption fetchDescendantsOption,
                                             final AnchorEntity anchorEntity,
@@ -365,8 +385,10 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
                 dataNodes.add(toDataNode(proxiedFragmentEntity, fetchDescendantsOption));
             } else {
                 final String normalizedXpath = getNormalizedXpath(proxiedFragmentEntity.getXpath());
+                final AnchorEntity anchorEntityForFragmentExtract = (anchorEntity == ALL_ANCHORS)
+                        ? proxiedFragmentEntity.getAnchor() : anchorEntity;
                 final Collection<FragmentEntity> unproxiedFragmentEntities =
-                    buildFragmentEntitiesFromFragmentExtracts(anchorEntity, normalizedXpath);
+                    buildFragmentEntitiesFromFragmentExtracts(anchorEntityForFragmentExtract, normalizedXpath);
                 for (final FragmentEntity unproxiedFragmentEntity : unproxiedFragmentEntities) {
                     dataNodes.add(toDataNode(unproxiedFragmentEntity, fetchDescendantsOption));
                 }
@@ -436,6 +458,7 @@ public class CpsDataPersistenceServiceImpl implements CpsDataPersistenceService
         return new DataNodeBuilder()
                 .withXpath(fragmentEntity.getXpath())
                 .withLeaves(leaves)
+                .withAnchor(fragmentEntity.getAnchor().getName())
                 .withChildDataNodes(childDataNodes).build();
     }
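Within this class, queryDataNodesAcrossAnchors re-uses queryDataNodes with the null anchor sentinel (QUERY_ACROSS_ANCHORS / ALL_ANCHORS), and toDataNode now records the owning anchor on every node. A minimal sketch, assuming only the DataNode anchor accessor already used in this change, of how a caller can regroup the flat across-anchors result per anchor:

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.onap.cps.spi.model.DataNode;

public class GroupNodesByAnchorSketch {

    // Buckets the flat result of queryDataNodesAcrossAnchors by the anchor each node was found under.
    public static Map<String, List<DataNode>> groupByAnchor(final Collection<DataNode> dataNodes) {
        return dataNodes.stream().collect(Collectors.groupingBy(DataNode::getAnchorName));
    }
}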
 
index f107928..c231595 100644 (file)
@@ -1,6 +1,7 @@
 /*
  *  ============LICENSE_START=======================================================
  *  Copyright (C) 2022 Nordix Foundation
+ *  Modifications Copyright (C) 2023 TechMahindra Ltd.
  *  ================================================================================
  *  Licensed under the Apache License, Version 2.0 (the "License");
  *  you may not use this file except in compliance with the License.
@@ -73,6 +74,29 @@ public class FragmentQueryBuilder {
         return query;
     }
 
+    /**
+     * Create an SQL query to retrieve fragment entities by CPS path across all anchors.
+     *
+     * @param cpsPathQuery the CPS path query to be transformed into an SQL query
+     * @return an executable query object
+     */
+    public Query getQueryForCpsPath(final CpsPathQuery cpsPathQuery) {
+        final StringBuilder sqlStringBuilder = new StringBuilder("SELECT * FROM FRAGMENT WHERE xpath ~ :xpathRegex");
+        final Map<String, Object> queryParameters = new HashMap<>();
+        final String xpathRegex = getXpathSqlRegex(cpsPathQuery, false);
+        queryParameters.put("xpathRegex", xpathRegex);
+        if (cpsPathQuery.hasLeafConditions()) {
+            sqlStringBuilder.append(" AND attributes @> :leafDataAsJson\\:\\:jsonb");
+            queryParameters.put("leafDataAsJson", jsonObjectMapper.asJsonString(
+                    cpsPathQuery.getLeavesData()));
+        }
+
+        addTextFunctionCondition(cpsPathQuery, sqlStringBuilder, queryParameters);
+        final Query query = entityManager.createNativeQuery(sqlStringBuilder.toString(), FragmentEntity.class);
+        setQueryParameters(query, queryParameters);
+        return query;
+    }
+
     /**
      * Create a regular expression (string) for xpath based on the given cps path query.
      *
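To illustrate getQueryForCpsPath above: for a path such as //categories[@code=1] the native statement is the fixed template plus the optional jsonb leaf condition, roughly as captured in this sketch. The exact :xpathRegex value comes from getXpathSqlRegex and is only indicated here, not reproduced.

public class CpsPathSqlSketch {

    // Template built by getQueryForCpsPath when the CPS path carries leaf conditions;
    // the escaped \:\: in the Java source becomes ::jsonb in the SQL sent to the database.
    static final String SQL_FOR_PATH_WITH_LEAF_CONDITION =
            "SELECT * FROM FRAGMENT WHERE xpath ~ :xpathRegex AND attributes @> :leafDataAsJson::jsonb";

    // Bound parameters for //categories[@code=1] (illustrative values):
    //   :xpathRegex     - regex from getXpathSqlRegex(cpsPathQuery, false) matching xpaths that end in
    //                     /categories[@code=1], in any anchor since no anchor_id filter is applied
    //   :leafDataAsJson - the leaf conditions serialised to JSON, e.g. {"code":1}
}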
index 426a460..d486a39 100755 (executable)
@@ -3,6 +3,7 @@
  * Copyright (C) 2021-2023 Nordix Foundation.\r
  * Modifications Copyright (C) 2020-2021 Bell Canada.\r
  * Modifications Copyright (C) 2020-2021 Pantheon.tech.\r
+ * Modifications Copyright (C) 2023 TechMahindra Ltd.\r
  * ================================================================================\r
  * Licensed under the Apache License, Version 2.0 (the "License");\r
  * you may not use this file except in compliance with the License.\r
@@ -51,6 +52,10 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
     @Query("SELECT f FROM FragmentEntity f WHERE anchor = :anchor")\r
     List<FragmentExtract> findAllExtractsByAnchor(@Param("anchor") AnchorEntity anchorEntity);\r
 \r
+    List<FragmentEntity> findAllByAnchorAndXpathIn(AnchorEntity anchorEntity, Collection<String> xpath);\r
+\r
+    List<FragmentEntity> findAllByXpathIn(Collection<String> xpath);\r
+\r
     @Modifying\r
     @Query("DELETE FROM FragmentEntity WHERE anchor IN (:anchors)")\r
     void deleteByAnchorIn(@Param("anchors") Collection<AnchorEntity> anchorEntities);\r
@@ -60,6 +65,13 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
     List<FragmentExtract> findByAnchorAndParentXpath(@Param("anchor") AnchorEntity anchorEntity,\r
                                                      @Param("parentXpath") String parentXpath);\r
 \r
+    @Query(value = "SELECT id, anchor_id AS anchorId, xpath, parent_id AS parentId,"\r
+            + " CAST(attributes AS TEXT) AS attributes"\r
+            + " FROM FRAGMENT WHERE "\r
+            + "( xpath = :parentXpath OR xpath LIKE CONCAT(:parentXpath,'/%') )",\r
+            nativeQuery = true)\r
+    List<FragmentExtract> findByParentXpath(@Param("parentXpath") String parentXpath);\r
+\r
     @Query(value = "SELECT id, anchor_id AS anchorId, xpath, parent_id AS parentId,"\r
         + " CAST(attributes AS TEXT) AS attributes"\r
         + " FROM FRAGMENT WHERE anchor_id = :anchorId"\r
@@ -94,4 +106,10 @@ public interface FragmentRepository extends JpaRepository<FragmentEntity, Long>,
     List<FragmentExtract> findExtractsWithDescendants(@Param("anchorId") int anchorId,\r
                                                       @Param("xpaths") Collection<String> xpaths,\r
                                                       @Param("maxDepth") int maxDepth);\r
+\r
+    @Query(value = "SELECT id, anchor_id AS anchorId, xpath, parent_id AS parentId,"\r
+            + " CAST(attributes AS TEXT) AS attributes"\r
+            + " FROM FRAGMENT WHERE xpath ~ :xpathRegex",\r
+            nativeQuery = true)\r
+    List<FragmentExtract> quickFindWithDescendantsAcrossAnchor(@Param("xpathRegex") String xpathRegex);\r
 }\r
index 04138ec..32041e7 100644 (file)
@@ -1,6 +1,7 @@
 /*-
  * ============LICENSE_START=======================================================
  *  Copyright (C) 2021 Nordix Foundation.
+ *  Modifications Copyright (C) 2023 TechMahindra Ltd.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -26,4 +27,6 @@ import org.onap.cps.spi.entities.FragmentEntity;
 
 public interface FragmentRepositoryCpsPathQuery {
     List<FragmentEntity> findByAnchorAndCpsPath(int anchorId, CpsPathQuery cpsPathQuery);
+
+    List<FragmentEntity> findByCpsPath(CpsPathQuery cpsPathQuery);
 }
index 6e8f05f..b95491c 100644 (file)
@@ -1,6 +1,7 @@
 /*-
  * ============LICENSE_START=======================================================
  *  Copyright (C) 2021-2022 Nordix Foundation.
+ *  Modifications Copyright (C) 2023 TechMahindra Ltd.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -48,4 +49,13 @@ public class FragmentRepositoryCpsPathQueryImpl implements FragmentRepositoryCps
         return fragmentEntities;
     }
 
+    @Override
+    @Transactional
+    public List<FragmentEntity> findByCpsPath(final CpsPathQuery cpsPathQuery) {
+        final Query query = fragmentQueryBuilder.getQueryForCpsPath(cpsPathQuery);
+        final List<FragmentEntity> fragmentEntities = query.getResultList();
+        log.debug("Fetched {} fragment entities by cps path across all anchors.", fragmentEntities.size());
+        return fragmentEntities;
+    }
+
 }
index ba8425f..60aaa81 100644 (file)
@@ -180,4 +180,33 @@ class CpsDataPersistenceQueryDataNodeSpec extends CpsPersistenceSpecBase {
             thrown(CpsPathException)
     }
 
+    @Sql([CLEAR_DATA, SET_DATA])
+    def 'Cps Path query across anchors for leaf value(s) with #scenario.'() {
+        when: 'a query is executed to get a data node by the given cps path'
+            def result = objectUnderTest.queryDataNodesAcrossAnchors(DATASPACE_NAME, cpsPath, includeDescendantsOption)
+        then: 'the correct number of queried nodes are returned'
+            assert result.size() == expectedNumberOfQueriedNodes
+        and: 'the correct anchors are queried'
+            assert result.anchorName.containsAll(expectedAnchors)
+        where: 'the following data is used'
+            scenario                                    | cpsPath                                                      | includeDescendantsOption || expectedNumberOfQueriedNodes || expectedAnchors
+            'String and no descendants'                 | '/shops/shop[@id=1]/categories[@code=1]/book[@title="Dune"]' | OMIT_DESCENDANTS         || 2                            || ['ANCHOR-004', 'ANCHOR-005']
+            'Integer and descendants'                   | '/shops/shop[@id=1]/categories[@code=1]/book[@price=5]'      | INCLUDE_ALL_DESCENDANTS  || 3                            || ['ANCHOR-004', 'ANCHOR-005']
+            'No condition no descendants'               | '/shops/shop[@id=1]/categories'                              | OMIT_DESCENDANTS         || 6                            || ['ANCHOR-004', 'ANCHOR-005']
+            'multiple list-ancestors'                   | '//book/ancestor::categories'                                | INCLUDE_ALL_DESCENDANTS  || 4                            || ['ANCHOR-004', 'ANCHOR-005']
+            'one ancestor with list value'              | '//book/ancestor::categories[@code=1]'                       | INCLUDE_ALL_DESCENDANTS  || 2                            || ['ANCHOR-004', 'ANCHOR-005']
+            'list with index value in the xpath prefix' | '//categories[@code=1]/book/ancestor::shop[@id=1]'           | INCLUDE_ALL_DESCENDANTS  || 2                            || ['ANCHOR-004', 'ANCHOR-005']
+            'ancestor with parent list'                 | '//book/ancestor::shop[@id=1]/categories[@code=2]'           | INCLUDE_ALL_DESCENDANTS  || 2                            || ['ANCHOR-004', 'ANCHOR-005']
+            'ancestor with parent'                      | '//phonenumbers[@type="mob"]/ancestor::info/contact'         | INCLUDE_ALL_DESCENDANTS  || 5                            || ['ANCHOR-004', 'ANCHOR-005']
+            'ancestor combined with text condition'     | '//book/title[text()="Dune"]/ancestor::shop'                 | INCLUDE_ALL_DESCENDANTS  || 10                           || ['ANCHOR-004', 'ANCHOR-005']
+            'ancestor with parent that does not exist'  | '//book/ancestor::parentDoesNoExist/categories'              | INCLUDE_ALL_DESCENDANTS  || 0                            || []
+            'ancestor does not exist'                   | '//book/ancestor::ancestorDoesNotExist'                      | INCLUDE_ALL_DESCENDANTS  || 0                            || []
+    }
+
+    def 'Cps Path query across anchors with syntax error throws a CPS Path Exception.'() {
+        when: 'trying to execute a query with a syntax (parsing) error'
+            objectUnderTest.queryDataNodesAcrossAnchors(DATASPACE_NAME, 'cpsPath that cannot be parsed' , OMIT_DESCENDANTS)
+        then: 'a cps path exception is thrown'
+            thrown(CpsPathException)
+    }
 }
index 3d7003d..f02aa75 100644 (file)
@@ -2,7 +2,7 @@
  * ============LICENSE_START=======================================================
  * Copyright (c) 2021 Bell Canada.
  * Modifications Copyright (C) 2021-2023 Nordix Foundation
- * Modifications Copyright (C) 2022 TechMahindra Ltd.
+ * Modifications Copyright (C) 2022-2023 TechMahindra Ltd.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -273,7 +273,7 @@ class CpsDataPersistenceServiceSpec extends Specification {
     }
 
     def mockFragmentWithJson(json) {
-        def fragmentExtract = mockFragmentExtract(456, null, null, '/parent-01', json)
+        def fragmentExtract = mockFragmentExtract(456, null, 123, '/parent-01', json)
         mockFragmentRepository.findExtractsWithDescendants(123, ['/parent-01'] as Set, _) >> [fragmentExtract]
     }
 
index 18fd74a..5fe927b 100644 (file)
@@ -2,6 +2,7 @@
    ============LICENSE_START=======================================================
     Copyright (C) 2021-2022 Nordix Foundation.
     Modifications Copyright (C) 2021 Bell Canada.
+    Modifications Copyright (C) 2023 TechMahindra Ltd.
    ================================================================================
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
@@ -50,6 +51,9 @@ INSERT INTO SCHEMA_SET_YANG_RESOURCES (SCHEMA_SET_ID, YANG_RESOURCE_ID) VALUES
 INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES
     (1003, 'ANCHOR-004', 1001, 2001);
 
+INSERT INTO ANCHOR (ID, NAME, DATASPACE_ID, SCHEMA_SET_ID) VALUES
+    (1004, 'ANCHOR-005', 1001, 2001);
+
 INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
     (1, 1001, 1003, null, '/shops', null),
     (2, 1001, 1003, 1, '/shops/shop[@id=''1'']', '{"id" : 1, "type" : "bookstore"}'),
@@ -76,3 +80,30 @@ INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES)
     (18, 1001, 1003, 17, '/shops/shop[@id=''3'']/info/contact/website', '{"address" : "myshop.ie"}'),
     (19, 1001, 1003, 17, '/shops/shop[@id=''3'']/info/contact/phonenumbers[@type=''mob'']', '{"type" : "mob", "number" : "123123456"}'),
     (20, 1001, 1003, 17, '/shops/shop[@id=''3'']/info/contact/phonenumbers[@type=''landline'']', '{"type" : "landline", "number" : "012123456"}');
+
+    INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
+    (41, 1001, 1004, null, '/shops', null),
+    (42, 1001, 1004, 1, '/shops/shop[@id=''1'']', '{"id" : 1, "type" : "bookstore"}'),
+    (43, 1001, 1004, 2, '/shops/shop[@id=''1'']/categories[@code=''1'']', '{"code" : 1, "type" : "bookstore", "name": "SciFi"}'),
+    (44, 1001, 1004, 2, '/shops/shop[@id=''1'']/categories[@code=''2'']', '{"code" : 2, "type" : "bookstore", "name": "Fiction"}'),
+    (71, 1001, 1004, 2, '/shops/shop[@id=''1'']/categories[@code=''string/with/slash/'']', '{"code" : "string/with/slash", "type" : "text/with/slash", "name": "Fiction"}'),
+    (45, 1001, 1004, 3, '/shops/shop[@id=''1'']/categories[@code=''1'']/book', '{"price" :  5, "title" : "Dune", "labels" : ["special offer","classics",""]}'),
+    (46, 1001, 1004, 4, '/shops/shop[@id=''1'']/categories[@code=''2'']/book', '{"price" : 15, "title" : "Chapters", "editions" : [2000,2010,2020]}'),
+    (47, 1001, 1004, 5, '/shops/shop[@id=''1'']/categories[@code=''1'']/book/author[@FirstName=''Joe'' and @Surname=''Bloggs'']', '{"FirstName" : "Joe", "Surname": "Bloggs","title": "Dune"}'),
+    (48, 1001, 1004, 6, '/shops/shop[@id=''1'']/categories[@code=''2'']/book/author[@FirstName=''Joe'' and @Surname=''Smith'']', '{"FirstName" : "Joe", "Surname": "Smith","title": "Chapters"}'),
+    (72, 1001, 1004, 6, '/shops/shop[@id=''1'']/categories[@code=''2'']/book/author[@FirstName=''Joe'' and @Address=''string[with]square[brackets]'']', '{"FirstName" : "Joe", "Address": "string[with]square[brackets]","title": "Chapters"}');
+
+    INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
+    (49, 1001, 1004, 1, '/shops/shop[@id=''2'']', '{"type" : "bookstore"}'),
+    (50, 1001, 1004, 9, '/shops/shop[@id=''2'']/categories[@code=''1'']', '{"code" : 2, "type" : "bookstore", "name": "Kids"}'),
+    (51, 1001, 1004, 10, '/shops/shop[@id=''2'']/categories[@code=''2'']', '{"code" : 2, "type" : "bookstore", "name": "Fiction"}');
+
+    INSERT INTO FRAGMENT (ID, DATASPACE_ID, ANCHOR_ID, PARENT_ID, XPATH, ATTRIBUTES) VALUES
+    (52, 1001, 1004, 1, '/shops/shop[@id=''3'']', '{"type" : "garden centre"}'),
+    (53, 1001, 1004, 12, '/shops/shop[@id=''3'']/categories[@code=''1'']', '{"id" : 1, "type" : "garden centre", "name": "indoor plants"}'),
+    (54, 1001, 1004, 12, '/shops/shop[@id=''3'']/categories[@code=''2'']', '{"id" : 2, "type" : "garden centre", "name": "outdoor plants"}'),
+    (56, 1001, 1004, 1, '/shops/shop[@id=''3'']/info', null),
+    (57, 1001, 1004, 1, '/shops/shop[@id=''3'']/info/contact', null),
+    (58, 1001, 1004, 17, '/shops/shop[@id=''3'']/info/contact/website', '{"address" : "myshop.ie"}'),
+    (59, 1001, 1004, 17, '/shops/shop[@id=''3'']/info/contact/phonenumbers[@type=''mob'']', '{"type" : "mob", "number" : "123123456"}'),
+    (60, 1001, 1004, 17, '/shops/shop[@id=''3'']/info/contact/phonenumbers[@type=''landline'']', '{"type" : "landline", "number" : "012123456"}');
index 68ae1eb..af54077 100644 (file)
@@ -1,6 +1,7 @@
 /*
  *  ============LICENSE_START=======================================================
  *  Copyright (C) 2020-2022 Nordix Foundation
+ *  Modifications Copyright (C) 2022-2023 TechMahindra Ltd.
  *  ================================================================================
  *  Licensed under the Apache License, Version 2.0 (the "License");
  *  you may not use this file except in compliance with the License.
@@ -42,4 +43,15 @@ public interface CpsQueryService {
     Collection<DataNode> queryDataNodes(String dataspaceName, String anchorName,
                                         String cpsPath, FetchDescendantsOption fetchDescendantsOption);
 
+    /**
+     * Get data nodes for the given dataspace across all anchors by cps path.
+     *
+     * @param dataspaceName dataspace name
+     * @param cpsPath CPS path
+     * @param fetchDescendantsOption defines whether the descendants of the node(s) found by the query should be
+     *                               included in the output
+     * @return a collection of data nodes
+     */
+    Collection<DataNode> queryDataNodesAcrossAnchors(String dataspaceName, String cpsPath,
+                                                     FetchDescendantsOption fetchDescendantsOption);
 }
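A minimal usage sketch of the new service method follows; the package names are assumed from the modules touched in this change, and the dataspace name and CPS path values are purely illustrative.

import java.util.Collection;

import org.onap.cps.api.CpsQueryService;
import org.onap.cps.spi.FetchDescendantsOption;
import org.onap.cps.spi.model.DataNode;

public class QueryAcrossAnchorsUsageSketch {

    private final CpsQueryService cpsQueryService;

    public QueryAcrossAnchorsUsageSketch(final CpsQueryService cpsQueryService) {
        this.cpsQueryService = cpsQueryService;
    }

    // Runs one CPS path query over every anchor in the dataspace and reports where each match was found.
    public void printMatchesPerAnchor(final String dataspaceName) {
        final Collection<DataNode> dataNodes = cpsQueryService.queryDataNodesAcrossAnchors(
                dataspaceName, "//categories[@code=1]", FetchDescendantsOption.OMIT_DESCENDANTS);
        for (final DataNode dataNode : dataNodes) {
            System.out.println(dataNode.getAnchorName() + " -> " + dataNode.getXpath());
        }
    }
}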
index a63faab..ac018c9 100644 (file)
@@ -1,6 +1,7 @@
 /*
  *  ============LICENSE_START=======================================================
  *  Copyright (C) 2021-2022 Nordix Foundation
+ *  Modifications Copyright (C) 2022-2023 TechMahindra Ltd.
  *  ================================================================================
  *  Licensed under the Apache License, Version 2.0 (the "License");
  *  you may not use this file except in compliance with the License.
@@ -45,4 +46,11 @@ public class CpsQueryServiceImpl implements CpsQueryService {
         cpsValidator.validateNameCharacters(dataspaceName, anchorName);
         return cpsDataPersistenceService.queryDataNodes(dataspaceName, anchorName, cpsPath, fetchDescendantsOption);
     }
+
+    @Override
+    public Collection<DataNode> queryDataNodesAcrossAnchors(final String dataspaceName,
+        final String cpsPath, final FetchDescendantsOption fetchDescendantsOption) {
+        cpsValidator.validateNameCharacters(dataspaceName);
+        return cpsDataPersistenceService.queryDataNodesAcrossAnchors(dataspaceName, cpsPath, fetchDescendantsOption);
+    }
 }
index f10443f..5404019 100644 (file)
@@ -223,6 +223,19 @@ public interface CpsDataPersistenceService {
     List<DataNode> queryDataNodes(String dataspaceName, String anchorName,
                                   String cpsPath, FetchDescendantsOption fetchDescendantsOption);
 
+    /**
+     * Get data nodes by dataspace name and CPS path across all anchors.
+     *
+     * @param dataspaceName          dataspace name
+     * @param cpsPath                cps path
+     * @param fetchDescendantsOption defines whether the descendants of the node(s) found by the query should be
+     *                               included in the output
+     * @return the data nodes found i.e. 0 or more data nodes
+     */
+    List<DataNode> queryDataNodesAcrossAnchors(String dataspaceName,
+                                  String cpsPath, FetchDescendantsOption fetchDescendantsOption);
+
+
     /**
      * Starts a session which allows use of locks and batch interaction with the persistence service.
      *
index b23cdfc..6fc36eb 100644 (file)
@@ -3,7 +3,7 @@
  *  Copyright (C) 2021 Bell Canada. All rights reserved.
  *  Modifications Copyright (C) 2021 Pantheon.tech
  *  Modifications Copyright (C) 2022 Nordix Foundation.
- *  Modifications Copyright (C) 2022 TechMahindra Ltd.
+ *  Modifications Copyright (C) 2022-2023 TechMahindra Ltd.
  *  ================================================================================
  *  Licensed under the Apache License, Version 2.0 (the "License");
  *  you may not use this file except in compliance with the License.
@@ -54,6 +54,7 @@ public class DataNodeBuilder {
     private String parentNodeXpath = "";
     private Map<String, Serializable> leaves = Collections.emptyMap();
     private Collection<DataNode> childDataNodes = Collections.emptySet();
+    private String anchorName;
 
     /**
      * To use parent node xpath for creating {@link DataNode}.
@@ -88,6 +89,17 @@ public class DataNodeBuilder {
         return this;
     }
 
+    /**
+     * To use anchor name for creating {@link DataNode}.
+     *
+     * @param anchorName anchor name for the data node
+     * @return DataNodeBuilder
+     */
+    public DataNodeBuilder withAnchor(final String anchorName) {
+        this.anchorName = anchorName;
+        return this;
+    }
+
     /**
      * To use module name for prefix for creating {@link DataNode}.
      *
@@ -153,6 +165,7 @@ public class DataNodeBuilder {
         dataNode.setModuleNamePrefix(moduleNamePrefix);
         dataNode.setLeaves(leaves);
         dataNode.setChildDataNodes(childDataNodes);
+        dataNode.setAnchorName(anchorName);
         return dataNode;
     }
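For completeness, a small Java sketch of building a DataNode through the extended builder; the xpath, anchor name and leaf values are illustrative only.

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import org.onap.cps.spi.model.DataNode;
import org.onap.cps.spi.model.DataNodeBuilder;

public class DataNodeWithAnchorSketch {

    public static DataNode buildSample() {
        final Map<String, Serializable> leaves = new HashMap<>();
        leaves.put("code", "1");
        leaves.put("name", "SciFi");
        return new DataNodeBuilder()
                .withXpath("/bookstore/categories[@code='1']")
                .withAnchor("bookstore-anchor")   // new builder step introduced in this change
                .withLeaves(leaves)
                .build();
    }
}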
 
index 14641e0..b0e109b 100644 (file)
@@ -3,6 +3,7 @@
  *  Copyright (C) 2021 Pantheon.tech
  *  Modifications (C) 2021-2022 Nordix Foundation
  *  Modifications Copyright (C) 2022 Bell Canada
+ *  Modifications Copyright (C) 2022-2023 TechMahindra Ltd.
  *  ================================================================================
  *  Licensed under the Apache License, Version 2.0 (the "License");
  *  you may not use this file except in compliance with the License.
@@ -48,6 +49,21 @@ public class DataMapUtils {
         return ImmutableMap.<String, Object>builder().put(nodeIdentifierWithPrefix, toDataMap(dataNode)).build();
     }
 
+    /**
+     * Converts DataNode structure into a map including the anchor name and the root node identifier.
+     *
+     * @param dataNode data node object
+     * @param prefix module name prefix used for the root node identifier
+     * @return a map with the anchor name and the same data keyed by the root node identifier
+     */
+    public static Map<String, Object> toDataMapWithIdentifierAndAnchor(final DataNode dataNode, final String prefix) {
+        final String nodeIdentifierWithPrefix = getNodeIdentifierWithPrefix(dataNode.getXpath(), prefix);
+        final Map<String, Object> dataMap = ImmutableMap.<String, Object>builder()
+                .put(nodeIdentifierWithPrefix, toDataMap(dataNode)).build();
+        return ImmutableMap.<String, Object>builder().put("anchorName", dataNode.getAnchorName())
+                .put("dataNode", dataMap).build();
+    }
+
     /**
      * Converts DataNode structure into a map for a JSON response.
      *
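The map produced by toDataMapWithIdentifierAndAnchor above nests the usual prefixed data map inside an outer map holding the anchor name. A sketch of the expected shape; the DataNode construction and the prefix value are illustrative.

import java.util.Map;

import org.onap.cps.spi.model.DataNode;
import org.onap.cps.spi.model.DataNodeBuilder;
import org.onap.cps.utils.DataMapUtils;

public class DataMapWithAnchorSketch {

    public static void main(final String[] args) {
        final DataNode dataNode = new DataNodeBuilder()
                .withXpath("/bookstore")
                .withAnchor("bookstore-anchor")
                .build();
        final Map<String, Object> result =
                DataMapUtils.toDataMapWithIdentifierAndAnchor(dataNode, "sampleModuleName");
        // Expected top-level keys:
        //   "anchorName" -> "bookstore-anchor"
        //   "dataNode"   -> { "sampleModuleName:bookstore" -> { ...leaves and child nodes... } }
        System.out.println(result);
    }
}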
index 56c43d1..553027a 100644 (file)
@@ -48,4 +48,18 @@ class CpsQueryServiceImplSpec extends Specification {
                                        FetchDescendantsOption.DIRECT_CHILDREN_ONLY, new FetchDescendantsOption(10)]
     }
 
+    def 'Query data nodes across all anchors by cps path with #fetchDescendantsOption.'() {
+        given: 'a dataspace name and a cps path'
+            def dataspaceName = 'some-dataspace'
+            def cpsPath = '/cps-path'
+        when: 'queryDataNodesAcrossAnchors is invoked'
+            objectUnderTest.queryDataNodesAcrossAnchors(dataspaceName, cpsPath, fetchDescendantsOption)
+        then: 'the persistence service is called once with the correct parameters'
+            1 * mockCpsDataPersistenceService.queryDataNodesAcrossAnchors(dataspaceName, cpsPath, fetchDescendantsOption)
+        and: 'the CpsValidator is called on the dataspaceName'
+            1 * mockCpsValidator.validateNameCharacters(dataspaceName)
+        where: 'the following fetch descendants options are used'
+            fetchDescendantsOption << [FetchDescendantsOption.OMIT_DESCENDANTS, FetchDescendantsOption.INCLUDE_ALL_DESCENDANTS]
+    }
+
 }
index 84dddeb..e27b437 100644 (file)
@@ -3,6 +3,7 @@
  *  Copyright (C) 2021 Pantheon.tech
  *  Modifications Copyright (C) 2020-2022 Nordix Foundation
  *  Modifications Copyright (C) 2022 Bell Canada.
+ *  Modifications Copyright (C) 2023 TechMahindra Ltd.
  *  ================================================================================
  *  Licensed under the Apache License, Version 2.0 (the "License");
  *  you may not use this file except in compliance with the License.
@@ -37,10 +38,23 @@ class DataMapUtilsSpec extends Specification {
             ),
     ])
 
+    def dataNodeWithAnchor = buildDataNodeWithAnchor(
+            "/parent", 'anchor01',[parentLeaf:'parentLeafValue', parentLeafList:['parentLeafListEntry1','parentLeafListEntry2']],[
+            buildDataNode('/parent/child-list[@id=1/2]',[listElementLeaf:'listElement1leafValue'],noChildren),
+            buildDataNode('/parent/child-list[@id=2]',[listElementLeaf:'listElement2leafValue'],noChildren),
+            buildDataNode('/parent/child-object',[childLeaf:'childLeafValue'],
+                    [buildDataNode('/parent/child-object/grand-child-object',[grandChildLeaf:'grandChildLeafValue'],noChildren)]
+            ),
+    ])
+
     static def buildDataNode(xpath,  leaves,  children) {
         return new DataNodeBuilder().withXpath(xpath).withLeaves(leaves).withChildDataNodes(children).build()
     }
 
+    static def buildDataNodeWithAnchor(xpath, anchorName, leaves,  children) {
+        return new DataNodeBuilder().withXpath(xpath).withAnchor(anchorName).withLeaves(leaves).withChildDataNodes(children).build()
+    }
+
     def 'Data node structure conversion to map.'() {
         when: 'data node structure is converted to a map'
             def result = DataMapUtils.toDataMap(dataNode)
@@ -90,5 +104,23 @@ class DataMapUtilsSpec extends Specification {
             'xpath contains list attributes with /' | '/bookstore/categories[@code=1/2]'        | 'sampleModuleName:categories'
 
     }
+
+    def 'Data node structure with anchor name conversion to map with root node identifier.'() {
+        when: 'data node structure is converted to a map with root node identifier'
+            def result = DataMapUtils.toDataMapWithIdentifierAndAnchor(dataNodeWithAnchor, dataNodeWithAnchor.moduleNamePrefix)
+        then: 'root node leaves are populated under its node identifier'
+            def parentNode = result.get("dataNode").parent
+            parentNode.parentLeaf == 'parentLeafValue'
+            parentNode.parentLeafList == ['parentLeafListEntry1','parentLeafListEntry2']
+
+        and: 'leaves for the child element are populated under its node identifier'
+            assert parentNode.'child-object'.childLeaf == 'childLeafValue'
+
+        and: 'leaves for the grandchild element are populated under its node identifier'
+            assert parentNode.'child-object'.'grand-child-object'.grandChildLeaf == 'grandChildLeafValue'
+
+        and: 'data node is associated with anchor name'
+            assert result.get('anchorName') == 'anchor01'
+    }
 }
 
index 57b9962..9f28526 100644 (file)
@@ -2314,6 +2314,93 @@ paths:
                 message: Internal Server Error
                 details: Internal Server Error occurred
       x-codegen-request-body-name: xpath
+  /v2/dataspaces/{dataspace-name}/nodes/query:
+    get:
+      tags:
+        - cps-query
+      summary: Query data nodes
+      description: Query data nodes for the given dataspace across anchors using CPS path
+      operationId: getNodesByDataspaceAndCpsPath
+      parameters:
+        - name: dataspace-name
+          in: path
+          description: dataspace-name
+          required: true
+          schema:
+            type: string
+            example: my-dataspace
+        - name: cps-path
+          in: query
+          description: "For more details on cps path, please refer https://docs.onap.org/projects/onap-cps/en/latest/cps-path.html"
+          required: false
+          schema:
+            type: string
+            default: /
+          examples:
+            container cps path:
+              value: //bookstore
+            list attributes cps path:
+              value: "//categories[@code=1]"
+        - name: descendants
+          in: query
+          description: "Number of descendants to query. Allowed values are 'none', 'all',\
+          \ -1 (for all), 0 (for none) and any positive number."
+          required: false
+          schema:
+            type: string
+            example: "3"
+            default: none
+      responses:
+        "200":
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: object
+              examples:
+                dataSample:
+                  $ref: '#/components/examples/dataSample'
+        "400":
+          description: Bad Request
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ErrorMessage'
+              example:
+                status: 400
+                message: Bad Request
+                details: The provided request is not valid
+        "401":
+          description: Unauthorized
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ErrorMessage'
+              example:
+                status: 401
+                message: Unauthorized request
+                details: This request is unauthorized
+        "403":
+          description: Forbidden
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ErrorMessage'
+              example:
+                status: 403
+                message: Request Forbidden
+                details: This request is forbidden
+        "500":
+          description: Internal Server Error
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ErrorMessage'
+              example:
+                status: 500
+                message: Internal Server Error
+                details: Internal Server Error occurred
+      x-codegen-request-body-name: xpath
 components:
   schemas:
     ErrorMessage:
diff --git a/test-tools/subtract-metrics-reports.py b/test-tools/subtract-metrics-reports.py
new file mode 100755 (executable)
index 0000000..8f6fab3
--- /dev/null
@@ -0,0 +1,80 @@
+#!/usr/bin/env python3
+#
+# Copyright 2023 Nordix Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import csv
+
+
+def load_metrics_table(filename):
+    with open(filename) as tsvFile:
+        csvreader = csv.DictReader(tsvFile, dialect="excel-tab")
+        table = {}
+        for source_row in csvreader:
+            method, count, sum_time = source_row['Method'], source_row['Count'], source_row['Sum']
+            table[method] = { 'Count': int(float(count)), 'Sum': float(sum_time) }
+    return table
+
+
+def save_metrics_table(table, filename):
+    with open(filename, 'w', newline='') as outfile:
+        csvwriter = csv.writer(outfile, dialect="excel-tab")
+        csvwriter.writerow(["Method", "Count", "Sum"])
+        for method in table:
+            count, sum_time = table[method]['Count'], table[method]['Sum']
+            csvwriter.writerow([method, count, sum_time])
+
+
+def subtract_metrics_tables(table, table_to_subtract):
+    result = {}
+    for method in table:
+        result[method] = table[method]
+    for method in table_to_subtract:
+        result[method]['Count'] = result[method]['Count'] - table_to_subtract[method]['Count']
+        result[method]['Sum'] = result[method]['Sum'] - table_to_subtract[method]['Sum']
+    return filter_null_metrics_from_metrics_table(result)
+
+
+def filter_null_metrics_from_metrics_table(table):
+    result = {}
+    for method in table:
+        if table[method]['Count'] > 0:
+            result[method] = table[method]
+    return result
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-a', '--metrics-after',
+                        required=True,
+                        help='path to metrics table to subtract from',
+                        dest='tsvpath_after',
+                        type=str)
+    parser.add_argument('-b', '--metrics-before',
+                        required=True,
+                        help='path to metrics table to subtract',
+                        dest='tsvpath_before',
+                        type=str)
+    parser.add_argument('-o', '--output',
+                        required=True,
+                        help='path to output metrics table',
+                        dest='outpath',
+                        type=str)
+    args = parser.parse_args()
+    table1 = load_metrics_table(args.tsvpath_before)
+    table2 = load_metrics_table(args.tsvpath_after)
+    table_diff = subtract_metrics_tables(table2, table1)
+    save_metrics_table(table_diff, args.outpath)
diff --git a/test-tools/test-deregistration.sh b/test-tools/test-deregistration.sh
new file mode 100755 (executable)
index 0000000..bf38e3b
--- /dev/null
@@ -0,0 +1,162 @@
+#!/bin/bash
+#
+# Copyright 2023 Nordix Foundation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+set -o errexit  # Exit on most errors
+set -o nounset  # Disallow expansion of unset variables
+set -o pipefail # Use last non-zero exit code in a pipeline
+#set -o xtrace   # Uncomment for debugging
+
+GRAB_METRICS=true
+DOCKER_COMPOSE_FILE=../docker-compose/docker-compose.yml
+CREATE_REQUEST=/tmp/cmhandles-create-req.txt
+REMOVE_REQUEST=/tmp/cmhandles-remove-req.txt
+REPORT_FILE=metrics-reports/deregister-summary-$(date --iso-8601=seconds).tsv
+
+stop_docker() {
+    docker-compose -f $DOCKER_COMPOSE_FILE down >/dev/null
+    docker container prune -f >/dev/null
+    docker volume prune -f >/dev/null
+}
+
+restart_docker() {
+    stop_docker
+    docker-compose -f $DOCKER_COMPOSE_FILE --profile dmi-stub --profile monitoring up -d >/dev/null
+}
+
+wait_for_cps_to_start() {
+    docker logs cps-and-ncmp -f | grep -m 1 'Started Application' >/dev/null || true
+}
+
+get_number_of_handles_ready() {
+    PGPASSWORD=cps psql -h localhost -p 5432 cpsdb cps -c \
+        "SELECT count(*) FROM public.fragment where attributes @> '{\"cm-handle-state\": \"READY\"}';" \
+        | sed '3!d' | sed 's/ *//'
+}
+
+wait_for_handles_to_be_ready() {
+    local TOTAL_HANDLES=$1
+    while
+        sleep 30
+        HANDLES_READY=$(get_number_of_handles_ready)
+        echo "There are $HANDLES_READY CM handles in READY state."
+        [ $HANDLES_READY -ne $TOTAL_HANDLES ]
+    do true; done
+}
+
+create_handles() {
+    curl --fail --silent --show-error \
+        --location 'http://localhost:8883/ncmpInventory/v1/ch' \
+        --header 'Authorization: Basic Y3BzdXNlcjpjcHNyMGNrcyE=' \
+        --header 'Content-Type: application/json' \
+        --data @$CREATE_REQUEST
+}
+
+remove_handles_and_record_time() {
+    curl --fail --silent --show-error --output /dev/null --write-out '%{time_total}\n' \
+        --location 'http://localhost:8883/ncmpInventory/v1/ch' \
+        --header 'Authorization: Basic Y3BzdXNlcjpjcHNyMGNrcyE=' \
+        --header 'Content-Type: application/json' \
+        --header 'Cookie: JSESSIONID=node018g80wfn6qfk9yihx8pne7bc31.node0' \
+        --data @$REMOVE_REQUEST >> $REPORT_FILE
+}
+
+create_request_bodies() {
+    local CREATE_SIZE=$1
+    local REMOVE_SIZE=$2
+    echo -n '{"dmiPlugin": "http://ncmp-dmi-plugin-stub:8783","createdCmHandles":[' > $CREATE_REQUEST
+    echo -n '{"dmiPlugin": "http://ncmp-dmi-plugin-stub:8783","removedCmHandles":[' > $REMOVE_REQUEST
+    for i in $(seq 1 $CREATE_SIZE); do
+        local CMHANDLE=$(uuidgen | tr -d '-')
+        echo -n "{\"cmHandle\": \"$CMHANDLE\",\"cmHandleProperties\":{\"neType\":\"RadioNode\"}}" \
+            >> $CREATE_REQUEST
+        if [ $i -lt $CREATE_SIZE ]; then
+            echo -n "," >> $CREATE_REQUEST
+        fi
+        if [ $i -le $REMOVE_SIZE ]; then
+            echo -n "\"$CMHANDLE\"" >> $REMOVE_REQUEST
+        fi
+        if [ $i -lt $REMOVE_SIZE ]; then
+            echo -n "," >> $REMOVE_REQUEST
+        fi
+    done
+    echo ']}' >> $CREATE_REQUEST
+    echo ']}' >> $REMOVE_REQUEST
+}
+
+test_deregistration() {
+    local REMOVE_SIZE=$1
+    local CREATE_SIZE=$2
+
+    echo "Testing deregistration of $REMOVE_SIZE out of $CREATE_SIZE CM handles"
+
+    echo "Restarting docker"
+    restart_docker
+    echo "Waiting for CPS to start"
+    wait_for_cps_to_start
+
+    echo "Creating request bodies"
+    create_request_bodies $CREATE_SIZE $REMOVE_SIZE
+
+    echo "[$(date --iso-8601=seconds)] Creating CM handles"
+    create_handles
+    echo "Waiting for CM handles to be in READY state"
+    wait_for_handles_to_be_ready $CREATE_SIZE
+
+    if [ "$GRAB_METRICS" = "true" ]; then
+        echo "Grabbing metrics before deregistration"
+        METRICS_BEFORE=$(./generate-metrics-report.sh)
+    fi
+
+    echo "[$(date --iso-8601=seconds)] Removing CM handles"
+    echo -e -n "$REMOVE_SIZE\t$CREATE_SIZE\t" >> $REPORT_FILE
+    remove_handles_and_record_time
+    echo "There are $(get_number_of_handles_ready) CM handles still in READY state."
+
+    if [ "$GRAB_METRICS" = "true" ]; then
+        echo "Grabbing metrics after deregistration"
+        METRICS_AFTER=$(./generate-metrics-report.sh)
+        echo "Generating metrics report"
+        ./subtract-metrics-reports.py -a $METRICS_AFTER -b $METRICS_BEFORE \
+            -o metrics-reports/deregister-$(date --iso-8601=seconds)-$REMOVE_SIZE-$CREATE_SIZE.tsv
+        rm $METRICS_BEFORE $METRICS_AFTER
+    fi
+
+    echo
+}
+
+cleanup() {
+    rm -f "$CREATE_REQUEST" "$REMOVE_REQUEST"
+    stop_docker
+}
+trap cleanup EXIT
+
+mkdir -p $(dirname $REPORT_FILE)
+echo -e "Removed\tTotal\tTime" > $REPORT_FILE
+
+# Delete N/N: 100/100, 200/200... 20000/20000
+for number_to_delete in 100 200 300 400 500 600 700 800 900 1000 2000 3000 4000 5000 10000 15000 20000; do
+    test_deregistration $number_to_delete $number_to_delete
+done
+# Delete N/C: 1000/5000, 2000/5000... 5000/5000
+for number_to_delete in 1000 2000 3000 4000 5000; do
+    test_deregistration $number_to_delete 5000
+done
+# Delete C/N: 1000/1000, 1000/2000... 1000/5000
+for total in 1000 2000 3000 4000 5000; do
+    test_deregistration 1000 $total
+done
+