Add CM Write Data Job Scenario to K6 Test Suite 67/140667/10
author sourabh_sourabh <sourabh.sourabh@est.tech>
Tue, 8 Apr 2025 17:08:09 +0000 (18:08 +0100)
committer sourabh_sourabh <sourabh.sourabh@est.tech>
Wed, 7 May 2025 09:32:11 +0000 (10:32 +0100)
- Added 'write_data_job_scenario' in the K6 test configuration for small
  and large data sizes.
- It helps to simulate lower and higher load and to monitor system behavior
  under infrequent write operations.

Issue-ID: CPS-2716
Change-Id: Ic5a5f3642ff57b294c822541dcc4b1458301ccac
Signed-off-by: sourabh_sourabh <sourabh.sourabh@est.tech>
k6-tests/ncmp/common/cmhandle-crud.js
k6-tests/ncmp/common/passthrough-crud.js
k6-tests/ncmp/common/utils.js
k6-tests/ncmp/common/write-data-job.js [new file with mode: 0644]
k6-tests/ncmp/config/endurance.json
k6-tests/ncmp/config/kpi.json
k6-tests/ncmp/ncmp-test-runner.js

index 3b6c3ff..5699795 100644 (file)
@@ -19,7 +19,8 @@
  */
 
 import { sleep } from 'k6';
-import { performPostRequest, NCMP_BASE_URL, DMI_PLUGIN_URL, TOTAL_CM_HANDLES, MODULE_SET_TAGS
+import {
+    performPostRequest, getAlternateId, NCMP_BASE_URL, DMI_PLUGIN_URL, TOTAL_CM_HANDLES, MODULE_SET_TAGS
 } from './utils.js';
 import { executeCmHandleIdSearch } from './search-base.js';
 
@@ -53,19 +54,20 @@ function createCmHandlePayload(cmHandleIds) {
         "dmiPlugin": DMI_PLUGIN_URL,
         "createdCmHandles": cmHandleIds.map((cmHandleId, index) => {
             // Ensure unique networkSegment within range 1-10
-            let networkSegmentId = Math.floor(Math.random() * 10) + 1; // Random between 1-10
+            let networkSegmentId = Math.floor(Math.random() * 10) + 1;
             let moduleTag = MODULE_SET_TAGS[index % MODULE_SET_TAGS.length];
 
             return {
                 "cmHandle": cmHandleId,
-                "alternateId": cmHandleId.replace('ch-', 'Region=NorthAmerica,Segment='),
+                "alternateId": getAlternateId(cmHandleId.replace('ch-', '')),
                 "moduleSetTag": moduleTag,
+                "dataProducerIdentifier": "some-data-producer-id",
                 "cmHandleProperties": {
                     "segmentId": index + 1,
-                    "networkSegment": `Region=NorthAmerica,Segment=${networkSegmentId}`, // Unique within range 1-10
-                    "deviceIdentifier": `Element=RadioBaseStation_5G_${index + 1000}`, // Unique per cmHandle
-                    "hardwareVersion": `HW-${moduleTag}`, // Shares uniqueness with moduleSetTag
-                    "softwareVersion": `Firmware_${moduleTag}`, // Shares uniqueness with moduleSetTag
+                    "networkSegment": `Region=NorthAmerica,Segment=${networkSegmentId}`,
+                    "deviceIdentifier": `Element=RadioBaseStation_5G_${index + 1000}`,
+                    "hardwareVersion": `HW-${moduleTag}`,
+                    "softwareVersion": `Firmware_${moduleTag}`,
                     "syncStatus": "ACTIVE",
                     "nodeCategory": "VirtualNode"
                 },
index c673257..bf0730a 100644 (file)
@@ -23,24 +23,24 @@ import {
     performGetRequest,
     NCMP_BASE_URL,
     LEGACY_BATCH_TOPIC_NAME,
-    getRandomCmHandleReference,
+    getRandomAlternateId,
 } from './utils.js';
 
-export function passthroughRead(useAlternateId) {
-    const cmHandleReference = getRandomCmHandleReference(useAlternateId);
+export function passthroughRead() {
+    const randomAlternateId = getRandomAlternateId();
     const resourceIdentifier = 'ManagedElement=NRNode1/GNBDUFunction=1';
     const datastoreName = 'ncmp-datastore:passthrough-operational';
     const includeDescendants = true;
-    const url = generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants);
+    const url = generatePassthroughUrl(randomAlternateId, datastoreName, resourceIdentifier, includeDescendants);
     return performGetRequest(url, 'passthroughRead');
 }
 
-export function passthroughWrite(useAlternateId) {
-    const cmHandleReference = getRandomCmHandleReference(useAlternateId);
+export function passthroughWrite() {
+    const randomAlternateId = getRandomAlternateId();
     const resourceIdentifier = 'ManagedElement=NRNode1/GNBDUFunction=1';
     const datastoreName = 'ncmp-datastore:passthrough-running';
     const includeDescendants = false;
-    const url = generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants);
+    const url = generatePassthroughUrl(randomAlternateId, datastoreName, resourceIdentifier, includeDescendants);
     const payload = JSON.stringify({
         "id": "123",
         "attributes": {"userLabel": "test"}
index 49feead..1334bab 100644 (file)
@@ -58,19 +58,38 @@ export function makeBatchOfCmHandleIds(batchSize, batchNumber) {
 export function makeRandomBatchOfAlternateIds() {
     const alternateIds = new Set();
     while (alternateIds.size < LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE) {
-        alternateIds.add(getRandomCmHandleReference(true));
+        alternateIds.add(getRandomAlternateId());
     }
     return Array.from(alternateIds)
 }
 
 /**
- * Generates a random CM Handle reference based on the provided flag.
- * @param useAlternateId
- * @returns {string} CM Handle reference representing a CM handle ID or an alternate ID.
+ * Generates a random CM Handle alternate ID.
+ *
+ * This function selects a random CM Handle ID between 1 and TOTAL_CM_HANDLES (inclusive)
+ * and returns its corresponding alternate ID by invoking `getAlternateId(id)`.
+ *
+ * @returns {string} A CM Handle alternate ID derived from a randomly selected CM Handle ID.
+ */
+export function getRandomAlternateId() {
+    let randomCmHandleId = randomIntBetween(1, TOTAL_CM_HANDLES);
+    return getAlternateId(randomCmHandleId);
+}
+
+/**
+ * Generates an alternate ID path for a CM handle based on its numeric identifier.
+ *
+ * The path follows the structure used in network models, embedding the numeric ID
+ * into both the MeContext and ManagedElement components.
+ *
+ * Example output:
+ *  "/SubNetwork=Europe/SubNetwork=Ireland/MeContext=MyRadioNode123/ManagedElement=MyManagedElement123"
+ *
+ * @param {number} cmHandleNumericId - The numeric identifier extracted from the CM handle ID.
+ * @returns {string} The alternate ID path string.
  */
-export function getRandomCmHandleReference(useAlternateId) {
-    const prefix = useAlternateId ? 'Region=NorthAmerica,Segment=' : 'ch-';
-    return `${prefix}${randomIntBetween(1, TOTAL_CM_HANDLES)}`;
+export function getAlternateId(cmHandleNumericId) {
+    return `/SubNetwork=Europe/SubNetwork=Ireland/MeContext=MyRadioNode${cmHandleNumericId}/ManagedElement=MyManagedElement${cmHandleNumericId}`;
 }
 
 /**
@@ -129,6 +148,8 @@ export function makeCustomSummaryReport(testResults, scenarioConfig) {
         makeSummaryCsvLine('5b', 'NCMP overhead for Synchronous single CM-handle pass-through read with alternate id', 'milliseconds', 'ncmp_overhead_passthrough_read_alt_id', 18, testResults, scenarioConfig),
         makeSummaryCsvLine('6b', 'NCMP overhead for Synchronous single CM-handle pass-through write with alternate id', 'milliseconds', 'ncmp_overhead_passthrough_write_alt_id', 18, testResults, scenarioConfig),
         makeSummaryCsvLine('7', 'Legacy batch read operation', 'events/second', 'legacy_batch_read_cmhandles_per_second', 1750, testResults, scenarioConfig),
+        makeSummaryCsvLine('8', 'Write data job scenario - small', 'milliseconds', 'write_small_data_job_duration', 300, testResults, scenarioConfig),
+        makeSummaryCsvLine('9', 'Write data job scenario - large', 'milliseconds', 'write_large_data_job_duration', 300, testResults, scenarioConfig),
     ];
     return summaryCsvLines.join('\n') + '\n';
 }
diff --git a/k6-tests/ncmp/common/write-data-job.js b/k6-tests/ncmp/common/write-data-job.js
new file mode 100644 (file)
index 0000000..99ed0d5
--- /dev/null
@@ -0,0 +1,121 @@
+/*
+ *  ============LICENSE_START=======================================================
+ *  Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved.
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *
+ *  SPDX-License-Identifier: Apache-2.0
+ *  ============LICENSE_END=========================================================
+ */
+
+import {crypto} from 'k6/experimental/webcrypto';
+import {performPostRequest, getRandomAlternateId, NCMP_BASE_URL} from './utils.js';
+
+/**
+ * Executes a write data job against the NCMP endpoint.
+ *
+ * @param {number} numberOfOperations - Total number of write operations to include in the job (two are generated per randomly selected alternate ID).
+ * @returns {*} The HTTP response from the POST request.
+ */
+export function executeWriteDataJob(numberOfOperations) {
+    const jobId = crypto.randomUUID();
+    const requestPayload = buildDataJobRequestPayload(numberOfOperations);
+
+    console.log(`[WriteJob] Starting job → ID: ${jobId}, Operations: ${numberOfOperations}`);
+    return sendWriteDataJobRequest(jobId, requestPayload);
+}
+
+/**
+ * Sends a write data job request to the NCMP API endpoint.
+ *
+ * @param {string} jobId - The unique identifier for this write job.
+ * @param {Object} payload - The complete request body for the write operation.
+ * @returns {*} The response from the HTTP POST request.
+ */
+function sendWriteDataJobRequest(jobId, payload) {
+    const targetUrl = `${NCMP_BASE_URL}/do-not-use/dataJobs/${jobId}/write`;
+    const serializedPayload = JSON.stringify(payload);
+    return performPostRequest(targetUrl, serializedPayload, 'WriteDataJob');
+}
+
+/**
+ * Builds the full payload for a write data job.
+ *
+ * Each base operation set consists of two write operations:
+ *  - `add` at a nested child path
+ *  - `merge` at a different child path
+ * The requested count is interpreted as a total: the loop runs count/2 times, emitting two operations per iteration.
+ *
+ * The structure returned matches the expected `DataJobRequest` format on the server side:
+ *
+ * Java-side representation:
+ * ```java
+ * public record DataJobRequest(
+ *   DataJobMetadata dataJobMetadata,
+ *   DataJobWriteRequest dataJobWriteRequest
+ * )
+ * ```
+ *
+ * @param {number} numberOfWriteOperations - The total number of write operations to generate (two per base path).
+ * @returns {{
+ *   dataJobMetadata: {
+ *     destination: string,
+ *     dataAcceptType: string,
+ *     dataContentType: string
+ *   },
+ *   dataJobWriteRequest: {
+ *     data: Array<{
+ *       path: string,
+ *       op: "add" | "merge",
+ *       operationId: string,
+ *       value: Object<string, string>
+ *     }>
+ *   }
+ * }} Fully-formed data job request payload.
+ */
+function buildDataJobRequestPayload(numberOfWriteOperations) {
+    const operations = [];
+    for (let i = 1; i <= numberOfWriteOperations / 2; i++) {
+        const basePath = getRandomAlternateId();
+        operations.push(
+            {
+                path: `${basePath}/SomeChild=child-1`,
+                op: 'add',
+                operationId: `${i}-1`,
+                value: {
+                    key: `some-value-one-${i}`
+                }
+            },
+            {
+                path: `${basePath}/SomeChild=child-2`,
+                op: 'merge',
+                operationId: `${i}-2`,
+                value: {
+                    key: `some-value-two-${i}`
+                }
+            }
+        );
+    }
+    return {
+        dataJobMetadata: {
+            destination: "device/managed-element-collection",
+            dataAcceptType: "application/json",
+            dataContentType: "application/merge-patch+json"
+        },
+        dataJobWriteRequest: {
+            data: operations
+        }
+    };
+}
+
+
index 8f65b81..d9bdccb 100644 (file)
       "rate": 1,
       "timeUnit": "1s",
       "preAllocatedVUs": 1
+    },
+    "write_large_data_job_scenario": {
+      "executor": "constant-vus",
+      "exec": "writeDataJobLargeScenario",
+      "vus": 1,
+      "duration": "2h"
+    },
+    "write_small_data_job_scenario": {
+      "executor": "constant-vus",
+      "exec": "writeDataJobSmallScenario",
+      "vus": 10,
+      "duration": "2h"
     }
   }
 }
index 030c0f5..473d8f2 100644 (file)
       "vus": 1,
       "iterations": 1
     },
-    "produceKafkaMessages": {
+    "produce_kafka_messages": {
       "executor": "constant-arrival-rate",
       "rate": 10,
       "timeUnit": "1s",
       "maxVUs": 12,
       "exec": "produceAvcEventsScenario",
       "gracefulStop": "10s"
+    },
+    "write_large_data_job_scenario": {
+      "executor": "constant-arrival-rate",
+      "exec": "writeDataJobLargeScenario",
+      "rate": 1,
+      "timeUnit": "60s",
+      "duration": "15m",
+      "preAllocatedVUs": 1,
+      "startTime": "6s"
+    },
+    "write_small_data_job_scenario": {
+      "executor": "constant-arrival-rate",
+      "exec": "writeDataJobSmallScenario",
+      "rate": 10,
+      "timeUnit": "5s",
+      "duration": "15m",
+      "preAllocatedVUs": 10,
+      "startTime": "6s"
     }
   },
   "thresholds": {
     "cm_search_property_duration": ["avg <= 24000"],
     "cm_search_cpspath_duration": ["avg <= 24000"],
     "cm_search_trustlevel_duration": ["avg <= 24000"],
-    "legacy_batch_read_cmhandles_per_second": ["avg >= 150"]
+    "legacy_batch_read_cmhandles_per_second": ["avg >= 150"],
+    "write_large_data_job_duration": ["avg <= 60000"],
+    "write_small_data_job_duration": ["avg <= 60000"]
   }
 }
index 4e44a47..e438d52 100644 (file)
@@ -31,6 +31,7 @@ import { createCmHandles, deleteCmHandles, waitForAllCmHandlesToBeReady } from '
 import { executeCmHandleSearch, executeCmHandleIdSearch } from './common/search-base.js';
 import { passthroughRead, passthroughWrite, legacyBatchRead } from './common/passthrough-crud.js';
 import { sendBatchOfKafkaMessages } from './common/produce-avc-event.js';
+import { executeWriteDataJob } from "./common/write-data-job.js";
 
 let cmHandlesCreatedPerSecondTrend = new Trend('cmhandles_created_per_second', false);
 let cmHandlesDeletedPerSecondTrend = new Trend('cmhandles_deleted_per_second', false);
@@ -47,6 +48,8 @@ let cmSearchPropertyDurationTrend = new Trend('cm_search_property_duration', tru
 let cmSearchCpsPathDurationTrend = new Trend('cm_search_cpspath_duration', true);
 let cmSearchTrustLevelDurationTrend = new Trend('cm_search_trustlevel_duration', true);
 let legacyBatchReadCmHandlesPerSecondTrend = new Trend('legacy_batch_read_cmhandles_per_second', false);
+let writeSmallDataJobDurationTrend = new Trend('write_small_data_job_duration', true);
+let writeLargeDataJobDurationTrend = new Trend('write_large_data_job_duration', true);
 
 export const legacyBatchEventReader = new Reader({
     brokers: [KAFKA_BOOTSTRAP_SERVERS],
@@ -101,7 +104,7 @@ export function teardown() {
 }
 
 export function passthroughReadAltIdScenario() {
-    const response = passthroughRead(true);
+    const response = passthroughRead();
     if (check(response, { 'passthrough read with alternate Id status equals 200': (r) => r.status === 200 })) {
         const overhead = response.timings.duration - READ_DATA_FOR_CM_HANDLE_DELAY_MS;
         passthroughReadNcmpOverheadTrendWithAlternateId.add(overhead);
@@ -109,7 +112,7 @@ export function passthroughReadAltIdScenario() {
 }
 
 export function passthroughWriteAltIdScenario() {
-    const response = passthroughWrite(true);
+    const response = passthroughWrite();
     if (check(response, { 'passthrough write with alternate Id status equals 201': (r) => r.status === 201 })) {
         const overhead = response.timings.duration - WRITE_DATA_FOR_CM_HANDLE_DELAY_MS;
         passthroughWriteNcmpOverheadTrendWithAlternateId.add(overhead);
@@ -202,6 +205,28 @@ export function legacyBatchProduceScenario() {
     check(response, { 'data operation batch read status equals 200': (r) => r.status === 200 });
 }
 
+/**
+ * Scenario for writing a large volume of DCM write operations.
+ */
+export function writeDataJobLargeScenario() {
+    const response = executeWriteDataJob(100000);
+    if (check(response, {'large  writeDataJob response status is 200': (r) => r.status === 200})
+        && check(response, {'large  writeDataJob received expected number of responses': (r) => r.json('#') === 1})) {
+        writeLargeDataJobDurationTrend.add(response.timings.duration);
+    }
+}
+
+/**
+ * Scenario for writing a small volume of DCM write operations.
+ */
+export function writeDataJobSmallScenario() {
+    const response = executeWriteDataJob(100);
+    if (check(response, {'small writeDataJob response status is 200': (r) => r.status === 200})
+        && check(response, {'small writeDataJob received expected number of responses': (r) => r.json('#') === 1})) {
+        writeSmallDataJobDurationTrend.add(response.timings.duration);
+    }
+}
+
 export function produceAvcEventsScenario() {
     sendBatchOfKafkaMessages(250);
 }