*/
import { sleep } from 'k6';
-import {
- performPostRequest, getAlternateId, NCMP_BASE_URL, DMI_PLUGIN_URL, TOTAL_CM_HANDLES, MODULE_SET_TAGS
+import { performPostRequest, NCMP_BASE_URL, DMI_PLUGIN_URL, TOTAL_CM_HANDLES, MODULE_SET_TAGS
} from './utils.js';
import { executeCmHandleIdSearch } from './search-base.js';
"dmiPlugin": DMI_PLUGIN_URL,
"createdCmHandles": cmHandleIds.map((cmHandleId, index) => {
// Assign a random networkSegment within range 1-10 (repeats are possible)
- let networkSegmentId = Math.floor(Math.random() * 10) + 1;
+ let networkSegmentId = Math.floor(Math.random() * 10) + 1; // Random between 1 and 10
let moduleTag = MODULE_SET_TAGS[index % MODULE_SET_TAGS.length];
return {
"cmHandle": cmHandleId,
- "alternateId": getAlternateId(cmHandleId.replace('ch-', '')),
+ "alternateId": cmHandleId.replace('ch-', 'Region=NorthAmerica,Segment='),
"moduleSetTag": moduleTag,
- "dataProducerIdentifier": "some-data-producer-id",
"cmHandleProperties": {
"segmentId": index + 1,
- "networkSegment": `Region=NorthAmerica,Segment=${networkSegmentId}`,
- "deviceIdentifier": `Element=RadioBaseStation_5G_${index + 1000}`,
- "hardwareVersion": `HW-${moduleTag}`,
- "softwareVersion": `Firmware_${moduleTag}`,
+ "networkSegment": `Region=NorthAmerica,Segment=${networkSegmentId}`, // Unique within range 1-10
+ "deviceIdentifier": `Element=RadioBaseStation_5G_${index + 1000}`, // Unique per cmHandle
+ "hardwareVersion": `HW-${moduleTag}`, // Shares uniqueness with moduleSetTag
+ "softwareVersion": `Firmware_${moduleTag}`, // Shares uniqueness with moduleSetTag
"syncStatus": "ACTIVE",
"nodeCategory": "VirtualNode"
},
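For clarity, this is the shape of a single `createdCmHandles` entry the mapping above produces. Values are illustrative: index 0 with `cmHandleId` `'ch-1'`, a hypothetical first module set tag `'tagA'` (the real tags come from `MODULE_SET_TAGS` in `utils.js`), and one possible random draw for `networkSegmentId`:

```js
// Illustrative entry only; 'tagA' stands in for whatever MODULE_SET_TAGS[0]
// actually is, and the networkSegment value is one possible random draw.
const exampleCreatedCmHandle = {
    "cmHandle": "ch-1",
    "alternateId": "Region=NorthAmerica,Segment=1",
    "moduleSetTag": "tagA",
    "cmHandleProperties": {
        "segmentId": 1,
        "networkSegment": "Region=NorthAmerica,Segment=4",      // random draw, 1-10
        "deviceIdentifier": "Element=RadioBaseStation_5G_1000",  // index + 1000
        "hardwareVersion": "HW-tagA",
        "softwareVersion": "Firmware_tagA",
        "syncStatus": "ACTIVE",
        "nodeCategory": "VirtualNode"
    }
};
```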
performGetRequest,
NCMP_BASE_URL,
LEGACY_BATCH_TOPIC_NAME,
- getRandomAlternateId,
+ getRandomCmHandleReference,
} from './utils.js';
-export function passthroughRead() {
- const randomAlternateId = getRandomAlternateId();
+export function passthroughRead(useAlternateId) {
+ const cmHandleReference = getRandomCmHandleReference(useAlternateId);
const resourceIdentifier = 'ManagedElement=NRNode1/GNBDUFunction=1';
const datastoreName = 'ncmp-datastore:passthrough-operational';
const includeDescendants = true;
- const url = generatePassthroughUrl(randomAlternateId, datastoreName, resourceIdentifier, includeDescendants);
+ const url = generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants);
return performGetRequest(url, 'passthroughRead');
}
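`generatePassthroughUrl` is not shown in this diff. A minimal sketch, assuming the standard NCMP passthrough path `/ncmp/v1/ch/{cmHandleReference}/data/ds/{datastoreName}` (verify against the real helper in the test sources):

```js
// Sketch only, not the actual implementation: builds an NCMP passthrough URL
// from a CM handle reference (either a CM handle ID or an alternate ID).
function generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants) {
    const descendantsParam = includeDescendants ? '&include-descendants=true' : '';
    return `${NCMP_BASE_URL}/ncmp/v1/ch/${cmHandleReference}/data/ds/${datastoreName}`
         + `?resourceIdentifier=${resourceIdentifier}${descendantsParam}`;
}
```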
-export function passthroughWrite() {
- const randomAlternateId = getRandomAlternateId();
+export function passthroughWrite(useAlternateId) {
+ const cmHandleReference = getRandomCmHandleReference(useAlternateId);
const resourceIdentifier = 'ManagedElement=NRNode1/GNBDUFunction=1';
const datastoreName = 'ncmp-datastore:passthrough-running';
const includeDescendants = false;
- const url = generatePassthroughUrl(randomAlternateId, datastoreName, resourceIdentifier, includeDescendants);
+ const url = generatePassthroughUrl(cmHandleReference, datastoreName, resourceIdentifier, includeDescendants);
const payload = JSON.stringify({
"id": "123",
"attributes": {"userLabel": "test"}
export function makeRandomBatchOfAlternateIds() {
const alternateIds = new Set();
while (alternateIds.size < LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE) {
- alternateIds.add(getRandomAlternateId());
+ alternateIds.add(getRandomCmHandleReference(true));
}
return Array.from(alternateIds)
}
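Since the flag is hard-coded to `true` here, every batch member is an alternate ID. An illustrative three-element batch (the real size is `LEGACY_BATCH_THROUGHPUT_TEST_BATCH_SIZE`):

```js
// Illustrative result; the Set guarantees the references are distinct.
const exampleBatch = [
    'Region=NorthAmerica,Segment=412',
    'Region=NorthAmerica,Segment=7',
    'Region=NorthAmerica,Segment=1983'
];
```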
/**
- * Generates a random CM Handle alternate ID.
- *
- * This function selects a random CM Handle ID between 1 and TOTAL_CM_HANDLES (inclusive)
- * and returns its corresponding alternate ID by invoking `getAlternateId(id)`.
- *
- * @returns {string} A CM Handle alternate ID derived from a randomly selected CM Handle ID.
- */
-export function getRandomAlternateId() {
- let randomCmHandleId = randomIntBetween(1, TOTAL_CM_HANDLES);
- return getAlternateId(randomCmHandleId);
-}
-
-/**
- * Generates an alternate ID path for a CM handle based on its numeric identifier.
- *
- * The path follows the structure used in network models, embedding the numeric ID
- * into both the MeContext and ManagedElement components.
- *
- * Example output:
- * "/SubNetwork=Europe/SubNetwork=Ireland/MeContext=MyRadioNode123/ManagedElement=MyManagedElement123"
- *
- * @param {number} cmHandleNumericId - The numeric identifier extracted from the CM handle ID.
- * @returns {string} The alternate ID path string.
+ * Generates a random CM Handle reference based on the provided flag.
+ * @param {boolean} useAlternateId - If true, an alternate ID is returned; otherwise a CM handle ID.
+ * @returns {string} A CM Handle reference: either a CM handle ID or an alternate ID.
*/
-export function getAlternateId(cmHandleNumericId) {
- return `/SubNetwork=Europe/SubNetwork=Ireland/MeContext=MyRadioNode${cmHandleNumericId}/ManagedElement=MyManagedElement${cmHandleNumericId}`;
+export function getRandomCmHandleReference(useAlternateId) {
+ const prefix = useAlternateId ? 'Region=NorthAmerica,Segment=' : 'ch-';
+ return `${prefix}${randomIntBetween(1, TOTAL_CM_HANDLES)}`;
}
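The flag simply switches between the two reference styles used throughout these tests, e.g. for a random draw of 42:

```js
getRandomCmHandleReference(true);   // 'Region=NorthAmerica,Segment=42' (alternate ID)
getRandomCmHandleReference(false);  // 'ch-42'                          (CM handle ID)
```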
/**
makeSummaryCsvLine('5b', 'NCMP overhead for Synchronous single CM-handle pass-through read with alternate id', 'milliseconds', 'ncmp_overhead_passthrough_read_alt_id', 18, testResults, scenarioConfig),
makeSummaryCsvLine('6b', 'NCMP overhead for Synchronous single CM-handle pass-through write with alternate id', 'milliseconds', 'ncmp_overhead_passthrough_write_alt_id', 18, testResults, scenarioConfig),
makeSummaryCsvLine('7', 'Legacy batch read operation', 'events/second', 'legacy_batch_read_cmhandles_per_second', 1750, testResults, scenarioConfig),
- makeSummaryCsvLine('8', 'Write data job scenario - small', 'milliseconds', 'write_small_data_job_duration', 300, testResults, scenarioConfig),
- makeSummaryCsvLine('9', 'Write data job scenario - large', 'milliseconds', 'write_large_data_job_duration', 300, testResults, scenarioConfig),
];
return summaryCsvLines.join('\n') + '\n';
}
+++ /dev/null
-/*
- * ============LICENSE_START=======================================================
- * Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-import {crypto} from 'k6/experimental/webcrypto';
-import {performPostRequest, getRandomAlternateId, NCMP_BASE_URL} from './utils.js';
-
-/**
- * Executes a write data job against the NCMP endpoint.
- *
- * @param {number} numberOfOperations - Total number of write operations to include in the job (two per base path).
- * @returns {*} The HTTP response from the POST request.
- */
-export function executeWriteDataJob(numberOfOperations) {
- const jobId = crypto.randomUUID();
- const requestPayload = buildDataJobRequestPayload(numberOfOperations);
-
- console.log(`[WriteJob] Starting job → ID: ${jobId}, Operations: ${numberOfOperations}`);
- return sendWriteDataJobRequest(jobId, requestPayload);
-}
-
-/**
- * Sends a write data job request to the NCMP API endpoint.
- *
- * @param {string} jobId - The unique identifier for this write job.
- * @param {Object} payload - The complete request body for the write operation.
- * @returns {*} The response from the HTTP POST request.
- */
-function sendWriteDataJobRequest(jobId, payload) {
- const targetUrl = `${NCMP_BASE_URL}/do-not-use/dataJobs/${jobId}/write`;
- const serializedPayload = JSON.stringify(payload);
- return performPostRequest(targetUrl, serializedPayload, 'WriteDataJob');
-}
-
-/**
- * Builds the full payload for a write data job.
- *
- * Each base operation set consists of two write operations against a shared random base path:
- * - `add` at one nested child path
- * - `merge` at a different child path
- *
- * The structure returned matches the expected `DataJobRequest` format on the server side:
- *
- * Java-side representation:
- * ```java
- * public record DataJobRequest(
- * DataJobMetadata dataJobMetadata,
- * DataJobWriteRequest dataJobWriteRequest
- * )
- * ```
- *
- * @param {number} numberOfWriteOperations - Total number of write operations to generate (each base path contributes two).
- * @returns {{
- * dataJobMetadata: {
- * destination: string,
- * dataAcceptType: string,
- * dataContentType: string
- * },
- * dataJobWriteRequest: {
- * data: Array<{
- * path: string,
- * op: "add" | "merge" | "remove",
- * operationId: string,
- * value: Map<String, Object>
- * }>
- * }
- * }} Fully-formed data job request payload.
- */
-function buildDataJobRequestPayload(numberOfWriteOperations) {
- const operations = [];
- for (let i = 1; i <= numberOfWriteOperations / 2; i++) {
- const basePath = getRandomAlternateId();
- operations.push(
- {
- path: `${basePath}/SomeChild=child-1`,
- op: 'add',
- operationId: `${i}-1`,
- value: {
- key: `some-value-one-${i}`
- }
- },
- {
- path: `${basePath}/SomeChild=child-2`,
- op: 'merge',
- operationId: `${i}-2`,
- value: {
- key: `some-value-two-${i}`
- }
- }
- );
- }
- return {
- dataJobMetadata: {
- destination: "device/managed-element-collection",
- dataAcceptType: "application/json",
- dataContentType: "application/merge-patch+json"
- },
- dataJobWriteRequest: {
- data: operations
- }
- };
-}
-
-
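For the record, the removed `buildDataJobRequestPayload` produced payloads of the following shape. This is an illustrative call with `numberOfWriteOperations = 2` (one loop iteration) and one possible random base path from the old `getRandomAlternateId`:

```js
// Illustrative payload from the removed buildDataJobRequestPayload(2):
// a single iteration yields one 'add' and one 'merge' operation.
const examplePayload = {
    dataJobMetadata: {
        destination: "device/managed-element-collection",
        dataAcceptType: "application/json",
        dataContentType: "application/merge-patch+json"
    },
    dataJobWriteRequest: {
        data: [
            {
                path: "/SubNetwork=Europe/SubNetwork=Ireland/MeContext=MyRadioNode42/ManagedElement=MyManagedElement42/SomeChild=child-1",
                op: "add",
                operationId: "1-1",
                value: { key: "some-value-one-1" }
            },
            {
                path: "/SubNetwork=Europe/SubNetwork=Ireland/MeContext=MyRadioNode42/ManagedElement=MyManagedElement42/SomeChild=child-2",
                op: "merge",
                operationId: "1-2",
                value: { key: "some-value-two-1" }
            }
        ]
    }
};
```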
"rate": 1,
"timeUnit": "1s",
"preAllocatedVUs": 1
- },
- "write_large_data_job_scenario": {
- "executor": "constant-vus",
- "exec": "writeDataJobLargeScenario",
- "vus": 1,
- "duration": "2h"
- },
- "write_small_data_job_scenario": {
- "executor": "constant-vus",
- "exec": "writeDataJobSmallScenario",
- "vus": 10,
- "duration": "2h"
}
}
}
"vus": 1,
"iterations": 1
},
- "produce_kafka_messages": {
+ "produceKafkaMessages": {
"executor": "constant-arrival-rate",
"rate": 10,
"timeUnit": "1s",
"maxVUs": 12,
"exec": "produceAvcEventsScenario",
"gracefulStop": "10s"
- },
- "write_large_data_job_scenario": {
- "executor": "constant-arrival-rate",
- "exec": "writeDataJobLargeScenario",
- "rate": 1,
- "timeUnit": "60s",
- "duration": "15m",
- "preAllocatedVUs": 1,
- "startTime": "6s"
- },
- "write_small_data_job_scenario": {
- "executor": "constant-arrival-rate",
- "exec": "writeDataJobSmallScenario",
- "rate": 10,
- "timeUnit": "5s",
- "duration": "15m",
- "preAllocatedVUs": 10,
- "startTime": "6s"
}
},
"thresholds": {
"cm_search_property_duration": ["avg <= 24000"],
"cm_search_cpspath_duration": ["avg <= 24000"],
"cm_search_trustlevel_duration": ["avg <= 24000"],
- "legacy_batch_read_cmhandles_per_second": ["avg >= 150"],
- "write_large_data_job_duration": ["avg <= 60000"],
- "write_small_data_job_duration": ["avg <= 60000"]
+ "legacy_batch_read_cmhandles_per_second": ["avg >= 150"]
}
}
import { executeCmHandleSearch, executeCmHandleIdSearch } from './common/search-base.js';
import { passthroughRead, passthroughWrite, legacyBatchRead } from './common/passthrough-crud.js';
import { sendBatchOfKafkaMessages } from './common/produce-avc-event.js';
-import { executeWriteDataJob } from "./common/write-data-job.js";
let cmHandlesCreatedPerSecondTrend = new Trend('cmhandles_created_per_second', false);
let cmHandlesDeletedPerSecondTrend = new Trend('cmhandles_deleted_per_second', false);
let cmSearchCpsPathDurationTrend = new Trend('cm_search_cpspath_duration', true);
let cmSearchTrustLevelDurationTrend = new Trend('cm_search_trustlevel_duration', true);
let legacyBatchReadCmHandlesPerSecondTrend = new Trend('legacy_batch_read_cmhandles_per_second', false);
-let writeSmallDataJobDurationTrend = new Trend('write_small_data_job_duration', true);
-let writeLargeDataJobDurationTrend = new Trend('write_large_data_job_duration', true);
export const legacyBatchEventReader = new Reader({
brokers: [KAFKA_BOOTSTRAP_SERVERS],
}
export function passthroughReadAltIdScenario() {
- const response = passthroughRead();
+ const response = passthroughRead(true);
if (check(response, { 'passthrough read with alternate Id status equals 200': (r) => r.status === 200 })) {
const overhead = response.timings.duration - READ_DATA_FOR_CM_HANDLE_DELAY_MS;
passthroughReadNcmpOverheadTrendWithAlternateId.add(overhead);
}
export function passthroughWriteAltIdScenario() {
- const response = passthroughWrite();
+ const response = passthroughWrite(true);
if (check(response, { 'passthrough write with alternate Id status equals 201': (r) => r.status === 201 })) {
const overhead = response.timings.duration - WRITE_DATA_FOR_CM_HANDLE_DELAY_MS;
passthroughWriteNcmpOverheadTrendWithAlternateId.add(overhead);
check(response, { 'data operation batch read status equals 200': (r) => r.status === 200 });
}
-/**
- * Scenario for writing a large volume of DCM write operation.
- */
-export function writeDataJobLargeScenario() {
- const response = executeWriteDataJob(100000);
- if (check(response, {'large writeDataJob response status is 200': (r) => r.status === 200})
- && check(response, {'large writeDataJob received expected number of responses': (r) => r.json('#') === 1})) {
- writeLargeDataJobDurationTrend.add(response.timings.duration);
- }
-}
-
-/**
- * Scenario for writing a small volume of DCM write operation.
- */
-export function writeDataJobSmallScenario() {
- const response = executeWriteDataJob(100);
- if (check(response, {'small writeDataJob response status is 200': (r) => r.status === 200})
- && check(response, {'small writeDataJob received expected number of responses': (r) => r.json('#') === 1})) {
- writeSmallDataJobDurationTrend.add(response.timings.duration);
- }
-}
-
export function produceAvcEventsScenario() {
sendBatchOfKafkaMessages(250);
}