/*
 *  ============LICENSE_START=======================================================
 *  Copyright (C) 2025 OpenInfra Foundation Europe. All rights reserved.
 *  ================================================================================
 *  Licensed under the Apache License, Version 2.0 (the 'License');
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an 'AS IS' BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 *  SPDX-License-Identifier: Apache-2.0
 *  ============LICENSE_END=========================================================
 */

package org.onap.cps.integration.performance.ncmp

import org.onap.cps.integration.ResourceMeter
import org.onap.cps.integration.base.CpsIntegrationSpecBase
import org.onap.cps.ncmp.api.datajobs.DataJobService
import org.onap.cps.ncmp.api.datajobs.models.DataJobMetadata
import org.onap.cps.ncmp.api.datajobs.models.DataJobWriteRequest
import org.onap.cps.ncmp.api.datajobs.models.WriteOperation
import org.springframework.beans.factory.annotation.Autowired
import spock.lang.Ignore
import java.util.concurrent.CompletableFuture
import java.util.concurrent.Executors

/**
 * This test does not depend on the common performance test data, hence it only extends the integration spec base.
 */
class WriteDataJobPerfTest extends CpsIntegrationSpecBase {

    @Autowired
    DataJobService dataJobService

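    /**
     * Builds a DataJobWriteRequest with 3 write operations (two child paths and the parent path) per iteration,
     * i.e. 3 * numberOfWriteOperations operations in total.
     */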
    def populateDataJobWriteRequests(int numberOfWriteOperations) {
        def writeOperations = []
        for (int i = 1; i <= numberOfWriteOperations; i++) {
            def basePath = "/SubNetwork=Europe/SubNetwork=Ireland/MeContext=MyRadioNode${i}/ManagedElement=MyManagedElement${i}"
            writeOperations.add(new WriteOperation("${basePath}/SomeChild=child-1", 'operation1', '1', null))
            writeOperations.add(new WriteOperation("${basePath}/SomeChild=child-2", 'operation2', '2', null))
            writeOperations.add(new WriteOperation(basePath, 'operation3', '3', null))
        }
        return new DataJobWriteRequest(writeOperations)
    }

    @Ignore  // CPS-2691
    def 'Performance test for writeDataJob method'() {
        given: 'register 10_000 cm handles (with alternate ids)'
            registerTestCmHandles(10_000)
            def dataJobWriteRequest = populateDataJobWriteRequests(10_000)
        when: 'sending a write job to NCMP with dynamically generated write operations'
            def executionResult = executeWriteJob('d1', dataJobWriteRequest)
        then: 'the result is recorded, not asserted; see https://lf-onap.atlassian.net/browse/CPS-2691'
            println "*** CPS-2691 Execution time: ${executionResult.executionTime} seconds | Memory usage: ${executionResult.memoryUsage} MB"
        cleanup: 'deregister test cm handles'
            deregisterTestCmHandles(10_000)
    }

    @Ignore  // CPS-2692
    def 'Performance test for writeDataJob method with 10 parallel requests'() {
        given: 'register 1_000 cm handles (with alternate ids)'
            registerTestCmHandles(1_000)
        when: 'sending 10 parallel write jobs to NCMP'
            def executionResults = executeParallelWriteJobs(10, 1_000)
        then: 'record execution times'
            executionResults.eachWithIndex { result, index ->
                logExecutionResults("CPS-2692 Job-${index + 1}", result)
            }
        cleanup: 'deregister test cm handles'
            deregisterTestCmHandles(1_000)
    }

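    /**
     * Registers a sequence of cm handles (without waiting for them to become READY),
     * each with an alternate id matching the base paths used in the generated write operations.
     */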
    def registerTestCmHandles(numberOfCmHandles) {
        registerSequenceOfCmHandlesWithManyModuleReferencesButDoNotWaitForReady(
                DMI1_URL, 'tagA', numberOfCmHandles, 1, ModuleNameStrategy.UNIQUE,
                { "/SubNetwork=Europe/SubNetwork=Ireland/MeContext=MyRadioNode${it}/ManagedElement=MyManagedElement${it}" }
        )
    }

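    /**
     * Runs the given number of write jobs concurrently on a fixed thread pool,
     * waits for all of them to complete and returns each job's execution result.
     */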
    def executeParallelWriteJobs(numberOfJobs, numberOfWriteOperations) {
        def executorService = Executors.newFixedThreadPool(numberOfJobs)
        def futures = (0..<numberOfJobs).collect { jobId ->
            CompletableFuture.supplyAsync({ -> executeWriteJob(jobId, populateDataJobWriteRequests(numberOfWriteOperations)) }, executorService)
        }
        def executionResults = futures.collect { it.join() }
        executorService.shutdown()
        return executionResults
    }

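    /**
     * Executes a single write data job while measuring it with a ResourceMeter and
     * returns a map with the 'executionTime' (in seconds) and 'memoryUsage' (in MB).
     */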
    def executeWriteJob(jobId, dataJobWriteRequest) {
        def localMeter = new ResourceMeter()
        localMeter.start()
        dataJobService.writeDataJob('', '', new DataJobMetadata("job-${jobId}", '', ''), dataJobWriteRequest)
        localMeter.stop()
        ['executionTime': localMeter.totalTimeInSeconds, 'memoryUsage': localMeter.totalMemoryUsageInMB]
    }

    def logExecutionResults(jobId, result) {
        println "*** ${jobId} Execution time: ${result.executionTime} seconds | Memory usage: ${result.memoryUsage} MB"
    }

    def deregisterTestCmHandles(numberOfCmHandles) {
        deregisterSequenceOfCmHandles(DMI1_URL, numberOfCmHandles, 1)
    }
}