afb594ab04ff00302a3a19ec52306c095818014d
[cps.git] /
1 /*
2  * ============LICENSE_START=======================================================
3  * Copyright (C) 2023-2024 Nordix Foundation
4  * ================================================================================
5  * Licensed under the Apache License, Version 2.0 (the "License");
6  * you may not use this file except in compliance with the License.
7  * You may obtain a copy of the License at
8  *
9  *       http://www.apache.org/licenses/LICENSE-2.0
10  *
11  * Unless required by applicable law or agreed to in writing, software
12  * distributed under the License is distributed on an "AS IS" BASIS,
13  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14  * See the License for the specific language governing permissions and
15  * limitations under the License.
16  *
17  * SPDX-License-Identifier: Apache-2.0
18  * ============LICENSE_END=========================================================
19  */
20
21 package org.onap.cps.ncmp.impl.data.async
22
23 import com.fasterxml.jackson.databind.ObjectMapper
24 import io.cloudevents.CloudEvent
25 import io.cloudevents.core.builder.CloudEventBuilder
26 import io.cloudevents.kafka.CloudEventDeserializer
27 import io.cloudevents.kafka.CloudEventSerializer
28 import io.cloudevents.kafka.impl.KafkaHeaders
29 import org.apache.kafka.clients.consumer.ConsumerRecord
30 import org.apache.kafka.clients.consumer.KafkaConsumer
31 import org.apache.kafka.common.header.internals.RecordHeaders
32 import org.onap.cps.events.EventsPublisher
33 import org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent
34 import org.onap.cps.ncmp.utils.TestUtils
35 import org.onap.cps.ncmp.utils.events.MessagingBaseSpec
36 import org.onap.cps.utils.JsonObjectMapper
37 import org.spockframework.spring.SpringBean
38 import org.springframework.beans.factory.annotation.Autowired
39 import org.springframework.boot.test.context.SpringBootTest
40 import org.springframework.kafka.listener.adapter.RecordFilterStrategy
41 import org.springframework.test.annotation.DirtiesContext
42 import org.testcontainers.spock.Testcontainers
43
44 import java.time.Duration
45
46 import static org.onap.cps.ncmp.utils.events.CloudEventMapper.toTargetEvent
47
48 @SpringBootTest(classes = [EventsPublisher, DataOperationEventConsumer, RecordFilterStrategies, JsonObjectMapper, ObjectMapper])
49 @Testcontainers
50 @DirtiesContext
/**
 * Integration test for DataOperationEventConsumer.
 *
 * Verifies that a consumed data-operation cloud event is re-published to the
 * client-specified topic with CloudEvents-compliant ce_* headers (including the
 * 'destination' extension) and an intact payload, and that the record filter
 * strategy only passes records whose ce_type is DataOperationEvent.
 */
@SpringBootTest(classes = [EventsPublisher, DataOperationEventConsumer, RecordFilterStrategies, JsonObjectMapper, ObjectMapper])
@Testcontainers
@DirtiesContext
class DataOperationEventConsumerSpec extends MessagingBaseSpec {

    @SpringBean
    EventsPublisher asyncDataOperationEventPublisher = new EventsPublisher<CloudEvent>(legacyEventKafkaTemplate, cloudEventKafkaTemplate)

    @SpringBean
    DataOperationEventConsumer objectUnderTest = new DataOperationEventConsumer(asyncDataOperationEventPublisher)

    @Autowired
    JsonObjectMapper jsonObjectMapper

    @Autowired
    RecordFilterStrategy<String, CloudEvent> dataOperationRecordFilterStrategy

    // Observes what the object under test publishes to the client topic.
    def cloudEventKafkaConsumer = new KafkaConsumer<>(eventConsumerConfigProperties('test', CloudEventDeserializer))
    def static clientTopic = 'client-topic'
    def static dataOperationType = 'org.onap.cps.ncmp.events.async1_0_0.DataOperationEvent'

    def 'Consume and publish event to client specified topic'() {
        given: 'consumer subscribing to client topic'
            cloudEventKafkaConsumer.subscribe([clientTopic])
        and: 'consumer record for data operation event'
            def incomingConsumerRecord = createConsumerRecord(dataOperationType)
        when: 'the data operation event is consumed and published to client specified topic'
            objectUnderTest.consumeAndPublish(incomingConsumerRecord)
        and: 'the client specified topic is polled'
            def forwardedConsumerRecord = cloudEventKafkaConsumer.poll(Duration.ofMillis(1500))[0]
        then: 'verify cloud compliant headers'
            def forwardedHeaders = forwardedConsumerRecord.headers()
            assert KafkaHeaders.getParsedKafkaHeader(forwardedHeaders, 'ce_correlationid') == 'request-id'
            assert KafkaHeaders.getParsedKafkaHeader(forwardedHeaders, 'ce_id') == 'some-uuid'
            assert KafkaHeaders.getParsedKafkaHeader(forwardedHeaders, 'ce_type') == dataOperationType
        and: 'verify that extension is included into header'
            assert KafkaHeaders.getParsedKafkaHeader(forwardedHeaders, 'ce_destination') == clientTopic
        and: 'map consumer record to expected event type'
            def dataOperationResponseEvent = toTargetEvent(forwardedConsumerRecord.value(), DataOperationEvent.class)
        and: 'verify published response data properties'
            // Implicitly asserted by Spock (boolean expressions in a then/and block).
            def response = dataOperationResponseEvent.data.responses[0]
            response.operationId == 'some-operation-id'
            response.statusCode == 'any-success-status-code'
            response.statusMessage == 'Successfully applied changes'
            response.result as String == '[some-key:some-value]'
    }

    def 'Filter an event with type #eventType'() {
        given: 'consumer record for event with type #eventType'
            def recordToBeFiltered = createConsumerRecord(eventType)
        when: 'while consuming the topic ncmp-async-m2m it executes the filter strategy'
            def result = dataOperationRecordFilterStrategy.filter(recordToBeFiltered)
        then: 'the event is #description'
            assert result == expectedResult
        where: 'filter the event based on the eventType #eventType'
            description                                     | eventType         || expectedResult
            'not filtered(the consumer will see the event)' | dataOperationType || false
            'filtered(the consumer will not see the event)' | 'wrongType'       || true
    }

    /**
     * Builds a consumer record carrying a cloud event of the given type,
     * with its ce_* attributes mirrored into the record headers.
     */
    def createConsumerRecord(eventType) {
        def jsonData = TestUtils.getResourceFileContent('dataOperationEvent.json')
        def payloadAsBytes = jsonObjectMapper.asJsonBytes(jsonObjectMapper.convertJsonString(jsonData, DataOperationEvent.class))
        def cloudEvent = createCloudEvent(eventType, payloadAsBytes)
        // Serializing populates the headers with the cloud event's ce_* attributes.
        def recordHeaders = new RecordHeaders()
        new CloudEventSerializer().serialize(clientTopic, recordHeaders, cloudEvent)
        def consumerRecord = new ConsumerRecord<String, CloudEvent>(clientTopic, 0, 0L, 'sample-message-key', cloudEvent)
        recordHeaders.each { consumerRecord.headers().add(it) }
        return consumerRecord
    }

    /** Builds a v1 cloud event with fixed id/source and the test correlation/destination extensions. */
    def createCloudEvent(eventType, byte[] payloadAsBytes) {
        return CloudEventBuilder.v1()
                .withId('some-uuid')
                .withType(eventType)
                .withSource(URI.create('sample-test-source'))
                .withData(payloadAsBytes)
                .withExtension('correlationid', 'request-id')
                .withExtension('destination', clientTopic)
                .build()
    }
}