From: efiacor
Date: Tue, 20 Jul 2021 14:53:48 +0000 (+0100)
Subject: [DCAE-PMSH] Refactoring csit suite
X-Git-Url: https://gerrit.onap.org/r/gitweb?p=integration%2Fcsit.git;a=commitdiff_plain;h=693776120dfc1cc9fb14a1f6a06a9554dce2ee4d

[DCAE-PMSH] Refactoring csit suite

Signed-off-by: efiacor
Change-Id: I60ca22a87c4f2e1b0c244b6a388e17c3f6d79b90
Issue-ID: INT-1946
---

diff --git a/plans/dcaegen2-services-pmsh/testsuite/assets/cbs_sim/cbs-initializer.json b/plans/dcaegen2-services-pmsh/testsuite/assets/cbs_sim/cbs-initializer.json
index 68269622..194e556a 100644
--- a/plans/dcaegen2-services-pmsh/testsuite/assets/cbs_sim/cbs-initializer.json
+++ b/plans/dcaegen2-services-pmsh/testsuite/assets/cbs_sim/cbs-initializer.json
@@ -87,7 +87,7 @@
         "dmaap_info": {
           "client_id": "1475976809466",
           "client_role": "org.onap.dcae.pmPublisher",
-          "topic_url": "http://dmaap:3904/events/unauthenticated.DCAE_CL_OUTPUT",
+          "topic_url": "http://dmaap-mr:3904/events/unauthenticated.DCAE_CL_OUTPUT",
           "location": "san-francisco"
         },
         "type": "message_router"
@@ -99,14 +99,14 @@
         "dmaap_info": {
           "client_id": "1575976809466",
           "client_role": "org.onap.dcae.aaiSub",
-          "topic_url": "http://dmaap:3904/events/AAI_EVENT",
+          "topic_url": "http://dmaap-mr:3904/events/AAI_EVENT",
           "location": "san-francisco"
         }
       },
       "policy_pm_subscriber": {
         "dmaap_info": {
           "location": "san-francisco",
-          "topic_url": "http://dmaap:3904/events/unauthenticated.PMSH_CL_INPUT",
+          "topic_url": "http://dmaap-mr:3904/events/unauthenticated.PMSH_CL_INPUT",
           "client_role": "org.onap.dcae.pmSubscriber",
           "client_id": "1575876809456"
         },
diff --git a/plans/dcaegen2-services-pmsh/testsuite/assets/kafka/zk_client_jaas.conf b/plans/dcaegen2-services-pmsh/testsuite/assets/kafka/zk_client_jaas.conf
new file mode 100644
index 00000000..79a76017
--- /dev/null
+++ b/plans/dcaegen2-services-pmsh/testsuite/assets/kafka/zk_client_jaas.conf
@@ -0,0 +1,5 @@
+Client {
+    org.apache.zookeeper.server.auth.DigestLoginModule required
+    username="kafka"
+    password="kafka_secret";
+ };
\ No newline at end of file
diff --git a/plans/dcaegen2-services-pmsh/testsuite/assets/mr/MsgRtrApi.properties b/plans/dcaegen2-services-pmsh/testsuite/assets/mr/MsgRtrApi.properties
new file mode 100644
index 00000000..47643216
--- /dev/null
+++ b/plans/dcaegen2-services-pmsh/testsuite/assets/mr/MsgRtrApi.properties
@@ -0,0 +1,166 @@
+# LICENSE_START=======================================================
+# org.onap.dmaap
+# ================================================================================
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+###############################################################################
+###############################################################################
+##
+## Cambria API Server config
+##
+## Default values are shown as commented settings.
+##
+###############################################################################
+##
+## HTTP service
+##
+## 3904 is standard as of 7/29/14.
+#
+## Zookeeper Connection
+##
+## Both Cambria and Kafka make use of Zookeeper.
+##
+config.zk.servers=zookeeper:2181
+
+###############################################################################
+##
+## Kafka Connection
+##
+## Items below are passed through to Kafka's producer and consumer
+## configurations (after removing "kafka.")
+## if you want to change request.required.acks it can take this one value
+#kafka.metadata.broker.list=localhost:9092,localhost:9093
+#kafka.metadata.broker.list={{.Values.kafka.name}}:{{.Values.kafka.port}}
+kafka.metadata.broker.list=kafka:9092
+##kafka.request.required.acks=-1
+#kafka.client.zookeeper=${config.zk.servers}
+consumer.timeout.ms=100
+zookeeper.connection.timeout.ms=6000
+zookeeper.session.timeout.ms=20000
+zookeeper.sync.time.ms=2000
+auto.commit.interval.ms=1000
+fetch.message.max.bytes =1000000
+auto.commit.enable=false
+
+#(backoff*retries > zksessiontimeout)
+kafka.rebalance.backoff.ms=10000
+kafka.rebalance.max.retries=6
+
+
+###############################################################################
+##
+## Secured Config
+##
+## Some data stored in the config system is sensitive -- API keys and secrets,
+## for example. to protect it, we use an encryption layer for this section
+## of the config.
+##
+## The key is a base64 encode AES key. This must be created/configured for
+## each installation.
+#cambria.secureConfig.key=
+##
+## The initialization vector is a 16 byte value specific to the secured store.
+## This must be created/configured for each installation.
+#cambria.secureConfig.iv=
+
+## Southfield Sandbox
+cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
+cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
+authentication.adminSecret=fe3cCompound
+
+
+###############################################################################
+##
+## Consumer Caching
+##
+## Kafka expects live connections from the consumer to the broker, which
+## obviously doesn't work over connectionless HTTP requests. The Cambria
+## server proxies HTTP requests into Kafka consumer sessions that are kept
+## around for later re-use. Not doing so is costly for setup per request,
+## which would substantially impact a high volume consumer's performance.
+##
+## This complicates Cambria server failover, because we often need server
+## A to close its connection before server B brings up the replacement.
+##
+
+## The consumer cache is normally enabled.
+#cambria.consumer.cache.enabled=true
+
+## Cached consumers are cleaned up after a period of disuse. The server inspects
+## consumers every sweepFreqSeconds and will clean up any connections that are
+## dormant for touchFreqMs.
+#cambria.consumer.cache.sweepFreqSeconds=15
+cambria.consumer.cache.touchFreqMs=120000
+##stickforallconsumerrequests=false
+## The cache is managed through ZK. The default value for the ZK connection
+## string is the same as config.zk.servers.
+#cambria.consumer.cache.zkConnect=${config.zk.servers}
+
+##
+## Shared cache information is associated with this node's name. The default
+## name is the hostname plus the HTTP service port this host runs on. (The
+## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
+## which is not always adequate.) You can set this value explicitly here.
+##
+#cambria.api.node.identifier=
+
+#cambria.rateLimit.maxEmptyPollsPerMinute=30
+#cambria.rateLimitActual.delay.ms=10
+
+###############################################################################
+##
+## Metrics Reporting
+##
+## This server can report its metrics periodically on a topic.
+##
+#metrics.send.cambria.enabled=true
+#metrics.send.cambria.topic=cambria.apinode.metrics
+#msgrtr.apinode.metrics.dmaap
+#metrics.send.cambria.sendEverySeconds=60
+
+cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
+consumer.timeout=17
+default.partitions=3
+default.replicas=3
+##############################################################################
+#100mb
+maxcontentlength=10000
+
+
+##############################################################################
+#AAF Properties
+msgRtr.namespace.aaf=org.onap.dmaap.mr.topic
+msgRtr.topicfactory.aaf=org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:
+enforced.topic.name.AAF=org.onap.dmaap.mr
+forceAAF=false
+transidUEBtopicreqd=false
+defaultNSforUEB=org.onap.dmaap.mr
+##############################################################################
+#Mirror Maker Agent
+
+msgRtr.mirrormakeradmin.aaf=org.onap.dmaap.mr.mirrormaker|*|admin
+msgRtr.mirrormakeruser.aaf=org.onap.dmaap.mr.mirrormaker|*|user
+msgRtr.mirrormakeruser.aaf.create=org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:
+msgRtr.mirrormaker.timeout=15000
+msgRtr.mirrormaker.topic=org.onap.dmaap.mr.mirrormakeragent
+msgRtr.mirrormaker.consumergroup=mmagentserver
+msgRtr.mirrormaker.consumerid=1
+
+kafka.max.poll.interval.ms=300000
+kafka.heartbeat.interval.ms=60000
+kafka.session.timeout.ms=240000
+kafka.max.poll.records=1000
\ No newline at end of file
diff --git a/plans/dcaegen2-services-pmsh/testsuite/assets/zk/zk_server_jaas.conf b/plans/dcaegen2-services-pmsh/testsuite/assets/zk/zk_server_jaas.conf
new file mode 100644
index 00000000..3d2767fa
--- /dev/null
+++ b/plans/dcaegen2-services-pmsh/testsuite/assets/zk/zk_server_jaas.conf
@@ -0,0 +1,4 @@
+Server {
+    org.apache.zookeeper.server.auth.DigestLoginModule required
+    user_kafka="kafka_secret";
+};
\ No newline at end of file
diff --git a/plans/dcaegen2-services-pmsh/testsuite/docker-compose.yml b/plans/dcaegen2-services-pmsh/testsuite/docker-compose.yml
index 6541ddbf..8f51dfdc 100644
--- a/plans/dcaegen2-services-pmsh/testsuite/docker-compose.yml
+++ b/plans/dcaegen2-services-pmsh/testsuite/docker-compose.yml
@@ -1,6 +1,76 @@
 version: '3.3'
 services:
+  zookeeper:
+    image: nexus3.onap.org:10001/onap/dmaap/zookeeper:6.1.0
+    container_name: dmaap-zookeeper
+    ports:
+      - "2181:2181"
+    environment:
+      ZOOKEEPER_REPLICAS: 1
+      ZOOKEEPER_TICK_TIME: 2000
+      ZOOKEEPER_SYNC_LIMIT: 5
+      ZOOKEEPER_INIT_LIMIT: 10
+      ZOOKEEPER_MAX_CLIENT_CNXNS: 200
+      ZOOKEEPER_AUTOPURGE_SNAP_RETAIN_COUNT: 3
+      ZOOKEEPER_AUTOPURGE_PURGE_INTERVAL: 24
+      ZOOKEEPER_CLIENT_PORT: 2181
+      KAFKA_OPTS: -Djava.security.auth.login.config=/etc/zookeeper/secrets/jaas/zk_server_jaas.conf -Dzookeeper.kerberos.removeHostFromPrincipal=true -Dzookeeper.kerberos.removeRealmFromPrincipal=true -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dzookeeper.requireClientAuthScheme=sasl -Dzookeeper.4lw.commands.whitelist=*
+      ZOOKEEPER_SERVER_ID: 1
+    volumes:
+      - ./assets/zk/zk_server_jaas.conf:/etc/zookeeper/secrets/jaas/zk_server_jaas.conf
+    networks:
+      net:
+        aliases:
+          - zookeeper
+
+  kafka:
+    image: nexus3.onap.org:10001/onap/dmaap/kafka111:1.1.0
+    container_name: dmaap-kafka
+    ports:
+      - "9092:9092"
+    environment:
+      enableCadi: 'false'
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+      KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 40000
+      KAFKA_ZOOKEEPER_SESSION_TIMEOUT_MS: 40000
+      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT
+      KAFKA_ADVERTISED_LISTENERS: INTERNAL_PLAINTEXT://kafka:9092
+      KAFKA_LISTENERS: INTERNAL_PLAINTEXT://0.0.0.0:9092
+      KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL_PLAINTEXT
+      KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE: 'false'
+      KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/jaas/zk_client_jaas.conf
+      KAFKA_ZOOKEEPER_SET_ACL: 'true'
+      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+      # Reduced the number of partitions only to avoid the timeout error for the first subscribe call in slow environment
+      KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS: 1
+    volumes:
+      - ./assets/kafka/zk_client_jaas.conf:/etc/kafka/secrets/jaas/zk_client_jaas.conf
+    networks:
+      net:
+        aliases:
+          - kafka
+    depends_on:
+      - zookeeper
+
+  dmaap-mr:
+    image: nexus3.onap.org:10001/onap/dmaap/dmaap-mr:1.3.0
+    container_name: dmaap-mr
+    ports:
+      - "3904:3904"
+      - "3905:3905"
+    environment:
+      enableCadi: 'false'
+    volumes:
+      - ./assets/mr/MsgRtrApi.properties:/appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
+    networks:
+      net:
+        aliases:
+          - dmaap-mr
+    depends_on:
+      - zookeeper
+      - kafka
+
   db:
     container_name: db
     image: postgres
@@ -9,7 +79,7 @@
     environment:
       POSTGRES_PASSWORD: $DB_PASSWORD
       POSTGRES_USER: $DB_USER
     networks:
-      dockercompose_net:
+      net:
        aliases:
        - db
@@ -26,7 +96,7 @@
      - ./assets/ssl_certs/mock_server_cacert.pem:/var/tmp/mock_server_cacert.pem
      - ./assets/ssl_certs/mock_server_key.pem:/var/tmp/mock_server_key.pem
    networks:
-      dockercompose_net:
+      net:
        aliases:
        - aai-sim
@@ -44,7 +114,7 @@
      - ./assets/ssl_certs/mock_server_cacert.pem:/var/tmp/mock_server_cacert.pem
      - ./assets/ssl_certs/mock_server_key.pem:/var/tmp/mock_server_key.pem
    networks:
-      dockercompose_net:
+      net:
        aliases:
        - cbs-sim
@@ -66,11 +136,11 @@
      AAI_SERVICE_PORT: 1080
      DCAE_CA_CERTPATH: '/opt/app/pmsh/etc/certs/cacert.pem'
    networks:
-      dockercompose_net:
+      net:
        aliases:
        - pmsh
 
 networks:
-  dockercompose_net:
-    external: true
+  net:
+    driver: bridge
diff --git a/plans/dcaegen2-services-pmsh/testsuite/setup.sh b/plans/dcaegen2-services-pmsh/testsuite/setup.sh
index d7b988e4..73a219f7 100644
--- a/plans/dcaegen2-services-pmsh/testsuite/setup.sh
+++ b/plans/dcaegen2-services-pmsh/testsuite/setup.sh
@@ -1,16 +1,11 @@
 #!/bin/bash
-# Place the scripts in run order:
-
-source ${WORKSPACE}/scripts/dmaap-message-router/dmaap-mr-launch.sh
-dmaap_mr_launch
-DMAAP_MR_IP=${IP}
 
 export DB_USER=pmsh
 export DB_PASSWORD=pmsh
 
 TEST_PLANS_DIR=$WORKSPACE/plans/dcaegen2-services-pmsh/testsuite
 
-docker-compose -f ${TEST_PLANS_DIR}/docker-compose.yml up -d db aai cbs-sim
+docker-compose -f ${TEST_PLANS_DIR}/docker-compose.yml up -d zookeeper kafka dmaap-mr db aai cbs-sim
 
 # Slow machine running CSITs can affect db coming up in time for PMSH
 echo "Waiting for postgres db to come up..."
@@ -26,6 +21,20 @@ for i in {1..30}; do
 done
 [[ "$db_response" != "0" ]] && echo "Error: postgres db not accessible" && exit 1
 
+DMAAP_MR_IP=$(docker inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}" dmaap-mr)
+
+echo "Waiting for dmaap-message-router to come up ..."
+for i in {1..20}; do
+  dmaap_state=$(curl --write-out '%{http_code}' --silent --output /dev/null $DMAAP_MR_IP:3904/topics)
+  if [[ ${dmaap_state} == "200" ]]
+  then
+    break
+  else
+    sleep 5
+  fi
+done
+[[ "$dmaap_state" != "200" ]] && echo "Error: DMaaP MR container state not healthy" && exit 1
+
 docker-compose -f ${TEST_PLANS_DIR}/docker-compose.yml up -d pmsh
 
 PMSH_IP=$(docker inspect -f "{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}" pmsh)
diff --git a/plans/dcaegen2-services-pmsh/testsuite/teardown.sh b/plans/dcaegen2-services-pmsh/testsuite/teardown.sh
index 75e624e5..a01dfda0 100644
--- a/plans/dcaegen2-services-pmsh/testsuite/teardown.sh
+++ b/plans/dcaegen2-services-pmsh/testsuite/teardown.sh
@@ -1,7 +1,5 @@
 #!/bin/bash
 echo "Starting teardown script"
-source ${WORKSPACE}/scripts/dmaap-message-router/dmaap-mr-teardown.sh
-dmaap_mr_teardown
 TEST_PLANS_DIR=$WORKSPACE/plans/dcaegen2-services-pmsh/testsuite
 mkdir -p $WORKSPACE/archives
 docker exec pmsh /bin/sh -c "cat /var/log/ONAP/dcaegen2/services/pmsh/*"
diff --git a/tests/dcaegen2-services-pmsh/testcases/assets/cbs-expectation-unlocked-config.json b/tests/dcaegen2-services-pmsh/testcases/assets/cbs-expectation-unlocked-config.json
index 60ac2aab..f5de1952 100644
--- a/tests/dcaegen2-services-pmsh/testcases/assets/cbs-expectation-unlocked-config.json
+++ b/tests/dcaegen2-services-pmsh/testcases/assets/cbs-expectation-unlocked-config.json
@@ -87,7 +87,7 @@
         "dmaap_info": {
           "client_id": "1475976809466",
           "client_role": "org.onap.dcae.pmPublisher",
-          "topic_url": "http://dmaap:3904/events/unauthenticated.DCAE_CL_OUTPUT",
+          "topic_url": "http://dmaap-mr:3904/events/unauthenticated.DCAE_CL_OUTPUT",
           "location": "san-francisco"
         },
         "type": "message_router"
@@ -99,14 +99,14 @@
         "dmaap_info": {
           "client_id": "1575976809466",
           "client_role": "org.onap.dcae.aaiSub",
-          "topic_url": "http://dmaap:3904/events/AAI_EVENT",
+          "topic_url": "http://dmaap-mr:3904/events/AAI_EVENT",
           "location": "san-francisco"
         }
       },
       "policy_pm_subscriber": {
         "dmaap_info": {
           "location": "san-francisco",
-          "topic_url": "http://dmaap:3904/events/unauthenticated.PMSH_CL_INPUT",
+          "topic_url": "http://dmaap-mr:3904/events/unauthenticated.PMSH_CL_INPUT",
           "client_role": "org.onap.dcae.pmSubscriber",
           "client_id": "1575876809456"
         },
diff --git a/tests/dcaegen2-services-pmsh/testcases/pmsh.robot b/tests/dcaegen2-services-pmsh/testcases/pmsh.robot
index 4b0dc6e4..71452350 100644
--- a/tests/dcaegen2-services-pmsh/testcases/pmsh.robot
+++ b/tests/dcaegen2-services-pmsh/testcases/pmsh.robot
@@ -7,8 +7,6 @@ Library           String
 Library           Process
 Resource          ../../common.robot
 
-
-Test Setup        CreateSessions
 Test Teardown     Delete All Sessions
 
 
@@ -44,7 +42,7 @@ Verify database tables exist and are empty
     [Tags]            PMSH_02
     [Documentation]   Verify database has been created and is empty
     [Timeout]         10 seconds
-    ${resp}=    Get Request    pmsh_session    ${SUBSCRIPTIONS_ENDPOINT}
+    ${resp}=    GetSubsCall    ${SUBSCRIPTIONS_ENDPOINT}
     Should Be True    ${resp.status_code} == 200
     Should Contain    ${resp.text}    []
 
@@ -53,8 +51,8 @@ Verify PNF detected in AAI when administrative state unlocked
     [Documentation]   Verify PNF detected when administrative state unlocked
     [Timeout]         60 seconds
     SetAdministrativeStateToUnlocked
-    Sleep    31    Allow PMSH time to pick up changes in CBS config
-    ${resp}=    Get Request    pmsh_session    ${SUBSCRIPTIONS_ENDPOINT}
+    Sleep    31     Allow PMSH time to pick up changes in CBS config
+    ${resp}=    GetSubsCall    ${SUBSCRIPTIONS_ENDPOINT}
     Should Be Equal As Strings    ${resp.json()[0]['subscription_status']}    UNLOCKED
     Should Be Equal As Strings    ${resp.json()[0]['network_functions'][0]['nf_name']}    pnf-existing
     Should Be Equal As Strings    ${resp.json()[0]['network_functions'][0]['nf_sub_status']}    PENDING_CREATE
@@ -65,7 +63,7 @@ Verify Policy response on MR is handled
     [Timeout]         60 seconds
     SimulatePolicyResponse    ${MR_POLICY_RESPONSE_PNF_EXISTING}
     Sleep    31 seconds    Ensure Policy response on MR is picked up
-    ${resp}=    Get Request    pmsh_session    ${SUBSCRIPTIONS_ENDPOINT}
+    ${resp}=    GetSubsCall    ${SUBSCRIPTIONS_ENDPOINT}
     Should Be Equal As Strings    ${resp.json()[0]['network_functions'][0]['nf_sub_status']}    CREATED
 
@@ -74,7 +72,7 @@ Verify AAI event on MR detailing new PNF being detected is handled
     [Timeout]         60 seconds
     SimulateNewPNF    ${MR_AAI_PNF_CREATED}
     Sleep    31 seconds    Ensure AAI event on MR is picked up
-    ${resp}=    Get Request    pmsh_session    ${SUBSCRIPTIONS_ENDPOINT}
+    ${resp}=    GetSubsCall    ${SUBSCRIPTIONS_ENDPOINT}
     Should Be Equal As Strings    ${resp.json()[0]['network_functions'][1]['nf_name']}    pnf_newly_discovered
     Should Be Equal As Strings    ${resp.json()[0]['network_functions'][1]['nf_sub_status']}    PENDING_CREATE
 
@@ -84,29 +82,25 @@ Verify AAI event on MR detailing PNF being deleted is handled
     [Timeout]         60 seconds
     SimulateDeletedPNF    ${MR_AAI_PNF_REMOVED}
     Sleep    31 seconds    Ensure AAI event on MR is picked up
-    ${resp}=    Get Request    pmsh_session    ${SUBSCRIPTIONS_ENDPOINT}
+    ${resp}=    GetSubsCall    ${SUBSCRIPTIONS_ENDPOINT}
     Should Not Contain    ${resp.text}    pnf_newly_discovered
 
 
 *** Keywords ***
-CreateSessions
-    Create Session    pmsh_session    ${PMSH_BASE_URL}
-    Create Session    mr_sim_session    ${MR_BASE_URL}
-    Create Session    cbs_sim_session    ${CBS_BASE_URL}
-
 SetAdministrativeStateToUnlocked
     ${data}=    Get Data From File    ${CBS_EXPECTATION_ADMIN_STATE_UNLOCKED}
-    ${resp} =    Put Request    cbs_sim_session    /clear    data={"path": "/service_component_all/.*"}
+    Create Session    cbs_sim_session    ${CBS_BASE_URL}    verify=false
+    ${resp}=    PUT On Session    cbs_sim_session    url=/clear    data={"path": "/service_component_all/.*"}
     Should Be True    ${resp.status_code} == 200
-    Sleep    2    Allow CBS time to set expectation
-    ${resp} =    Put Request    cbs_sim_session    /expectation    data=${data}
+    Sleep    2     Allow CBS time to set expectation
+    ${resp} =    PUT On Session    cbs_sim_session    url=/expectation    data=${data}
     Should Be True    ${resp.status_code} == 201
 
 SimulatePolicyResponse
     [Arguments]    ${expected_contents}
     ${json_value}=    json_from_file    ${expected_contents}
-    ${resp}=    PostCall    ${POLICY_PUBLISH_MR_TOPIC}    ${json_value}
+    ${resp}=    PostMrCall    ${POLICY_PUBLISH_MR_TOPIC}    ${json_value}
     log    ${resp.text}
     Should Be Equal As Strings    ${resp.status_code}    200
     ${count}=    Evaluate    $resp.json().get('count')
     log    'JSON Response Code:'${resp}
 
 SimulateNewPNF
     [Arguments]    ${expected_contents}
     ${json_value}=    json_from_file    ${expected_contents}
-    ${resp}=    PostCall    ${AAI_MR_TOPIC}    ${json_value}
+    ${resp}=    PostMrCall    ${AAI_MR_TOPIC}    ${json_value}
     log    ${resp.text}
     Should Be Equal As Strings    ${resp.status_code}    200
     ${count}=    Evaluate    $resp.json().get('count')
     log    'JSON Response Code:'${resp}
 
 SimulateDeletedPNF
     [Arguments]    ${expected_contents}
     ${json_value}=    json_from_file    ${expected_contents}
-    ${resp}=    PostCall    ${AAI_MR_TOPIC}    ${json_value}
+    ${resp}=    PostMrCall    ${AAI_MR_TOPIC}    ${json_value}
     log    ${resp.text}
     Should Be Equal As Strings    ${resp.status_code}    200
     ${count}=    Evaluate    $resp.json().get('count')
     log    'JSON Response Code:'${resp}
 
-PostCall
+PostMrCall
     [Arguments]    ${url}    ${data}
-    ${headers}=    Create Dictionary    Accept=application/json    Content-Type=application/json
-    ${resp}=    Post Request    mr_sim_session    ${url}    json=${data}    headers=${headers}
-    [Return]    ${resp}
+    Create Session    mr_sim_session    ${MR_BASE_URL}    verify=false
+    ${headers}=    Create Dictionary    Accept=application/json    Content-Type=application/json
+    ${resp}=    POST On Session    mr_sim_session    url=${url}    json=${data}    headers=${headers}
+    [Return]    ${resp}
+
+GetSubsCall
+    [Arguments]    ${url}
+    Create Session    pmsh_session    ${PMSH_BASE_URL}    verify=false
+    ${resp}=    GET On Session    pmsh_session    url=${url}
+    [Return]    ${resp}
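
For local verification outside of a full CSIT run, the refactored messaging stack can be exercised directly with docker-compose and curl. This is a minimal sketch only, assuming it is run from plans/dcaegen2-services-pmsh/testsuite on a host with docker-compose and curl installed, with port 3904 published as in the compose file above; the test payload is a placeholder, not a real event schema:

    #!/bin/bash
    # Local smoke-test sketch (not part of the change itself).
    export DB_USER=pmsh
    export DB_PASSWORD=pmsh

    # Bring up only the messaging services added by this change.
    docker-compose up -d zookeeper kafka dmaap-mr

    # Poll the Message Router REST API until it answers, mirroring setup.sh.
    for i in {1..20}; do
      mr_state=$(curl --write-out '%{http_code}' --silent --output /dev/null http://localhost:3904/topics)
      [[ "$mr_state" == "200" ]] && break
      sleep 5
    done
    [[ "$mr_state" != "200" ]] && echo "Error: DMaaP MR did not become healthy" && exit 1

    # Publish a placeholder message to one of the topics referenced in the CBS configs.
    curl -s -X POST -H 'Content-Type: application/json' \
         -d '{"test": "event"}' \
         http://localhost:3904/events/unauthenticated.PMSH_CL_INPUT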
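
The events published by the SimulateNewPNF and SimulatePolicyResponse keywords can also be read back by hand through the Message Router consume API, which helps when debugging why PMSH did not pick a message up. A hedged sketch under the same assumptions as above; the consumer group, consumer id and payload are illustrative values only, not the real AAI event used by the suite:

    # Create the topic by publishing once, register a consumer group, then publish
    # again and read the second message back (a new consumer group starts at the
    # latest offset, so the first message is not returned).
    curl -s -X POST -H 'Content-Type: application/json' -d '{"seed": true}' \
         http://localhost:3904/events/AAI_EVENT
    curl -s 'http://localhost:3904/events/AAI_EVENT/test-group/1?timeout=1000'
    curl -s -X POST -H 'Content-Type: application/json' \
         -d '{"event-header": {"entity-type": "pnf"}}' \
         http://localhost:3904/events/AAI_EVENT
    curl -s 'http://localhost:3904/events/AAI_EVENT/test-group/1?timeout=15000'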