Merge "Added functional CSIT tests for DFC"
author Gary Wu <gary.i.wu@huawei.com>
Thu, 28 Mar 2019 18:14:11 +0000 (18:14 +0000)
committer Gerrit Code Review <gerrit@onap.org>
Thu, 28 Mar 2019 18:14:11 +0000 (18:14 +0000)
30 files changed:
plans/dcaegen2-collectors-hv-ves/testsuites/.env [deleted file]
plans/dcaegen2-collectors-hv-ves/testsuites/docker-compose.yml
plans/dcaegen2-collectors-hv-ves/testsuites/env.sh [new file with mode: 0755]
plans/dcaegen2-collectors-hv-ves/testsuites/env_local.sh [new file with mode: 0755]
plans/dcaegen2-collectors-hv-ves/testsuites/setup.sh
plans/dcaegen2-collectors-hv-ves/testsuites/teardown.sh
plans/dcaegen2-pmmapper/pmmapper/assets/config.json
plans/dcaegen2-pmmapper/pmmapper/composefile/docker-compose-pmmapper.yml
plans/dcaegen2-pmmapper/pmmapper/dmaapbc.sh
plans/dcaegen2-pmmapper/pmmapper/setup.sh
plans/usecases/5G-bulkpm/assets/addSubscriber.json
plans/usecases/5G-bulkpm/composefile/docker-compose-e2e.yml
plans/usecases/5G-bulkpm/setup.sh
plans/vid/healthCheck/setup.sh
plans/vid/healthCheck/teardown.sh
scripts/vid/kill_containers_and_remove_dataFolders.sh
tests/dcaegen2-collectors-hv-ves/testcases/__init__.robot
tests/dcaegen2-collectors-hv-ves/testcases/authorization.robot
tests/dcaegen2-collectors-hv-ves/testcases/libraries/DcaeAppSimulatorLibrary.py
tests/dcaegen2-collectors-hv-ves/testcases/libraries/XnfSimulatorLibrary.py
tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot
tests/dcaegen2-collectors-hv-ves/testcases/multiple-clients.robot
tests/dcaegen2-pmmapper/pmmapper/assets/A_meas_result.xml [deleted file]
tests/dcaegen2-pmmapper/pmmapper/pmmapper.robot
tests/usecases/5G-bulkpm/BulkpmE2E.robot
tests/vid/resources/docker-compose.yml
tests/vid/resources/simulators/Dockerfile
tests/vid/resources/simulators/SO.py
tests/vid/resources/simulators/test_data_assets/expected_aai_requests.json [new file with mode: 0644]
tests/vid/resources/simulators/test_data_assets/expected_aai_responses.json [new file with mode: 0644]

diff --git a/plans/dcaegen2-collectors-hv-ves/testsuites/.env b/plans/dcaegen2-collectors-hv-ves/testsuites/.env
deleted file mode 100644 (file)
index e028616..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-# ============LICENSE_START=======================================================
-# dcaegen2-collectors-veshv
-# ================================================================================
-# Copyright (C) 2018-2019 NOKIA
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-
-HV_VES_COLLECTOR_NAMESPACE=onap
-HV_VES_GROUP_ID=org.onap.dcaegen2.collectors.hv-ves
-HV_VES_IMAGE=hv-collector-main
-DCAE_APP_SIMULATOR_IMAGE=hv-collector-dcae-app-simulator
-
-# IF CHANGING BELOW VALUE, REMEMBER TO UPDATE ALSO XNF-SIMULATOR VERSION
-# IN XNF_SIMULATOR_LIBRARY PYTHON FILE.
-HV_VES_VERSION=1.1-SNAPSHOT
-
-HV_VES_HEALTHCHECK_CMD=curl --request GET --fail --silent --show-error localhost:6060/health/ready && nc -vz localhost 6061
-
-JAVA_OPTS=-Dio.netty.leakDetection.level=paranoid
-CONSUL_HOST=consul-server
-CONFIG_BINDING_SERVICE=config-binding-service
-HV_VES_HOSTNAME=dcae-hv-ves-collector
\ No newline at end of file
diff --git a/plans/dcaegen2-collectors-hv-ves/testsuites/docker-compose.yml b/plans/dcaegen2-collectors-hv-ves/testsuites/docker-compose.yml
index bc7105e..1d34e9f 100644 (file)
@@ -19,7 +19,7 @@ version: "3"
 
 
 networks:
-  ves-hv-default:
+  hv-ves-default:
     external:
       name: $CONTAINERS_NETWORK
 
@@ -34,7 +34,7 @@ services:
     ports:
       - "2181:2181"
     networks:
-      - ves-hv-default
+      - hv-ves-default
 
   kafka:
     image: wurstmeister/kafka
@@ -50,7 +50,7 @@ services:
     depends_on:
       - zookeeper
     networks:
-      - ves-hv-default
+      - hv-ves-default
 
   #
   # Consul / CBS
@@ -63,7 +63,7 @@ services:
     volumes:
       - ./consul/:/consul/config
     networks:
-      - ves-hv-default
+      - hv-ves-default
 
   config-binding-service:
     image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding.app-app:2.2.4
@@ -74,13 +74,13 @@ services:
     depends_on:
       - consul-server
     networks:
-      - ves-hv-default
+      - hv-ves-default
 
   #
   # DCAE HV VES Collector
   #
 
-  ves-hv-collector:
+  hv-ves-collector:
     image: ${DOCKER_REGISTRY}/${HV_VES_COLLECTOR_NAMESPACE}/${HV_VES_GROUP_ID}.${HV_VES_IMAGE}:${HV_VES_VERSION}
     ports:
       - "6060:6060"
@@ -102,9 +102,9 @@ services:
       - config-binding-service
       - kafka
     networks:
-      - ves-hv-default
+      - hv-ves-default
 
-  unencrypted-ves-hv-collector:
+  unencrypted-hv-ves-collector:
     image: ${DOCKER_REGISTRY}/${HV_VES_COLLECTOR_NAMESPACE}/${HV_VES_GROUP_ID}.${HV_VES_IMAGE}:${HV_VES_VERSION}
     ports:
       - "7060:6060"
@@ -126,7 +126,7 @@ services:
       - config-binding-service
       - kafka
     networks:
-      - ves-hv-default
+      - hv-ves-default
 
   dcae-app-simulator:
     image: ${DOCKER_REGISTRY}/${HV_VES_COLLECTOR_NAMESPACE}/${HV_VES_GROUP_ID}.${DCAE_APP_SIMULATOR_IMAGE}:${HV_VES_VERSION}
@@ -141,5 +141,5 @@ services:
     depends_on:
       - kafka
     networks:
-      - ves-hv-default
+      - hv-ves-default
 
diff --git a/plans/dcaegen2-collectors-hv-ves/testsuites/env.sh b/plans/dcaegen2-collectors-hv-ves/testsuites/env.sh
new file mode 100755 (executable)
index 0000000..bbb5493
--- /dev/null
@@ -0,0 +1,42 @@
+#!/usr/bin/env bash
+# ============LICENSE_START=======================================================
+# csit-dcaegen2-collectors-hv-ves
+# ================================================================================
+# Copyright (C) 2019 NOKIA
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+# using WORKSPACE variable defined in run-csit.sh
+export ROBOT_VARIABLES="--pythonpath ${WORKSPACE}/tests/dcaegen2-collectors-hv-ves/testcases/libraries"
+
+export JAVA_OPTS="-Dio.netty.leakDetection.level=paranoid"
+export CONSUL_HOST="consul-server"
+export CONFIG_BINDING_SERVICE="config-binding-service"
+
+export DOCKER_REGISTRY="nexus3.onap.org:10001"
+export DOCKER_REGISTRY_PREFIX="${DOCKER_REGISTRY}/"
+export CONTAINERS_NETWORK="hv-ves-default"
+export HV_VES_SERVICE_NAME="hv-ves-collector"
+export UNENCRYPTED_HV_VES_SERVICE_NAME="unencrypted-hv-ves-collector"
+
+export HV_VES_GROUP_ID="org.onap.dcaegen2.collectors.hv-ves"
+export HV_VES_HOSTNAME="dcae-hv-ves-collector"
+export HV_VES_COLLECTOR_NAMESPACE="onap"
+export HV_VES_HEALTHCHECK_CMD='curl --request GET --fail --silent --show-error localhost:6060/health/ready && nc -vz localhost 6061'
+export HV_VES_VERSION="1.1-SNAPSHOT"
+export HV_VES_IMAGE="hv-collector-main"
+export DCAE_APP_SIMULATOR_IMAGE="hv-collector-dcae-app-simulator"
+export XNF_SIMULATOR_IMAGE="hv-ves-collector-xnf-simulator"
+
+
diff --git a/plans/dcaegen2-collectors-hv-ves/testsuites/env_local.sh b/plans/dcaegen2-collectors-hv-ves/testsuites/env_local.sh
new file mode 100755 (executable)
index 0000000..afe8a54
--- /dev/null
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+# ============LICENSE_START=======================================================
+# csit-dcaegen2-collectors-hv-ves
+# ================================================================================
+# Copyright (C) 2019 NOKIA
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+
+export WORKSPACE=$(git rev-parse --show-toplevel)
+export ROBOT_VARIABLES="--pythonpath ${WORKSPACE}/tests/dcaegen2-collectors-hv-ves/testcases/libraries"
+
+export JAVA_OPTS="-Dio.netty.leakDetection.level=paranoid"
+export CONSUL_HOST="consul-server"
+export CONFIG_BINDING_SERVICE="config-binding-service"
+
+export DOCKER_REGISTRY="docker.io"
+export DOCKER_REGISTRY_PREFIX=""
+export CONTAINERS_NETWORK="hv-ves-default"
+export HV_VES_SERVICE_NAME="hv-ves-collector"
+export UNENCRYPTED_HV_VES_SERVICE_NAME="unencrypted-hv-ves-collector"
+
+export HV_VES_GROUP_ID="org.onap.dcaegen2.collectors.hv-ves"
+export HV_VES_HOSTNAME="dcae-hv-ves-collector"
+export HV_VES_COLLECTOR_NAMESPACE="onap"
+export HV_VES_HEALTHCHECK_CMD='curl --request GET --fail --silent --show-error localhost:6060/health/ready && nc -vz localhost 6061'
+export HV_VES_VERSION="latest"
+export HV_VES_IMAGE="hv-collector-main"
+export DCAE_APP_SIMULATOR_IMAGE="hv-collector-dcae-app-simulator"
+export XNF_SIMULATOR_IMAGE="hv-ves-collector-xnf-simulator"
+
+
+
diff --git a/plans/dcaegen2-collectors-hv-ves/testsuites/setup.sh b/plans/dcaegen2-collectors-hv-ves/testsuites/setup.sh
index 34baae8..06eaf54 100755 (executable)
 
 set -euo pipefail
 
-if [[ $# -eq 1 ]] && [[ $1 == "local-test-run" ]]; then
+RUN_CSIT_LOCAL=${RUN_CSIT_LOCAL:-false}
+
+if ${RUN_CSIT_LOCAL} ; then
   echo "Building locally - assuming all dependencies are installed"
-  export DOCKER_REGISTRY=""
-  export DOCKER_REGISTRY_PREFIX=""
-  export WORKSPACE=$(git rev-parse --show-toplevel)
+  source env_local.sh
 else
   echo "Default run - install all dependencies"
-
   pip uninstall -y docker-py
   pip install docker
 
@@ -34,15 +33,12 @@ else
   COMPOSE_LOCATION='/usr/local/bin/docker-compose'
   sudo curl -L https://github.com/docker/compose/releases/download/${COMPOSE_VERSION}/docker-compose-$(uname -s)-$(uname -m) -o ${COMPOSE_LOCATION}
   sudo chmod +x ${COMPOSE_LOCATION}
-
-  export DOCKER_REGISTRY="nexus3.onap.org:10001"
-  export DOCKER_REGISTRY_PREFIX="${DOCKER_REGISTRY}/"
+  source env.sh
 fi
 
 echo "Removing not used docker networks"
 docker network prune -f
 
-export CONTAINERS_NETWORK=ves-hv-default
 echo "Creating network for containers: ${CONTAINERS_NETWORK}"
 docker network create ${CONTAINERS_NETWORK}
 
@@ -53,5 +49,3 @@ cd ../..
 docker-compose up -d
 
 mkdir -p ${WORKSPACE}/archives/containers_logs
-
-export ROBOT_VARIABLES="--pythonpath ${WORKSPACE}/tests/dcaegen2-collectors-hv-ves/testcases/libraries"
diff --git a/plans/dcaegen2-collectors-hv-ves/testsuites/teardown.sh b/plans/dcaegen2-collectors-hv-ves/testsuites/teardown.sh
index ec39215..787f811 100755 (executable)
 # limitations under the License.
 # ============LICENSE_END=========================================================
 
+RUN_CSIT_LOCAL=${RUN_CSIT_LOCAL:-false}
+
 cd collector/ssl
 ./gen-certs.sh clean
 cd ../..
 
+if ${RUN_CSIT_LOCAL} ; then
+  echo "Tearing down local setup"
+  source env_local.sh
+else
+  echo "Tearing down"
+  source env.sh
+fi
+
+set +e
+
 COMPOSE_LOGS_FILE=${WORKSPACE}/archives/containers_logs/docker-compose.log
 docker-compose logs > ${COMPOSE_LOGS_FILE}
 docker-compose down
 docker-compose rm -f
 
+echo "Stopping leftover containers"
+LEFTOVER_CONTAINERS=$(docker ps -aqf network=${CONTAINERS_NETWORK} | awk '{print $1}')
+docker stop ${LEFTOVER_CONTAINERS}
+docker rm ${LEFTOVER_CONTAINERS}
 docker network rm ${CONTAINERS_NETWORK}
 
+set -e
+
 if grep "LEAK:" ${COMPOSE_LOGS_FILE}; then
     echo "WARNING: Memory leak detected in docker-compose logs."
 fi
diff --git a/plans/dcaegen2-pmmapper/pmmapper/assets/config.json b/plans/dcaegen2-pmmapper/pmmapper/assets/config.json
index 5eec438..79b3201 100644 (file)
@@ -19,8 +19,8 @@
             "type": "message_router",
             "aaf_password": null,
             "dmaap_info": {
-                "topic_url": "https://message-router:3904/events/org.onap.dmaap.onapCSIT.pm_mapper",
-                "client_role": "org.onap.dmaap.client.pub",
+                "topic_url": "http://message-router:3904/events/topic.org.onap.dmaap.mr.test1",
+                "client_role": "org.onap.dmaap.mr.topic",
                 "location": "csit-pmmapper",
                 "client_id": null
             },
diff --git a/plans/dcaegen2-pmmapper/pmmapper/composefile/docker-compose-pmmapper.yml b/plans/dcaegen2-pmmapper/pmmapper/composefile/docker-compose-pmmapper.yml
index b14a73c..a7f5b97 100644 (file)
@@ -12,3 +12,4 @@ services:
     extra_hosts:
       - "dmaap-bc:BUSIP"
       - "dmaap-dr-node:DRNODEIP"
+      - "message-router:DMAAPMRIP"
diff --git a/plans/dcaegen2-pmmapper/pmmapper/dmaapbc.sh b/plans/dcaegen2-pmmapper/pmmapper/dmaapbc.sh
index 1405112..1430258 100755 (executable)
@@ -1,22 +1,23 @@
 #!/bin/bash
 # $1 is the IP address of the buscontroller
+
 # INITIALIZE: dmaap object
+echo $'\nInitializing /dmaap endpoint'
 JSON=/tmp/dmaap.json
 cat << EOF > $JSON
 {
 "version": "1",
-"topicNsRoot": "org.onap.dmaap",
+"topicNsRoot": "topic.org.onap.dmaap",
 "drProvUrl": "https://dmaap-dr-prov:8443",
-"dmaapName": "DataRouter",
+"dmaapName": "mr",
 "bridgeAdminTopic": "MM_AGENT_PROV"
 
 }
 EOF
-
-echo "Initializing /dmaap endpoint"
 curl -v -X POST -d @${JSON} -H "Content-Type: application/json" http://$1:8080/webapi/dmaap
 
 # INITIALIZE: dcaeLocation object
+echo $'\nInitializing /dcaeLocations endpoint'
 JSON=/tmp/dcaeLocation.json
 cat << EOF > $JSON
 {
@@ -26,26 +27,25 @@ cat << EOF > $JSON
 "zone": "zoneA"
 }
 EOF
-
-echo "Initializing /dcaeLocations endpoint"
 curl -v -X POST -d @${JSON} -H "Content-Type: application/json" http://$1:8080/webapi/dcaeLocations
 
 # INITIALIZE: MR object in 1 site
+echo $'\nInitializing /mr_clusters endpoint'
+DMAAP=$(docker ps -a -q --filter="name=dmaap_1")
 DMAAP_MR_IP=$(docker inspect --format='{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' $DMAAP)
 JSON=/tmp/mr.json
 cat << EOF > $JSON
 {
 "dcaeLocationName": "csit-pmmapper",
-"fqdn": "$DMAAP_MR_IP",
+"fqdn": "${DMAAP_MR_IP}",
 "topicProtocol" : "http",
 "topicPort": "3904"
 }
 EOF
-
-echo "Initializing /mr_clusters endpoint"
 curl -v -X POST -d @${JSON} -H "Content-Type: application/json" http://$1:8080/webapi/mr_clusters
 
 # CREATING: DR feed
+echo $'\nInitializing /feeds endpoint'
 JSON=/tmp/feed.json
 cat << EOF > $JSON
 {
@@ -67,6 +67,30 @@ cat << EOF > $JSON
         ]
 }
 EOF
-echo "Initializing /feeds endpoint"
 curl -v -X POST -d @${JSON} -H "Content-Type: application/json" http://$1:8080/webapi/feeds
+
+# CREATING: MR Topic
+echo $'\nInitializing /topics endpoint'
+JSON=/tmp/topic.json
+cat << EOF > $JSON
+{
+"topicName":"test1",
+"topicDescription":"PM Mapper - VES Event",
+"owner":"pmmapper"
+}
+EOF
+curl -v -X POST -d @${JSON} -H "Content-Type: application/json" http://$1:8080/webapi/topics
+
+# CREATING: MR Client
+echo $'\nInitializing /mr_clients endpoint'
+JSON=/tmp/mrclients.json
+cat << EOF > $JSON
+{
+"fqtn": "topic.org.onap.dmaap.mr.test1",
+"dcaeLocationName": "csit-pmmapper",
+"clientRole": "org.onap.dmaap.mr.topic",
+"action": [ "pub", "view" ]
+}
+EOF
+curl -v -X POST -d @${JSON} -H "Content-Type: application/json" http://$1:8080/webapi/mr_clients
 sleep 5
\ No newline at end of file
diff --git a/plans/dcaegen2-pmmapper/pmmapper/setup.sh b/plans/dcaegen2-pmmapper/pmmapper/setup.sh
index 2924bd2..18d8237 100644 (file)
@@ -122,6 +122,7 @@ CBS_IP=$(docker inspect '--format={{range .NetworkSettings.Networks}}{{.IPAddres
 sed -i 's/CBSIP/'$CBS_IP'/g' docker-compose.yml
 sed -i 's/BUSIP/'$DMAAPBC_IP'/g' docker-compose.yml
 sed -i 's/DRNODEIP/'$DR_NODE_IP'/g' docker-compose.yml
+sed -i 's/DMAAPMRIP/'$DMAAP_MR_IP'/g' docker-compose.yml
 docker-compose up -d
 
 # Wait for initialization of Docker container for 3GPP PM Mapper
@@ -143,6 +144,7 @@ docker exec pmmapper /bin/sh -c "cat /var/log/ONAP/dcaegen2/services/pm-mapper/p
 cat /tmp/pmmapper.log
 docker exec buscontroller /bin/sh -c "cat /opt/app/dmaapbc/logs/ONAP/application.log"
 curl -k https://$DR_PROV_IP:8443/internal/prov
+curl http://${DMAAP_MR_IP}:3904/events/topic.org.onap.dmaap.mr.test1/CG1/C1?timeout=1000
 
 #Pass any variables required by Robot test suites in ROBOT_VARIABLES
-ROBOT_VARIABLES="-v CONSUL_IP:${CONSUL_IP} -v DR_PROV_IP:${DR_PROV_IP} -v DMAAPBC_IP:${DMAAPBC_IP} -v DMAAP_MR_IP:${DMAAP_MR_IP} -v CBS_IP:${CBS_IP} -v PMMAPPER_IP:${PMMAPPER_IP} -v DR_NODE_IP:${DR_NODE_IP}"
\ No newline at end of file
+ROBOT_VARIABLES="-v DMAAP_MR_IP:${DMAAP_MR_IP} -v CONSUL_IP:${CONSUL_IP} -v DR_PROV_IP:${DR_PROV_IP} -v DMAAPBC_IP:${DMAAPBC_IP} -v CBS_IP:${CBS_IP} -v PMMAPPER_IP:${PMMAPPER_IP} -v DR_NODE_IP:${DR_NODE_IP}"
\ No newline at end of file
diff --git a/plans/usecases/5G-bulkpm/assets/addSubscriber.json b/plans/usecases/5G-bulkpm/assets/addSubscriber.json
index 612c462..bab63c4 100644 (file)
@@ -14,5 +14,6 @@
        "log": "https://dmaap-dr-prov/feedlog/1",
        "feed": "https://dmaap-dr-prov/feed/1"
     },
-    "subscriber":"admin"
+    "subscriber":"admin",
+    "decompress":true
  }
\ No newline at end of file
diff --git a/plans/usecases/5G-bulkpm/composefile/docker-compose-e2e.yml b/plans/usecases/5G-bulkpm/composefile/docker-compose-e2e.yml
index 8f1abb5..f3c47bb 100644 (file)
@@ -1,7 +1,7 @@
 version: '2.1'
 services:
   datarouter-prov:
-    image: nexus3.onap.org:10001/onap/dmaap/datarouter-prov:2.0.1
+    image: nexus3.onap.org:10001/onap/dmaap/datarouter-prov:2.0.2-SNAPSHOT-latest
     container_name: datarouter-prov
     hostname: dmaap-dr-prov
     ports:
@@ -22,7 +22,7 @@ services:
       retries: 5
 
   datarouter-node:
-    image: nexus3.onap.org:10001/onap/dmaap/datarouter-node:2.0.1
+    image: nexus3.onap.org:10001/onap/dmaap/datarouter-node:2.0.2-SNAPSHOT-latest
     container_name: datarouter-node
     hostname: dmaap-dr-node
     ports:
@@ -35,7 +35,7 @@ services:
         condition: service_healthy
 
   datarouter-subscriber:
-      image: nexus3.onap.org:10001/onap/dmaap/datarouter-subscriber:2.0.1
+      image: nexus3.onap.org:10001/onap/dmaap/datarouter-subscriber:2.0.2-SNAPSHOT-latest
       container_name: fileconsumer-node
       hostname: subscriber.com
       ports:
diff --git a/plans/usecases/5G-bulkpm/setup.sh b/plans/usecases/5G-bulkpm/setup.sh
index da11aa5..3d3a9ef 100644 (file)
@@ -152,7 +152,7 @@ echo data_endpoints.json from DFC containter
 cat /tmp/datafile_endpoints.json.fromcontainer
 docker cp /tmp/datafile_endpoints.json dfc:/opt/app/datafile/config/
 #Increase Logging
-docker exec dfc /bin/sh -c " sed -i 's/org.onap.dcaegen2.collectors.datafile: ERROR/org.onap.dcaegen2.collectors.datafile: TRACE/g' /opt/app/datafile/config/application.yaml"
+docker exec dfc /bin/sh -c " sed -i 's/org.onap.dcaegen2.collectors.datafile: WARN/org.onap.dcaegen2.collectors.datafile: TRACE/g' /opt/app/datafile/config/application.yaml"
 docker restart dfc
 sleep 2
 
@@ -258,10 +258,10 @@ sed -i 's/sftpport/'${SFTP_PORT}'/g' $WORKSPACE/tests/usecases/5G-bulkpm/assets/
 docker cp $WORKSPACE/plans/usecases/5G-bulkpm/assets/xNF.pm.xml.gz sftp:/home/admin/
 
 # Create default feed and create file consumer subscriber on data router
-curl -v -X POST -H "Content-Type:application/vnd.att-dr.feed" -H "X-ATT-DR-ON-BEHALF-OF:dradmin" --data-ascii @$WORKSPACE/plans/usecases/5G-bulkpm/assets/createFeed.json --post301 --location-trusted -k https://${DR_PROV_IP}:8443
+curl -v -X POST -H "Content-Type:application/vnd.dmaap-dr.feed" -H "X-DMAAP-DR-ON-BEHALF-OF:dradmin" --data-ascii @$WORKSPACE/plans/usecases/5G-bulkpm/assets/createFeed.json --post301 --location-trusted -k https://${DR_PROV_IP}:8443
 cp $WORKSPACE/plans/usecases/5G-bulkpm/assets/addSubscriber.json /tmp/addSubscriber.json
 sed -i 's/fileconsumer/'${HOST_IP}'/g' /tmp/addSubscriber.json
-curl -v -X POST -H "Content-Type:application/vnd.att-dr.subscription" -H "X-ATT-DR-ON-BEHALF-OF:dradmin" --data-ascii @/tmp/addSubscriber.json --post301 --location-trusted -k https://${DR_PROV_IP}:8443/subscribe/1
+curl -v -X POST -H "Content-Type:application/vnd.dmaap-dr.subscription" -H "X-DMAAP-DR-ON-BEHALF-OF:dradmin" --data-ascii @/tmp/addSubscriber.json --post301 --location-trusted -k https://${DR_PROV_IP}:8443/subscribe/1
 sleep 10
 curl -k https://$DR_PROV_IP:8443/internal/prov
 
diff --git a/plans/vid/healthCheck/setup.sh b/plans/vid/healthCheck/setup.sh
index 584a640..50a28b6 100644 (file)
@@ -25,7 +25,7 @@ source ${SCRIPTS}/common_functions.sh
 source ${WORKSPACE}/scripts/vid/clone_and_setup_vid_data.sh
 source ${WORKSPACE}/scripts/vid/start_vid_containers.sh
 
-echo `Obtaining ip of VID server...`
+echo "Obtaining ip of VID server..."
 VID_IP=`get-instance-ip.sh vid-server`
 SO_SIMULATOR_IP=`get-instance-ip.sh so-simulator`
 
diff --git a/plans/vid/healthCheck/teardown.sh b/plans/vid/healthCheck/teardown.sh
index 8f16867..e5e5140 100644 (file)
@@ -18,6 +18,5 @@
 #
 
 source ${WORKSPACE}/scripts/vid/kill_containers_and_remove_dataFolders.sh
-docker kill so-simulator
 
 # $WORKSPACE/archives/clamp-clone deleted with archives folder when tests starts so we keep it at the end for debugging
diff --git a/scripts/vid/kill_containers_and_remove_dataFolders.sh b/scripts/vid/kill_containers_and_remove_dataFolders.sh
index 9e72a42..d7e0553 100644 (file)
@@ -20,8 +20,9 @@
 echo "This is ${WORKSPACE}/scripts/vid/kill_and_remove_dataFolder.sh"
 
 #kill and remove all vid dockers
-docker stop $(docker ps -a -q --filter="name=vid")
-docker rm $(docker ps -a -q --filter="name=vid")
+cd ${WORKSPACE}/tests/vid/resources
+docker-compose down
+docker-compose rm -f
 
 
 #delete data folder
diff --git a/tests/dcaegen2-collectors-hv-ves/testcases/__init__.robot b/tests/dcaegen2-collectors-hv-ves/testcases/__init__.robot
index 54d3e1c..1ac9150 100644 (file)
@@ -33,8 +33,8 @@ HV-VES Collector Suites Setup
 
 Configure collector
     ${CONSUL_API_ACCESS}=   Get Consul Api Access Url   ${HTTP_METHOD_URL}   ${CONSUL_CONTAINER_HOST}   ${CONSUL_CONTAINER_PORT}
-    ${CONSUL_API_URL}=  Catenate   SEPARATOR=   ${CONSUL_API_ACCESS}   ${CONSUL_VES_HV_CONFIGURATION_KEY_PATH}
-    Publish HV VES Configuration In Consul    ${CONSUL_API_URL}   ${VES_HV_CONFIGURATION_JSON_FILEPATH}
+    ${CONSUL_API_URL}=  Catenate   SEPARATOR=   ${CONSUL_API_ACCESS}   ${CONSUL_HV_VES_CONFIGURATION_KEY_PATH}
+    Publish HV VES Configuration In Consul    ${CONSUL_API_URL}   ${HV_VES_CONFIGURATION_JSON_FILEPATH}
 
 Configure Dcae App
     ${DCAE_APP_API_ACCESS}=   Get Dcae App Api Access Url   ${HTTP_METHOD_URL}   ${DCAE_APP_CONTAINER_HOST}   ${DCAE_APP_CONTAINER_PORT}
@@ -58,7 +58,7 @@ ${HTTP_METHOD_URL}                             http://
 
 ${CONSUL_CONTAINER_HOST}                       consul-server
 ${CONSUL_CONTAINER_PORT}                       8500
-${CONSUL_VES_HV_CONFIGURATION_KEY_PATH}        /v1/kv/dcae-hv-ves-collector
+${CONSUL_HV_VES_CONFIGURATION_KEY_PATH}        /v1/kv/dcae-hv-ves-collector
 
 ${DCAE_APP_CONTAINER_HOST}                     dcae-app-simulator
 ${DCAE_APP_CONTAINER_PORT}                     6063
@@ -70,5 +70,5 @@ ${DCAE_APP_API_MESSAGES_VALIDATION_PATH}       ${DCAE_APP_API_MESSAGES_PATH}/val
 
 ${ROUTED_MESSAGES_TOPIC}                       TEST_HV_VES_PERF3GPP
 
-${VES_HV_RESOURCES}                            %{WORKSPACE}/tests/dcaegen2-collectors-hv-ves/testcases/resources
-${VES_HV_CONFIGURATION_JSON_FILEPATH}          ${VES_HV_RESOURCES}/ves-hv-configuration.json
+${HV_VES_RESOURCES}                            %{WORKSPACE}/tests/dcaegen2-collectors-hv-ves/testcases/resources
+${HV_VES_CONFIGURATION_JSON_FILEPATH}          ${HV_VES_RESOURCES}/ves-hv-configuration.json
diff --git a/tests/dcaegen2-collectors-hv-ves/testcases/authorization.robot b/tests/dcaegen2-collectors-hv-ves/testcases/authorization.robot
index d5fc1e9..4d13a6b 100644 (file)
@@ -72,9 +72,9 @@ Unencrypted connection on both ends
 
 
 *** Variables ***
-${VES_HV_SCENARIOS}                            %{WORKSPACE}/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios
+${HV_VES_SCENARIOS}                            %{WORKSPACE}/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios
 
-${XNF_VALID_MESSAGES_REQUEST}                  ${VES_HV_SCENARIOS}/authorization/xnf-valid-messages-request.json
+${XNF_VALID_MESSAGES_REQUEST}                  ${HV_VES_SCENARIOS}/authorization/xnf-valid-messages-request.json
 
 ${AMOUNT_0}                                    0
 ${AMOUNT_5000}                                 5000
diff --git a/tests/dcaegen2-collectors-hv-ves/testcases/libraries/DcaeAppSimulatorLibrary.py b/tests/dcaegen2-collectors-hv-ves/testcases/libraries/DcaeAppSimulatorLibrary.py
index 3d811e4..dd41c6a 100644 (file)
@@ -33,8 +33,8 @@ class DcaeAppSimulatorLibrary:
         resp = HttpRequests.session_without_env().get(app_url, timeout=10)
         HttpRequests.checkStatusCode(resp.status_code, DCAE_APP_NAME)
 
-        assert resp.content == expected_messages_amount, \
-            "Messages consumed by simulator: " + resp.content + " expecting: " + expected_messages_amount
+        assert int(resp.content) == int(expected_messages_amount), \
+            "Messages consumed by simulator: " + str(resp.content) + " expecting: " + str(expected_messages_amount)
 
     def assert_DCAE_app_consumed_less_equal_than(self, app_url, messages_threshold):
         logger.info("GET at: " + app_url)
@@ -45,8 +45,8 @@ class DcaeAppSimulatorLibrary:
                      " expecting more than 0 and less/equal than " + messages_threshold)
 
         assert 0 < int(resp.content) <= int(messages_threshold), \
-            "Messages consumed by simulator: " + resp.content + \
-            " expecting more than 0 and less/equal than " + messages_threshold
+            "Messages consumed by simulator: " + str(resp.content) + \
+            " expecting more than 0 and less/equal than " + str(messages_threshold)
 
     def reset_DCAE_app_simulator(self, app_url):
         logger.info("DELETE at: " + app_url)
diff --git a/tests/dcaegen2-collectors-hv-ves/testcases/libraries/XnfSimulatorLibrary.py b/tests/dcaegen2-collectors-hv-ves/testcases/libraries/XnfSimulatorLibrary.py
index 3f69f6a..789c0b6 100644 (file)
@@ -22,15 +22,21 @@ import docker
 from robot.api import logger
 from time import sleep
 
+HV_VES_VERSION = os.getenv("HV_VES_VERSION")
+HV_VES_COLLECTOR_NETWORK = os.getenv("CONTAINERS_NETWORK")
+HV_VES_COLLECTOR_NAMESPACE = os.getenv("HV_VES_COLLECTOR_NAMESPACE")
+HV_VES_GROUP_ID = os.getenv("HV_VES_GROUP_ID")
+HV_VES_SERVICE_NAME = os.getenv("HV_VES_SERVICE_NAME")
+UNENCRYPTED_HV_VES_SERVICE_NAME = os.getenv("UNENCRYPTED_HV_VES_SERVICE_NAME")
+
 XNF_SIMULATOR_NAME = "xNF Simulator"
-HV_VES_COLLECTOR_NAMESPACE="onap"
-HV_VES_GROUP_ID="org.onap.dcaegen2.collectors.hv-ves"
+XNF_SIMULATOR_CONTAINER_PREFIX = os.getenv("XNF_SIMULATOR_IMAGE")
 SIMULATOR_IMAGE_NAME = HV_VES_COLLECTOR_NAMESPACE + "/" + HV_VES_GROUP_ID + ".hv-collector-xnf-simulator"
-HV_VES_VERSION="1.1-SNAPSHOT"
+
 SIMULATOR_IMAGE_FULL_NAME = os.getenv("DOCKER_REGISTRY_PREFIX") + SIMULATOR_IMAGE_NAME + ":" + HV_VES_VERSION
 WORKSPACE_ENV = os.getenv("WORKSPACE")
-certificates_dir_path = WORKSPACE_ENV + "/plans/dcaegen2-collectors-hv-ves/testsuites/collector/ssl/"
-collector_certs_lookup_dir = "/etc/ves-hv/"
+CERTIFICATES_DIR_PATH = WORKSPACE_ENV + "/plans/dcaegen2-collectors-hv-ves/testsuites/collector/ssl/"
+COLLECTOR_CERTS_LOOKUP_DIR = "/etc/ves-hv/"
 ONE_SECOND_IN_NANOS = 10 ** 9
 
 
@@ -88,13 +94,13 @@ class XnfSimulatorLibrary:
                                            command=xNF_startup_command,
                                            healthcheck=xNF_healthcheck_command,
                                            detach=True,
-                                           network="ves-hv-default",
+                                           network=HV_VES_COLLECTOR_NETWORK,
                                            ports={port + "/tcp": port},
                                            volumes=self.container_volumes(),
-                                           name=xnf.container_name_prefix + port)
+                                           name=XNF_SIMULATOR_CONTAINER_PREFIX + port)
 
     def container_volumes(self):
-        return {certificates_dir_path: {"bind": collector_certs_lookup_dir, "mode": 'rw'}}
+        return {CERTIFICATES_DIR_PATH: {"bind": COLLECTOR_CERTS_LOOKUP_DIR, "mode": 'rw'}}
 
     def assert_containers_startup_was_successful(self, dockerClient):
         checks_amount = 6
@@ -119,7 +125,7 @@ class XnfSimulatorLibrary:
             log_filename = WORKSPACE_ENV + "/archives/containers_logs/" + \
                            suite_name.split(".")[-1] + "_" + container.name + ".log"
             file = open(log_filename, "w+")
-            file.write(container.logs())
+            file.write(str(container.logs()))
             file.close()
             container.stop()
             container.remove()
@@ -141,7 +147,6 @@ class XnfSimulatorLibrary:
 
 
 class XnfSimulator:
-    container_name_prefix = "ves-hv-collector-xnf-simulator"
 
     def __init__(self,
                  port,
@@ -151,13 +156,13 @@ class XnfSimulator:
         self.port = port
         self.healthcheck_server_port = "6063"
         cert_name_prefix = "" if should_use_valid_certs else "untrusted"
-        certificates_path_with_file_prefix = collector_certs_lookup_dir + cert_name_prefix
+        certificates_path_with_file_prefix = COLLECTOR_CERTS_LOOKUP_DIR + cert_name_prefix
         self.key_store_path = certificates_path_with_file_prefix + "client.p12"
         self.trust_store_path = certificates_path_with_file_prefix + "trust.p12"
         self.sec_store_passwd = "onaponap"
         self.disable_ssl = should_disable_ssl
-        self.hv_collector_host = "unencrypted-ves-hv-collector" \
-            if should_connect_to_unencrypted_hv_ves else "ves-hv-collector"
+        self.hv_collector_host = UNENCRYPTED_HV_VES_SERVICE_NAME \
+            if should_connect_to_unencrypted_hv_ves else HV_VES_SERVICE_NAME
 
     def get_startup_command(self):
         startup_command = ["--listen-port", self.port,
@@ -167,8 +172,7 @@ class XnfSimulator:
                            "--key-store", self.key_store_path,
                            "--trust-store", self.trust_store_path,
                            "--key-store-password", self.sec_store_passwd,
-                           "--trust-store-password", self.sec_store_passwd
-                           ]
+                           "--trust-store-password", self.sec_store_passwd]
         if self.disable_ssl:
             startup_command.append("--ssl-disable")
         return startup_command
diff --git a/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot b/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot
index 15c8ee8..2fab329 100644 (file)
@@ -96,18 +96,18 @@ ${HTTP_METHOD_URL}                             http://
 
 ${XNF_SIM_API_PATH}                            /simulator/async
 
-${VES_HV_SCENARIOS}                            %{WORKSPACE}/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios
-${XNF_FIXED_PAYLOAD_REQUEST}                   ${VES_HV_SCENARIOS}/fixed-payload/xnf-fixed-payload-request.json
-${XNF_TOO_BIG_PAYLOAD_REQUEST}                 ${VES_HV_SCENARIOS}/too-big-payload/xnf-too-big-payload-request.json
-${XNF_INVALID_WIRE_FRAME_REQUEST}              ${VES_HV_SCENARIOS}/invalid-wire-frame/xnf-invalid-wire-frame-request.json
-${XNF_INVALID_GPB_DATA_REQUEST}                ${VES_HV_SCENARIOS}/invalid-gpb-data/xnf-invalid-gpb-data-request.json
-${XNF_UNSUPPORTED_DOMAIN_REQUEST}              ${VES_HV_SCENARIOS}/unsupported-domain/xnf-unsupported-domain-request.json
-
-${DCAE_FIXED_PAYLOAD_REQUEST}                  ${VES_HV_SCENARIOS}/fixed-payload/dcae-fixed-payload-request.json
-${DCAE_TOO_BIG_PAYLOAD_REQUEST}                ${VES_HV_SCENARIOS}/too-big-payload/dcae-too-big-payload-request.json
-${DCAE_INVALID_WIRE_FRAME_REQUEST}             ${VES_HV_SCENARIOS}/invalid-wire-frame/dcae-invalid-wire-frame-request.json
-${DCAE_INVALID_GPB_DATA_REQUEST}               ${VES_HV_SCENARIOS}/invalid-gpb-data/dcae-invalid-gpb-data-request.json
-${DCAE_UNSUPPORTED_DOMAIN_REQUEST}             ${VES_HV_SCENARIOS}/unsupported-domain/dcae-unsupported-domain-request.json
+${HV_VES_SCENARIOS}                            %{WORKSPACE}/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios
+${XNF_FIXED_PAYLOAD_REQUEST}                   ${HV_VES_SCENARIOS}/fixed-payload/xnf-fixed-payload-request.json
+${XNF_TOO_BIG_PAYLOAD_REQUEST}                 ${HV_VES_SCENARIOS}/too-big-payload/xnf-too-big-payload-request.json
+${XNF_INVALID_WIRE_FRAME_REQUEST}              ${HV_VES_SCENARIOS}/invalid-wire-frame/xnf-invalid-wire-frame-request.json
+${XNF_INVALID_GPB_DATA_REQUEST}                ${HV_VES_SCENARIOS}/invalid-gpb-data/xnf-invalid-gpb-data-request.json
+${XNF_UNSUPPORTED_DOMAIN_REQUEST}              ${HV_VES_SCENARIOS}/unsupported-domain/xnf-unsupported-domain-request.json
+
+${DCAE_FIXED_PAYLOAD_REQUEST}                  ${HV_VES_SCENARIOS}/fixed-payload/dcae-fixed-payload-request.json
+${DCAE_TOO_BIG_PAYLOAD_REQUEST}                ${HV_VES_SCENARIOS}/too-big-payload/dcae-too-big-payload-request.json
+${DCAE_INVALID_WIRE_FRAME_REQUEST}             ${HV_VES_SCENARIOS}/invalid-wire-frame/dcae-invalid-wire-frame-request.json
+${DCAE_INVALID_GPB_DATA_REQUEST}               ${HV_VES_SCENARIOS}/invalid-gpb-data/dcae-invalid-gpb-data-request.json
+${DCAE_UNSUPPORTED_DOMAIN_REQUEST}             ${HV_VES_SCENARIOS}/unsupported-domain/dcae-unsupported-domain-request.json
 
 ${AMOUNT_25000}                                25000
 ${AMOUNT_50000}                                50000
diff --git a/tests/dcaegen2-collectors-hv-ves/testcases/multiple-clients.robot b/tests/dcaegen2-collectors-hv-ves/testcases/multiple-clients.robot
index 09b62c7..735a208 100644 (file)
@@ -45,9 +45,9 @@ Handle Multiple Connections
 
 
 *** Variables ***
-${VES_HV_SCENARIOS}                            %{WORKSPACE}/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios
+${HV_VES_SCENARIOS}                            %{WORKSPACE}/tests/dcaegen2-collectors-hv-ves/testcases/resources/scenarios
 
-${XNF_SMALLER_PAYLOAD_REQUEST}                 ${VES_HV_SCENARIOS}/multiple-simulators-payload/xnf-simulator-smaller-valid-request.json
-${DCAE_SMALLER_PAYLOAD_REQUEST}                ${VES_HV_SCENARIOS}/multiple-simulators-payload/dcae-smaller-valid-request.json
+${XNF_SMALLER_PAYLOAD_REQUEST}                 ${HV_VES_SCENARIOS}/multiple-simulators-payload/xnf-simulator-smaller-valid-request.json
+${DCAE_SMALLER_PAYLOAD_REQUEST}                ${HV_VES_SCENARIOS}/multiple-simulators-payload/dcae-smaller-valid-request.json
 
 ${AMOUNT_15000}                                15000
diff --git a/tests/dcaegen2-pmmapper/pmmapper/assets/A_meas_result.xml b/tests/dcaegen2-pmmapper/pmmapper/assets/A_meas_result.xml
deleted file mode 100644 (file)
index 269fdf1..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<measCollecFile xmlns="http://www.3gpp.org/ftp/specs/archive/32_series/32.435#measCollec">
-    <fileHeader dnPrefix="some dnPrefix" vendorName="FooBar Ltd"
-                fileFormatVersion="32.435 V10.0">
-        <fileSender localDn="Dublin"/>
-        <measCollec beginTime="2018-10-02T12:00:00+01:00"/>
-    </fileHeader>
-    <measData>
-        <managedElement swVersion="r0.1" localDn="Dublin"/>
-        <measInfo measInfoId="some measInfoId">
-            <job jobId="jobId"/>
-            <granPeriod endTime="2018-10-02T12:15:00Z" duration="PT900S"/>
-            <repPeriod duration="PT900S"/>
-            <measTypes>a b c</measTypes>
-            <measValue measObjLdn="objLdn">
-                <measResults>76 27 98</measResults>
-                <suspect>false</suspect>
-            </measValue>
-        </measInfo>
-    </measData>
-    <fileFooter>
-        <measCollec endTime="2018-10-02T12:15:00+01:00"/>
-    </fileFooter>
-</measCollecFile>
diff --git a/tests/dcaegen2-pmmapper/pmmapper/pmmapper.robot b/tests/dcaegen2-pmmapper/pmmapper/pmmapper.robot
index 311ee43..cbb77f9 100644 (file)
@@ -17,7 +17,6 @@ ${DELIVERY_ENDPOINT}                     /delivery
 ${HEALTHCHECK_ENDPOINT}                  /healthcheck
 ${NO_MANAGED_ELEMENT_PATH}               %{WORKSPACE}/tests/dcaegen2-pmmapper/pmmapper/assets/A_no_managed_element.xml
 ${NO_MEASDATA_PATH}                      %{WORKSPACE}/tests/dcaegen2-pmmapper/pmmapper/assets/A_no_measdata.xml
-${MEASD_RESULT_PATH}                     %{WORKSPACE}/tests/dcaegen2-pmmapper/pmmapper/assets/A_meas_result.xml
 ${VALID_METADATA_PATH}                   %{WORKSPACE}/tests/dcaegen2-pmmapper/pmmapper/assets/valid_metadata.json
 ${DIFF_VENDOR_METADATA}                  %{WORKSPACE}/tests/dcaegen2-pmmapper/pmmapper/assets/diff_vendor_metadata.json
 ${CLI_EXEC_CLI_PM_LOG}                   docker exec pmmapper /bin/sh -c "tail -5 /var/log/ONAP/dcaegen2/services/pm-mapper/pm-mapper_output.log"
@@ -29,6 +28,7 @@ ${CLI_EXEC_PM_FILTER}                    curl 'http://${CONSUL_IP}:8500/v1/kv/pm
 ${CLI_RESTART_PMMAPPER}                  docker restart pmmapper
 ${CLI_DELETE_SUB1}                       curl -i -X DELETE -H "Content-Type:application/vnd.dmaap-dr.subscription" -H "X-DMAAP-DR-ON-BEHALF-OF:DGL" -k https://localhost:8443/subs/1
 ${CLI_DELETE_SUB2}                       curl -i -X DELETE -H "Content-Type:application/vnd.dmaap-dr.subscription" -H "X-DMAAP-DR-ON-BEHALF-OF:DGL" -k https://localhost:8443/subs/2
+${CLI_MESSAGE_ROUTER_TOPIC}              curl http://${DMAAP_MR_IP}:3904/events/topic.org.onap.dmaap.mr.test1/CG1/C1?timeout=2000
 
 *** Test Cases ***
 
@@ -43,7 +43,6 @@ Verify 3GPP PM Mapper Subscribes to Data Router
     CheckLog                        ${CLI_EXEC_CLI_SUBS}             3gpppmmapper
     CheckLog                        ${CLI_EXEC_CLI_SUBS}             "privilegedSubscriber":true
 
-
 Verify Health Check returns 200 when a REST GET request to healthcheck url
     [Tags]                          PM_MAPPER_03
     [Documentation]                 Verify Health Check returns 200 when a REST GET request to healthcheck url
@@ -71,56 +70,46 @@ Verify 3GPP PM Mapper responds appropriately when invalid metadata is provided
     VerifyResponse                  ${resp.content}                 Malformed Metadata.
     CheckLog                        ${CLI_EXEC_CLI_PM_LOG}          RequestID=2
 
-Verify 3GPP PM Mapper received pushed PM data from Data Router
+Verify 3GPP PM Mapper received pushed PM data from data router and publishes to message router.
     [Tags]                          PM_MAPPER_06
-    [Documentation]                 Verify 3GPP PM Mapper received pushed PM data from Data Router
+    [Documentation]                 Verify 3GPP PM Mapper received pushed PM data from data router and publishes to message router.
     [Timeout]                       1 minute
     ${PM_DATA}=                     Get File                         ${PM_DATA_FILE_PATH}
     ${valid_metatdata}              Get File                         ${VALID_METADATA_PATH}
     ${resp}=                        PutCall                          ${PUBLISH_NODE_URL}     3    ${PM_DATA}    ${PUBLISH_CONTENT_TYPE}    ${valid_metatdata.replace("\n","")}    pmmapper
     VerifyResponse                  ${resp.status_code}              204
     Sleep                           10s
-    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           Event Processed
+    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           Successfully published VES events to messagerouter
     CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           RequestID=3
-
-Verify that PM Mapper logs successful when a file that contains measdata is provided
-    [Tags]                          PM_MAPPER_07
-    [Documentation]                 Verify that PM Mapper logs successful when a file that contains measdata is provided
-    [Timeout]                       1 minute
-    ${valid_meas_result_content}=   Get File                         ${MEASD_RESULT_PATH}
-    ${valid_metatdata}              Get File                         ${VALID_METADATA_PATH}
-    ${headers}=                     Create Dictionary                X-ONAP-RequestID=4  Content-Type=application/xml  X-DMAAP-DR-PUBLISH-ID=4  X-DMAAP-DR-META=${valid_metatdata.replace("\n","")}
-    ${resp}=                        Put Request                      mapper_session  ${DELIVERY_ENDPOINT}/A_meas_result.xml    data=${valid_meas_result_content}    headers=${headers}
-    VerifyResponse                  ${resp.status_code}              200
-    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           XML validation successful
-    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           RequestID=4
+    Sleep                           10s
+    CheckLog                        ${CLI_MESSAGE_ROUTER_TOPIC}      perf3gpp_gnb-Ericsson_pmMeasResult
 
 Verify that PM Mapper logs successful when a file that contains no measdata is provided
-    [Tags]                          PM_MAPPER_08
+    [Tags]                          PM_MAPPER_07
     [Documentation]                 Verify that PM Mapper logs successful when a file that contains no measdata is provided
     [Timeout]                       1 minute
     ${valid_no_measdata_content}=   Get File                         ${NO_MEASDATA_PATH}
     ${valid_metatdata}              Get File                         ${VALID_METADATA_PATH}
-    ${headers}=                     Create Dictionary                X-ONAP-RequestID=5  Content-Type=application/xml  X-DMAAP-DR-PUBLISH-ID=3  X-DMAAP-DR-META=${valid_metatdata.replace("\n","")}
+    ${headers}=                     Create Dictionary                X-ONAP-RequestID=4  Content-Type=application/xml  X-DMAAP-DR-PUBLISH-ID=3  X-DMAAP-DR-META=${valid_metatdata.replace("\n","")}
     ${resp}=                        Put Request                      mapper_session  ${DELIVERY_ENDPOINT}/A_no_measdata.xml    data=${valid_no_measdata_content}    headers=${headers}
     VerifyResponse                  ${resp.status_code}              200
-    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           XML validation successful
-    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           RequestID=5
+    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           MeasData is empty
+    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           RequestID=4
 
 Verify that PM Mapper throws Event failed validation against schema error when no managed element content is provided
-    [Tags]                          PM_MAPPER_09
+    [Tags]                          PM_MAPPER_08
     [Documentation]                 Verify 3gpp pm mapper responds with an error when no managed element content is provided
     [Timeout]                       1 minute
     ${no_managed_element_content}=  Get File                         ${NO_MANAGED_ELEMENT_PATH}
     ${valid_metatdata}              Get File                         ${VALID_METADATA_PATH}
-    ${headers}=                     Create Dictionary                X-ONAP-RequestID=6  Content-Type=application/xml  X-DMAAP-DR-PUBLISH-ID=2  X-DMAAP-DR-META=${valid_metatdata.replace("\n","")}
+    ${headers}=                     Create Dictionary                X-ONAP-RequestID=5  Content-Type=application/xml  X-DMAAP-DR-PUBLISH-ID=2  X-DMAAP-DR-META=${valid_metatdata.replace("\n","")}
     ${resp}=                        Put Request                      mapper_session  ${DELIVERY_ENDPOINT}/A_no_managed_element.xml    data=${no_managed_element_content}    headers=${headers}
     VerifyResponse                  ${resp.status_code}              200
     CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           XML validation failed
-    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           RequestID=6
+    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           RequestID=5
 
 Verify that PM Mapper correctly identifies a file that should not be mapped based on metadata filtering.
-    [Tags]                          PM_MAPPER_10
+    [Tags]                          PM_MAPPER_09
     [Documentation]                 Verify that PM Mapper correctly identifies a file that should not be mapped based on metadata filtering.
     [Timeout]                       1 minute
     ${cli_cmd_output}=              Run Process                      ${CLI_EXEC_VENDOR_FILTER}                   shell=yes
@@ -129,12 +118,12 @@ Verify that PM Mapper correctly identifies a file that should not be mapped base
     Should Be Equal As Strings      ${cli_cmd_output.rc}             0
     ${cli_cmd_output}=              Run Process                      ${CLI_RESTART_PMMAPPER}                     shell=yes
     Sleep                           10s
-    ${valid_meas_result_content}=   Get File                         ${MEASD_RESULT_PATH}
+    ${pm_data}=                     Get File                         ${PM_DATA_FILE_PATH}
     ${diff_vendor_metadata}=        Get File                         ${DIFF_VENDOR_METADATA}
-    ${headers}=                     Create Dictionary                X-ONAP-RequestID=7  Content-Type=application/xml  X-DMAAP-DR-PUBLISH-ID=2  X-DMAAP-DR-META=${diff_vendor_metadata.replace("\n","")}
-    ${resp}=                        Put Request                      mapper_session  ${DELIVERY_ENDPOINT}/A_meas_result.xml    data=${valid_meas_result_content}    headers=${headers}
+    ${headers}=                     Create Dictionary                X-ONAP-RequestID=6  Content-Type=application/xml  X-DMAAP-DR-PUBLISH-ID=2  X-DMAAP-DR-META=${diff_vendor_metadata.replace("\n","")}
+    ${resp}=                        Put Request                      mapper_session  ${DELIVERY_ENDPOINT}/A20181002.0000-1000-0015-1000_5G.xml    data=${pm_data}    headers=${headers}
     CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           Metadata does not match any filters,
-    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           RequestID=7
+    CheckLog                        ${CLI_EXEC_CLI_PM_LOG}           RequestID=6
 
 
 *** Keywords ***
diff --git a/tests/usecases/5G-bulkpm/BulkpmE2E.robot b/tests/usecases/5G-bulkpm/BulkpmE2E.robot
index 08277bd..f8ba0fb 100644 (file)
@@ -17,12 +17,12 @@ ${EVENT_DATA_FILE}                       %{WORKSPACE}/tests/usecases/5G-bulkpm/a
 ${TARGETURL_TOPICS}                      http://${DMAAP_MR_IP}:3904/topics
 ${TARGETURL_SUBSCR}                      http://${DMAAP_MR_IP}:3904/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12?timeout=1000
 ${CLI_EXEC_CLI}                          curl -k https://${DR_PROV_IP}:8443/internal/prov
-${CLI_EXEC_CLI_FILECONSUMER}             docker exec fileconsumer-node /bin/sh -c "ls /opt/app/subscriber/delivery | grep .gz"
+${CLI_EXEC_CLI_FILECONSUMER}             docker exec fileconsumer-node /bin/sh -c "ls /opt/app/subscriber/delivery | grep .xml"
 ${CLI_EXEC_CLI_DFC_LOG}                  docker exec dfc /bin/sh -c "cat /var/log/ONAP/application.log" > /tmp/dfc_docker.log.robot
 ${CLI_EXEC_CLI_DFC_LOG_GREP}             grep "Publish to DR successful!" /tmp/dfc_docker.log.robot
 
-${CLI_EXEC_CLI_FILECONSUMER_CP}          docker cp fileconsumer-node:/opt/app/subscriber/delivery/${SFTP_IP}_xNF.pm.xml.gz.M %{WORKSPACE}
-${CLI_EXEC_RENAME_METADATA}              mv %{WORKSPACE}/${SFTP_IP}_xNF.pm.xml.gz.M  %{WORKSPACE}/metadata.json
+${CLI_EXEC_CLI_FILECONSUMER_CP}          docker cp fileconsumer-node:/opt/app/subscriber/delivery/oteNB5309_xNF.pm.xml.M %{WORKSPACE}
+${CLI_EXEC_RENAME_METADATA}              mv %{WORKSPACE}/oteNB5309_xNF.pm.xml.M  %{WORKSPACE}/metadata.json
 ${metadataSchemaPath}                    %{WORKSPACE}/tests/usecases/5G-bulkpm/assets/metadata.schema.json
 ${metadataJsonPath}                      %{WORKSPACE}/metadata.json
 
@@ -86,7 +86,7 @@ Verify Fileconsumer Receive PM file from Data Router
     ${cli_cmd_output}=              Run Process                     ${CLI_EXEC_CLI_FILECONSUMER}        shell=yes
     Log                             ${cli_cmd_output.stdout}
     Should Be Equal As Strings      ${cli_cmd_output.rc}            0
-    Should Contain                  ${cli_cmd_output.stdout}        xNF.pm.xml.gz
+    Should Contain                  ${cli_cmd_output.stdout}        oteNB5309_xNF.pm.xml
 
 Verify File Consumer Receive valid metadata from Data Router
     [Tags]                          Bulk_PM_E2E_06
@@ -94,7 +94,7 @@ Verify File Consumer Receive valid metadata from Data Router
     ${cli_cmd_output}=              Run Process                     ${CLI_EXEC_CLI_FILECONSUMER}        shell=yes
     Log                             ${cli_cmd_output.stdout}
     Should Be Equal As Strings      ${cli_cmd_output.rc}            0
-    Should Contain                  ${cli_cmd_output.stdout}        ${SFTP_IP}_xNF.pm.xml.gz.M
+    Should Contain                  ${cli_cmd_output.stdout}        oteNB5309_xNF.pm.xml.M
     ${cli_cmd_output}=              Run Process                     ${CLI_EXEC_CLI_FILECONSUMER_CP}     shell=yes
     ${cli_cmd_output}=              Run Process                     ${CLI_EXEC_RENAME_METADATA}         shell=yes
     ${validation_result}=           Validate                        ${metadataSchemaPath}    ${metadataJsonPath}
diff --git a/tests/vid/resources/docker-compose.yml b/tests/vid/resources/docker-compose.yml
index 5f2c0fe..01fa92b 100644 (file)
@@ -1,11 +1,12 @@
 version: '3'
 services:
     vid-server:
-        image: nexus3.onap.org:10001/onap/vid:3.0-STAGING-latest
+        image: nexus3.onap.org:10001/onap/vid:4.0-STAGING-latest
         environment:
         - VID_MYSQL_DBNAME=vid_openecomp_epsdk
         - VID_MYSQL_PASS=Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U
         - VID_MSO_SERVER_URL=https://so-simulator:8443
+        - VID_AAI_URL=http://aai-simulator:8443
         ports:
         - "8080:8080"
         - "8443:8443"
@@ -29,6 +30,19 @@ services:
         build:
             context: simulators
             dockerfile: Dockerfile
+            args:
+                component: so
         ports:
         - "8444:8443"
-        container_name: so-simulator
\ No newline at end of file
+        container_name: so-simulator
+        
+    aai-simulator:
+        build:
+            context: simulators
+            dockerfile: Dockerfile
+            args:
+                component: aai
+        ports:
+        - "8445:8443"
+        container_name: aai-simulator
+
diff --git a/tests/vid/resources/simulators/Dockerfile b/tests/vid/resources/simulators/Dockerfile
index ace6d56..e6586b1 100644 (file)
@@ -1,16 +1,12 @@
-FROM alpine:3.9
+FROM python:3-alpine3.9
 
-RUN apk add --no-cache python3 && \
-    python3 -m ensurepip && \
-    rm -r /usr/lib/python*/ensurepip && \
-    pip3 install --upgrade pip setuptools && \
-    if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi && \
-    if [[ ! -e /usr/bin/python ]]; then ln -sf /usr/bin/python3 /usr/bin/python; fi && \
-    rm -r /root/.cache
+# `component` should be `so` or `aai`
+ARG component
+ENV component=$component
 
 COPY SO.py /
 ADD ./test_data_assets/ /
 
 EXPOSE 8443
 
-CMD [ "python", "./SO.py", "expected_so_requests.json", "expected_so_responses.json" ]
+CMD python ./SO.py expected_${component}_requests.json expected_${component}_responses.json
diff --git a/tests/vid/resources/simulators/SO.py b/tests/vid/resources/simulators/SO.py
index c119939..fa481b3 100644 (file)
@@ -103,7 +103,7 @@ class JsonFileToDictReader(object):
 def init_so_simulator():
     expected_so_requests = JsonFileToDictReader.read_expected_test_data(argv[1])
     expected_so_responses = JsonFileToDictReader.read_expected_test_data(argv[2])
-    logging.basicConfig(filename='output.log', level=logging.INFO)
+    logging.basicConfig(level=logging.INFO)
     handler = partial(SOHandler, expected_so_requests, expected_so_responses)
     handler.protocol_version = "HTTP/1.0"
     httpd = HTTPServer(('', DEFAULT_PORT), handler)
diff --git a/tests/vid/resources/simulators/test_data_assets/expected_aai_requests.json b/tests/vid/resources/simulators/test_data_assets/expected_aai_requests.json
new file mode 100644 (file)
index 0000000..3d77fe5
--- /dev/null
@@ -0,0 +1,4 @@
+{
+  "get": {
+  }
+}
\ No newline at end of file
diff --git a/tests/vid/resources/simulators/test_data_assets/expected_aai_responses.json b/tests/vid/resources/simulators/test_data_assets/expected_aai_responses.json
new file mode 100644 (file)
index 0000000..c809bca
--- /dev/null
@@ -0,0 +1,9 @@
+{
+  "get": {
+    "cloud-region": [{
+        "cloud-owner": "CloudOwner",
+        "cloud-region-id": "RegionOne"
+      }
+    ]
+  }
+}