Merge "Improvment to Test Case"
authorGary Wu <gary.i.wu@huawei.com>
Fri, 15 Feb 2019 16:03:08 +0000 (16:03 +0000)
committerGerrit Code Review <gerrit@onap.org>
Fri, 15 Feb 2019 16:03:08 +0000 (16:03 +0000)
40 files changed:
plans/policy/api/setup.sh
plans/policy/distribution/setup.sh
plans/policy/pap/setup.sh [new file with mode: 0644]
plans/policy/pap/teardown.sh [moved from scripts/vvp/start_vvp_sanity.sh with 64% similarity]
plans/policy/pap/testplan.txt [moved from plans/vvp/sanity/testplan.txt with 79% similarity]
plans/sdnc/healthcheck/setup.sh [changed mode: 0644->0755]
plans/usecases/5G-bulkpm/README.txt [new file with mode: 0644]
plans/usecases/5G-bulkpm/composefile/docker-compose-e2e.yml
plans/usecases/5G-bulkpm/composefile/onap.docker-compose-e2e [new file with mode: 0644]
plans/usecases/5G-bulkpm/onap.teardown.sh [new file with mode: 0644]
plans/usecases/5G-bulkpm/setup.sh
plans/vvp/sanity/setup.sh [deleted file]
plans/vvp/sanity/teardown.sh [deleted file]
scripts/vvp/clone_and_setup_vvp_data.sh [deleted file]
scripts/vvp/docker_health.sh [deleted file]
scripts/vvp/kill_containers_and_remove_dataFolders.sh [deleted file]
scripts/vvp/start_vvp_containers.sh [deleted file]
tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV4.json
tests/dcaegen2/prh-testcases/assets/json_events/event_with_IPV6.json
tests/dcaegen2/prh-testcases/assets/json_events/event_with_all_fields.json
tests/dcaegen2/prh-testcases/assets/json_events/event_with_empty_addtional_fields.json [new file with mode: 0644]
tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_IPV4_and_IPV6.json
tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_additional_fields.json [new file with mode: 0644]
tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName.json
tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_IPV4_and_IPV6.json
tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV4.json
tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_sourceName_and_IPV6.json
tests/dcaegen2/prh-testcases/assets/json_events/event_without_IPV6_field.json
tests/dcaegen2/prh-testcases/assets/json_events/not_json_format.json
tests/dcaegen2/prh-testcases/resources/PrhLibrary.py
tests/policy/api/api-test.robot
tests/policy/distribution/distribution-test.robot
tests/policy/pap/pap-test.robot [new file with mode: 0644]
tests/usecases/5G-bulkpm/BulkpmE2E.robot
tests/usecases/5G-bulkpm/assets/json_events/FileExistNotification.json
tests/usecases/5G-bulkpm/assets/metadata.schema.json [new file with mode: 0644]
tests/usecases/5G-bulkpm/resources/JsonValidatorLibrary.py [new file with mode: 0644]
tests/usecases/5G-bulkpm/resources/bulkpm_keywords.robot
tests/vvp/sanity/__init__.robot [deleted file]
tests/vvp/sanity/test1.robot [deleted file]

index dcfcb9e..5ba95e9 100644 (file)
@@ -28,4 +28,4 @@ for i in {1..10}; do
    sleep $i
 done
 
-ROBOT_VARIABLES="-v POLICY_API_IP:${POLICY_API_IP}"
\ No newline at end of file
+ROBOT_VARIABLES="-v POLICY_API_IP:${POLICY_API_IP}"
index 9b894e3..40a15d1 100644 (file)
 # SPDX-License-Identifier: Apache-2.0
 # ============LICENSE_END=========================================================
 
-docker run -d --name policy-distribution -p 6969:6969 -it nexus3.onap.org:10001/onap/policy-distribution:2.0.0-SNAPSHOT-latest 
+docker run -d --name policy-distribution -p 6969:6969 -it nexus3.onap.org:10001/onap/policy-distribution:2.1.0-SNAPSHOT-latest
 
 POLICY_DISTRIBUTION_IP=`get-instance-ip.sh policy-distribution`
 echo DISTRIBUTION IP IS ${POLICY_DISTRIBUTION_IP}
-Wait for initialization
+Wait for initialization
 for i in {1..10}; do
    curl -sS ${POLICY_DISTRIBUTION_IP}:6969 && break
    echo sleep $i
diff --git a/plans/policy/pap/setup.sh b/plans/policy/pap/setup.sh
new file mode 100644 (file)
index 0000000..44a205a
--- /dev/null
@@ -0,0 +1,31 @@
+#!/bin/bash
+# ============LICENSE_START=======================================================
+#  Copyright (C) 2019 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+docker run -d --name policy-pap -p 6969:6969 -it nexus3.onap.org:10001/onap/policy-pap:2.0.0-SNAPSHOT-latest
+
+POLICY_PAP_IP=`get-instance-ip.sh policy-pap`
+echo PAP IP IS ${POLICY_PAP_IP}
+# Wait for initialization
+for i in {1..10}; do
+   curl -sS ${POLICY_PAP_IP}:6969 && break
+   echo sleep $i
+   sleep $i
+done
+
+ROBOT_VARIABLES="-v POLICY_PAP_IP:${POLICY_PAP_IP}"
similarity index 64%
rename from scripts/vvp/start_vvp_sanity.sh
rename to plans/policy/pap/teardown.sh
index 1de1aaa..877b164 100644 (file)
@@ -1,26 +1,20 @@
 #!/bin/bash
-#
 # ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
-# Copyright (C) 2017 AT&T Intellectual Property. All rights
-#                             reserved.
+#  Copyright (C) 2019 Nordix Foundation.
 # ================================================================================
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-# http://www.apache.org/licenses/LICENSE-2.0
+#      http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# ============LICENSE_END============================================
-# ===================================================================
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
 #
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
 
-
-# will run CI for sanity checks
+kill-instance.sh policy-pap
similarity index 79%
rename from plans/vvp/sanity/testplan.txt
rename to plans/policy/pap/testplan.txt
index 4957ef6..6a5aa20 100644 (file)
@@ -1,3 +1,3 @@
 # Test suites are relative paths under [integration/csit.git]/tests/.
 # Place the suites in run order.
-vvp/sanity
+policy/pap/pap-test.robot
old mode 100644 (file)
new mode 100755 (executable)
index dfbd32c..7a66351
@@ -24,8 +24,8 @@ export NEXUS_USERNAME=docker
 export NEXUS_PASSWD=docker
 export NEXUS_DOCKER_REPO=nexus3.onap.org:10001
 export DMAAP_TOPIC=AUTO
-export DOCKER_IMAGE_VERSION=1.4-STAGING-latest
-export CCSDK_DOCKER_IMAGE_VERSION=0.3-STAGING-latest
+export DOCKER_IMAGE_VERSION=1.5-STAGING-latest
+export CCSDK_DOCKER_IMAGE_VERSION=0.4-STAGING-latest
 
 export MTU=$(/sbin/ifconfig | grep MTU | sed 's/.*MTU://' | sed 's/ .*//' | sort -n | head -1)
 
@@ -100,9 +100,8 @@ while [ "$TIME" -lt "$TIME_OUT" ]; do
 docker exec sdnc_controller_container rm -f /opt/opendaylight/current/etc/host.key
 response=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client system:start-level)
 docker exec sdnc_controller_container rm -f /opt/opendaylight/current/etc/host.key
-num_bundles=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client bundle:list | tail -1 | cut -d\| -f1)
 
-  if [ "$response" == "Level 100" ] && [ "$num_bundles" -ge 333 ]; then
+  if [ "$response" == "Level 100" ] ; then
     echo SDNC karaf started in $TIME seconds
     break;
   fi
@@ -117,10 +116,8 @@ if [ "$TIME" -ge "$TIME_OUT" ]; then
 fi
 
 response=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client system:start-level)
-num_bundles=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client bundle:list | tail -1 | cut -d\| -f1)
 
-  if [ "$response" == "Level 100" ] && [ "$num_bundles" -ge 333 ]; then
-    num_bundles=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client bundle:list | tail -1 | cut -d\| -f1)
+  if [ "$response" == "Level 100" ] ; then
     num_failed_bundles=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client bundle:list | grep Failure | wc -l)
     failed_bundles=$(docker exec sdnc_controller_container /opt/opendaylight/current/bin/client bundle:list | grep Failure)
     echo There is/are $num_failed_bundles failed bundles out of $num_bundles installed bundles.
diff --git a/plans/usecases/5G-bulkpm/README.txt b/plans/usecases/5G-bulkpm/README.txt
new file mode 100644 (file)
index 0000000..1d0fc41
--- /dev/null
@@ -0,0 +1,55 @@
+###################################################################################################################
+Executing the below commands will change the CSIT test from executing on a Docker environment to an ONAP one.
+###################################################################################################################
+
+1) Log in to an ONAP instance, switch user, and verify that the command kubectl executes before proceeding.
+# sudo -s
+# kubectl get svc -n onap| grep dcae
+
+2) Clone the csit repository
+# git clone https://gerrit.onap.org/r/integration/csit
+
+3) Install docker-compose 
+# sudo apt-get update
+# sudo curl -L https://github.com/docker/compose/releases/download/1.22.0/docker-compose-$(uname -s)-$(uname -m) -o /usr/local/bin/docker-compose
+# sudo chmod +x /usr/local/bin/docker-compose
+# docker-compose --version    
+
+4) Install the packages required for the RobotFramework.
+# apt install python-pip
+# sudo apt install python-pip virtualenv unzip sshuttle netcat libffi-dev libssl-dev
+# sudo pip install robotframework
+# sudo pip install -U requests
+# sudo pip install -U robotframework-requests
+
+5) Expose the Ves-collector
+# kubectl expose svc dcae-ves-collector --type=LoadBalancer --name=vesc -n onap
+       service "vesc" exposed
+
+6) Verify the Ves-collector is exposed
+# kubectl get svc -n onap | grep vesc
+       vesc    LoadBalancer   10.43.203.47    10.209.63.55     8080:31835/TCP          1m
+
+7) Modify the file setup.sh and make the below change
+# cd csit
+# vi plans/usecases/5G-bulkpm/setup.sh 
+CSIT=TRUE
+ to
+CSIT=FALSE
+
+8) Execute the Bulk PM e2e csit.
+# ./run-csit.sh plans/usecases/5G-bulkpm/
+
+
+--> Troubleshooting
+If the Test case "Verify Default Feed And File Consumer Subscription On Datarouter" is hanging, quit the test and execute the below
+Get the DR-PROV IP address  
+# kubectl -n onap -o=wide get pods | grep dmaap-dr-prov | awk '{print $6}'
+ 10.42.123.76
+Make sure there are no feeds
+# curl -k https://10.42.123.76:8443/internal/prov
+
+If there are feeds, delete them
+curl -X DELETE -H "Content-Type:application/vnd.att-dr.subscription" -H "X-ATT-DR-ON-BEHALF-OF:dradmin" -k https://10.42.123.76:8443/subs/XX
+
+Where XX is the number of the feeds in the previous command.
index c5567d8..05ccb70 100644 (file)
@@ -1,7 +1,7 @@
 version: '2.1'
 services:
   datarouter-prov:
-    image: nexus3.onap.org:10001/onap/dmaap/datarouter-prov
+    image: nexus3.onap.org:10001/onap/dmaap/datarouter-prov:2.0.0-SNAPSHOT
     container_name: datarouter-prov
     hostname: dmaap-dr-prov
     ports:
@@ -22,7 +22,7 @@ services:
       retries: 5
 
   datarouter-node:
-    image: nexus3.onap.org:10001/onap/dmaap/datarouter-node
+    image: nexus3.onap.org:10001/onap/dmaap/datarouter-node:2.0.0-SNAPSHOT
     container_name: datarouter-node
     hostname: dmaap-dr-node
     ports:
@@ -35,7 +35,7 @@ services:
         condition: service_healthy
 
   datarouter-subscriber:
-      image: nexus3.onap.org:10001/onap/dmaap/datarouter-subscriber
+      image: nexus3.onap.org:10001/onap/dmaap/datarouter-subscriber:2.0.0-SNAPSHOT
       container_name: fileconsumer-node
       hostname: subscriber.com
       ports:
@@ -70,7 +70,7 @@ services:
 
   dfc:
     container_name: dfc
-    image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server:latest
+    image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server:1.1.1
     ports:
       - "8433:8433"
 
diff --git a/plans/usecases/5G-bulkpm/composefile/onap.docker-compose-e2e b/plans/usecases/5G-bulkpm/composefile/onap.docker-compose-e2e
new file mode 100644 (file)
index 0000000..1c05ca5
--- /dev/null
@@ -0,0 +1,19 @@
+version: '2.1'
+services:
+  datarouter-subscriber:
+      image: nexus3.onap.org:10001/onap/dmaap/datarouter-subscriber:2.0.0-SNAPSHOT
+      container_name: fileconsumer-node
+      hostname: subscriber.com
+      ports:
+       - "7070:7070"
+      volumes:
+       - ../subscriber_data/subscriber.properties:/opt/app/subscriber/etc/subscriber.properties
+
+  sftp:
+    container_name: sftp
+    image: atmoz/sftp
+    ports:
+      - "2222:22"
+    volumes:
+      - /host/upload:/home/admin
+    command: admin:admin:1001
\ No newline at end of file
diff --git a/plans/usecases/5G-bulkpm/onap.teardown.sh b/plans/usecases/5G-bulkpm/onap.teardown.sh
new file mode 100644 (file)
index 0000000..966be45
--- /dev/null
@@ -0,0 +1,8 @@
+#!/bin/bash
+echo "Starting teardown script"
+DFC_POD=$(kubectl -n onap get pods | grep datafile-collector | awk '{print $1}')
+kubectl -n onap exec $DFC_POD -it  cat /opt/log/application.log > /tmp/dfc_docker.log
+cat /tmp/dfc_docker.log
+sleep 3
+kill-instance.sh fileconsumer-node
+kill-instance.sh sftp
\ No newline at end of file
index 47e2532..5f3c4a3 100644 (file)
@@ -2,6 +2,18 @@
 # Place the scripts in run order:
 source ${SCRIPTS}/common_functions.sh
 
+CSIT=TRUE
+if [ ${CSIT} = "TRUE" ] ; then
+####################################################
+#Executes the below setup in a Docker Environment  #
+####################################################
+
+echo "CSIT Test get executed in here"
+SFTP_PORT=22
+VESC_PORT=8080
+export VESC_PORT=${VESC_PORT}
+export CLI_EXEC_CLI_DFC="docker exec dfc /bin/sh -c \"ls /target | grep .gz\""
+
 # Clone DMaaP Message Router repo
 mkdir -p $WORKSPACE/archives/dmaapmr
 cd $WORKSPACE/archives/dmaapmr
@@ -116,7 +128,7 @@ export HOST_IP=${HOST_IP}
 export DMAAP_MR_IP=${DMAAP_MR_IP}
 
 #Pass any variables required by Robot test suites in ROBOT_VARIABLES
-ROBOT_VARIABLES="-v DR_PROV_IP:${DR_PROV_IP} -v DR_NODE_IP:${DR_NODE_IP} -v DMAAP_MR_IP:${DMAAP_MR_IP} -v VESC_IP:${VESC_IP} -v DR_SUBSCIBER_IP:${DR_SUBSCIBER_IP}"
+ROBOT_VARIABLES="-v DR_PROV_IP:${DR_PROV_IP} -v DR_NODE_IP:${DR_NODE_IP} -v DMAAP_MR_IP:${DMAAP_MR_IP} -v VESC_IP:${VESC_IP} -v VESC_PORT:${VESC_PORT} -v DR_SUBSCIBER_IP:${DR_SUBSCIBER_IP}"
 
 pip install jsonschema uuid
 # Wait container ready
@@ -141,6 +153,7 @@ docker exec dfc /bin/sh -c "echo '${DR_NODE_IP}' dmaap-dr-node >> /etc/hosts"
 # Update the File Ready Notification with actual sftp ip address and copy pm files to sftp server.
 cp $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotification.json $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
 sed -i 's/sftpserver/'${SFTP_IP}'/g' $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
+sed -i 's/sftpport/'${SFTP_PORT}'/g' $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
 docker cp $WORKSPACE/plans/usecases/5G-bulkpm/assets/xNF.pm.xml.gz sftp:/home/admin/
 
 # Data Router Configuration:
@@ -151,3 +164,80 @@ sed -i 's/fileconsumer/'${DR_SUBSCIBER_IP}'/g' /tmp/addSubscriber.json
 curl -v -X POST -H "Content-Type:application/vnd.dmaap-dr.subscription" -H "X-DMAAP-DR-ON-BEHALF-OF:dradmin" --data-ascii @/tmp/addSubscriber.json --post301 --location-trusted -k https://${DR_PROV_IP}:8443/subscribe/1
 sleep 10
 curl -k https://$DR_PROV_IP:8443/internal/prov
+
+else
+############################################################
+############################################################
+# Executes the below setup in an ONAP Environment          #
+# Make sure the steps in the README are completed first !! #
+############################################################
+############################################################
+SFTP_PORT=2222
+
+cp $WORKSPACE/plans/usecases/5G-bulkpm/teardown.sh $WORKSPACE/plans/usecases/5G-bulkpm/teardown.sh.orig
+cp $WORKSPACE/plans/usecases/5G-bulkpm/onap.teardown.sh $WORKSPACE/plans/usecases/5G-bulkpm/teardown.sh
+
+#Get DataFileCollector POD name in this ONAP Deployment
+DFC_POD=$(kubectl -n onap get pods | grep datafile-collector | awk '{print $1}')
+export DFC_POD=${DFC_POD}
+export CLI_EXEC_CLI_DFC="kubectl exec -n onap ${DFC_POD} -it ls /target | grep .gz"
+
+# Get IP address of datarrouter-prov
+DR_PROV_IP=$(kubectl -n onap -o wide get pods | grep dmaap-dr-prov | awk '{print $6}')
+echo DR_PROV_IP=${DR_PROV_IP}
+DR_NODE_IP=$(kubectl -n onap -o=wide get pods | grep dmaap-dr-node | awk '{print $6}')
+echo DR_NODE_IP=${DR_NODE_IP}
+
+# Get IP address of exposed Ves and its port
+DMAAP_MR_IP=$(kubectl -n onap -o=wide get pods | grep dev-dmaap-message-router | grep -Ev "kafka|zookeeper" | awk '{print $6}')
+VESC_IP=$(kubectl get svc -n onap | grep vesc | awk '{print $4}')
+VESC_PORT=$(kubectl get svc -n onap | grep vesc | awk '{print $5}' | cut -d ":" -f2 | cut -d "/" -f1)
+echo VESC_IP=${VESC_IP}
+echo VESC_PORT=${VESC_PORT}
+
+export VESC_IP=${VESC_IP}
+export VESC_PORT=${VESC_PORT}
+export HOST_IP=${HOST_IP}
+export DMAAP_MR_IP=${DMAAP_MR_IP}
+
+#Get DataFileCollector POD name in this ONAP Deployment
+DFC_POD=$(kubectl -n onap get pods | grep datafile-collector | awk '{print $1}')
+export DFC_POD=${DFC_POD}
+
+pip install jsonschema uuid
+
+# Clone DMaaP Data Router repo
+mkdir -p $WORKSPACE/archives/dmaapdr
+cd $WORKSPACE/archives/dmaapdr
+git clone --depth 1 https://gerrit.onap.org/r/dmaap/datarouter -b master
+cd $WORKSPACE/archives/dmaapdr/datarouter/datarouter-docker-compose/src/main/resources
+mkdir docker-compose
+cd $WORKSPACE/archives/dmaapdr/datarouter/datarouter-docker-compose/src/main/resources/docker-compose
+cp $WORKSPACE/plans/usecases/5G-bulkpm/composefile/onap.docker-compose-e2e $WORKSPACE/archives/dmaapdr/datarouter/datarouter-docker-compose/src/main/resources/docker-compose/docker-compose.yml
+
+#Start up the SFTP and FileConsumer containers.
+docker login -u docker -p docker nexus3.onap.org:10001
+docker-compose up -d
+
+# Wait container ready
+sleep 2
+HOST_IP=$(ip route get 8.8.8.8 | awk '/8.8.8.8/ {print $NF}')
+# SFTP Configuration:
+# Update the File Ready Notification with actual sftp ip address and copy pm files to sftp server.
+cp $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotification.json $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
+sed -i 's/sftpserver/'${HOST_IP}'/g' $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
+sed -i 's/sftpport/'${SFTP_PORT}'/g' $WORKSPACE/tests/usecases/5G-bulkpm/assets/json_events/FileExistNotificationUpdated.json
+docker cp $WORKSPACE/plans/usecases/5G-bulkpm/assets/xNF.pm.xml.gz sftp:/home/admin/
+
+# Create default feed and create file consumer subscriber on data router
+curl -v -X POST -H "Content-Type:application/vnd.att-dr.feed" -H "X-ATT-DR-ON-BEHALF-OF:dradmin" --data-ascii @$WORKSPACE/plans/usecases/5G-bulkpm/assets/createFeed.json --post301 --location-trusted -k https://${DR_PROV_IP}:8443
+cp $WORKSPACE/plans/usecases/5G-bulkpm/assets/addSubscriber.json /tmp/addSubscriber.json
+sed -i 's/fileconsumer/'${HOST_IP}'/g' /tmp/addSubscriber.json
+curl -v -X POST -H "Content-Type:application/vnd.att-dr.subscription" -H "X-ATT-DR-ON-BEHALF-OF:dradmin" --data-ascii @/tmp/addSubscriber.json --post301 --location-trusted -k https://${DR_PROV_IP}:8443/subscribe/1
+sleep 10
+curl -k https://$DR_PROV_IP:8443/internal/prov
+
+#Pass any variables required by Robot test suites in ROBOT_VARIABLES
+ROBOT_VARIABLES="-v DR_PROV_IP:${DR_PROV_IP} -v DR_NODE_IP:${DR_NODE_IP} -v DMAAP_MR_IP:${DMAAP_MR_IP} -v VESC_IP:${VESC_IP} -v VESC_PORT:${VESC_PORT} -v DR_SUBSCIBER_IP:${DR_SUBSCIBER_IP} -v DFC_POD:${DFC_POD} -v HOST_IP:${HOST_IP} "
+
+fi;
\ No newline at end of file
diff --git a/plans/vvp/sanity/setup.sh b/plans/vvp/sanity/setup.sh
deleted file mode 100644 (file)
index 12bb601..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2016-2017 Huawei Technologies Co., Ltd.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Modifications copyright (c) 2017 AT&T Intellectual Property
-#
-# Place the scripts in run order:
-
-
-source ${WORKSPACE}/scripts/vvp/clone_and_setup_vvp_data.sh
-
-source ${WORKSPACE}/scripts/vvp/start_vvp_containers.sh
-
-source ${WORKSPACE}/scripts/vvp/docker_health.sh
-
-source ${WORKSPACE}/scripts/vvp/start_vvp_sanity.sh
-
-
-VVP_IP=`${WORKSPACE}/scripts/get-instance-ip.sh vvp-engagementmgr`
-echo VVP_IP=${VVP_IP}
-
-
-# Pass any variables required by Robot test suites in ROBOT_VARIABLES
-ROBOT_VARIABLES="-v VVP_IP:${VVP_IP}"
diff --git a/plans/vvp/sanity/teardown.sh b/plans/vvp/sanity/teardown.sh
deleted file mode 100644 (file)
index 3369c02..0000000
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2016-2017 Huawei Technologies Co., Ltd.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Modifications copyright (c) 2017 AT&T Intellectual Property
-#
-
-source ${WORKSPACE}/scripts/vvp/kill_containers_and_remove_dataFolders.sh
diff --git a/scripts/vvp/clone_and_setup_vvp_data.sh b/scripts/vvp/clone_and_setup_vvp_data.sh
deleted file mode 100644 (file)
index 866a82e..0000000
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/bin/bash
-#
-# ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
-# Copyright (C) 2017 AT&T Intellectual Property. All rights
-#                             reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END============================================
-# ===================================================================
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-echo "This is ${WORKSPACE}/scripts/vvp/clone_and_setup_vvp_data.sh"
-
-# Clone vvp enviroment template
-mkdir -p ${WORKSPACE}/data/environments/
-mkdir -p ${WORKSPACE}/data/clone/
-mkdir -p /opt/configmaps/settings/
-
-cd ${WORKSPACE}/data/clone
-git clone --depth 1 http://gerrit.onap.org/r/vvp/engagementmgr -b master
-
-chmod -R 775 ${WORKSPACE}/data/
-
-# copy settings file from tox environment infrastructure:
-cp -f ${WORKSPACE}/data/clone/engagementmgr/django/vvp/settings/tox_settings.py /opt/configmaps/settings/__init__.py
-
-# uwsgi.ini file creation
-echo "[uwsgi]
-http = :80
-plugin = python
-chdir = /srv
-module = vvp.wsgi:application
-master = True
-pidfile = /tmp/project-master.pid
-vacuum = True
-max-requests = 5000
-enable-threads = True
-stats = 0.0.0.0:9000
-stats-http = True" > /opt/configmaps/settings/uwsgi.ini
-
-# storage.py file creation
-echo "from storages.backends.s3boto import S3BotoStorage
-from django.conf import settings
-class S3StaticStorage(S3BotoStorage):
-    custom_domain = '%s/%s' % (settings.AWS_S3_HOST, settings.STATIC_BUCKET)
-    bucket_name = settings.STATIC_BUCKET
-class S3MediaStorage(S3BotoStorage):
-    custom_domain = '%s/%s' % (settings.AWS_S3_HOST, settings.MEDIA_BUCKET)
-    bucket_name = settings.MEDIA_BUCKET" > /opt/configmaps/settings/storage.py
-
-# envbool.py file creation
-echo "import os
-def envbool(key, default=False, unknown=True):
-    return {'true': True, '1': True, 'false': False, '0': False,
-        '': default,}.get(os.getenv(key, '').lower(), unknown)" > /opt/configmaps/settings/envbool.py
-
-# vvp_env.list file creation
-echo "# set enviroment variables
-OAUTHLIB_INSECURE_TRANSPORT=1
-HOST_IP=${IP}
-ENVNAME=${ENVIRONMENT}
-http_proxy=${http_proxy}
-https_proxy=${https_proxy}
-no_proxy=${no_proxy}
-DJANGO_SETTINGS_MODULE=vvp.settings
-# export PYTHONPATH={pwd}
-SECRET_KEY=6mo22&FAKEFALEFALEFKEuq0u*4ksk^aq8lte&)yul
-ENVIRONMENT=development
-SERVICE_PROVIDER=ExampleProvider
-PROGRAM_NAME=VVP
-PROGRAM_NAME_URL_PREFIX=vvp
-SERVICE_PROVIDER_DOMAIN=example-domain.com
-EMAIL_HOST=localhost
-EMAIL_HOST_PASSWORD=
-EMAIL_HOST_USER=
-EMAIL_PORT=25
-PGDATABASE=icedb
-PGUSER=iceuser
-PGPASSWORD=Aa123456
-PGHOST=localhost
-PGPORT=5433
-SECRET_WEBHOOK_TOKEN=AiwiFAKEFAKEFAKEmahch2zahshaGi
-SECRET_GITLAB_AUTH_TOKEN=ieNgFAKEFAKE4zohvee9a
-SECRET_JENKINS_PASSWORD=xaiyiFAKEFAKEqueuBu
-SECRET_CMS_APP_CLIENT_ID=MHmJo0ccDhFAKEFAKEFAKEPAC6H6HAMzhCCM16
-SECRET_CMS_APP_CLIENT_SECRET=nI8QFAKEEEpnw5nTs
-SLACK_API_TOKEN=
-S3_HOST=localhost
-S3_PORT=443
-AWS_ACCESS_KEY_ID=FD2FAKEFAKEFAKEVD1MWRN
-AWS_SECRET_ACCESS_KEY=TKoiwxzFAKEFAKEFAKEFAKEFAKEQ27nP2lCiutEsD
-STATIC_ROOT=/app/htdocs" > ${WORKSPACE}/data/environments/vvp_env.list
-
-ifconfig
-
-IP_ADDRESS=`ip route get 8.8.8.8 | awk '/src/{ print $7 }'`
-export HOST_IP=$IP_ADDRESS
diff --git a/scripts/vvp/docker_health.sh b/scripts/vvp/docker_health.sh
deleted file mode 100644 (file)
index 520b2dc..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-#
-# ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
-# Copyright (C) 2017 AT&T Intellectual Property. All rights
-#                             reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END============================================
-# ===================================================================
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-echo "VVP-Engagement-Manager health-Check:"
-echo ""
-echo ""
-res=`curl -s -X GET -H "Accept: application/json" -H "Content-Type: application/json" "http://localhost:9090/vvp/v1/engmgr/vendors" | wc -w`
-if [ ${res} == 0 ]; then
-    echo "Error [${res}] while performing vvp engagement manager vendor existance check"
-    exit 1
-fi
-echo "check vvp engagement manager vendor existance: OK [${res}]"
diff --git a/scripts/vvp/kill_containers_and_remove_dataFolders.sh b/scripts/vvp/kill_containers_and_remove_dataFolders.sh
deleted file mode 100644 (file)
index 38bd331..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2016-2017 Huawei Technologies Co., Ltd.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Modifications copyright (c) 2017 AT&T Intellectual Property
-#
-
-echo "This is ${WORKSPACE}/scripts/vvp/kill_and_remove_dataFolder.sh"
-
-
-CONTAINER_NAME="vvp-engagementmgr"
-
-#kill and remove all vvp dockers
-docker stop $CONTAINER_NAME
-docker rm -f $CONTAINER_NAME
-
-
-#delete data folder
-rm -rf ${WORKSPACE}/data/*
diff --git a/scripts/vvp/start_vvp_containers.sh b/scripts/vvp/start_vvp_containers.sh
deleted file mode 100644 (file)
index cafc040..0000000
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/bin/bash
-#
-# ============LICENSE_START=======================================================
-# ONAP CLAMP
-# ================================================================================
-# Copyright (C) 2017 AT&T Intellectual Property. All rights
-#                             reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END============================================
-# ===================================================================
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-
-echo "This is ${WORKSPACE}/scripts/vvp/start_vvp_containers.sh"
-
-export IP=$HOST_IP
-export PREFIX='nexus3.onap.org:10001/openecomp/vvp'
-export RELEASE='latest'
-
-#start Engagement Manager pod:
-docker run \
---detach \
---entrypoint="" \
---name vvp-engagementmgr \
---env-file ${WORKSPACE}/data/environments/vvp_env.list \
---log-driver=json-file \
---log-opt max-size=100m \
---log-opt max-file=10 \
---ulimit memlock=-1:-1 \
---memory 4g \
---memory-swap=4g \
---ulimit nofile=4096:100000 \
---volume /etc/localtime:/etc/localtime:ro \
---volume /opt/configmaps/settings:/opt/configmaps/settings/ \
---publish 9090:80 ${PREFIX}/engagementmgr:${RELEASE}
-
-docker cp /opt/configmaps/settings/uwsgi.ini vvp-engagementmgr:/srv/vvp/settings/
-
-echo "please wait while Engagement Manager is starting..."
-echo ""
-c=60 # seconds to wait
-REWRITE="\e[25D\e[1A\e[K"
-while [ $c -gt 0 ]; do
-    c=$((c-1))
-    sleep 1
-    echo -e "${REWRITE}$c"
-done
-echo -e ""
-
-#run migration again:
-docker exec -d vvp-engagementmgr sh -c "python3 /srv/manage.py migrate"
-
-#run initial populate db again:
-docker exec -d vvp-engagementmgr sh -c "python3 /srv/manage.py initial_populate_db"
-
-
-echo "Will copy the generated DB sqlite3 file into the application directory in 30 seconds..."
-sleep 30
-#copy the generated DB sqlite3 file into the application directory:
-docker exec -d vvp-engagementmgr sh -c "cp emdb.db /srv/emdb.db -f"
-
-TIME_OUT=600
-INTERVAL=5
-TIME=0
-while [ "$TIME" -lt "$TIME_OUT" ]; do
-  response=$(curl --write-out '%{http_code}' --silent --output /dev/null http://localhost:9090/vvp/v1/engmgr/vendors); echo $response
-
-  if [ "$response" == "200" ]; then
-    echo VVP-Engagement-Manager well started in $TIME seconds
-    break;
-  fi
-
-  echo Sleep: $INTERVAL seconds before testing if VVP-Engagement-Manager is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
-  sleep $INTERVAL
-  TIME=$(($TIME+$INTERVAL))
-done
-
-if [ "$TIME" -ge "$TIME_OUT" ]; then
-   echo TIME OUT: Docker containers not started in $TIME_OUT seconds... Could cause problems for tests...
-else
-   echo "Done starting vvp containers!"
-fi
index 2ffe356..cdcab67 100644 (file)
@@ -5,7 +5,13 @@
     },
     "pnfRegistrationFields": {
       "oamV4IpAddress":"10.17.123.234",
-      "oamV6IpAddress":""
+      "oamV6IpAddress":"",
+      "serial-number":"NOkkaaa123",
+      "equip-vendor":"equipVendor",
+      "equip-model":"equipModel",
+      "equip-type":"equipType",
+      "nf-role":"nf-role",
+      "sw-version":"swVersion"
     }
   }
 }
index c4a0e72..f5ec23d 100644 (file)
@@ -5,7 +5,13 @@
     },
     "pnfRegistrationFields": {
       "oamV4IpAddress":"",
-      "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2e:0370:7334"
+      "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2e:0370:7334",
+      "serial-number":"NOkkaaa123",
+      "equip-vendor":"equipVendor",
+      "equip-model":"equipModel",
+      "equip-type":"equipType",
+      "nf-role":"nf-role",
+      "sw-version":"swVersion"
     }
   }
 }
index 16963e1..bc9cb1d 100644 (file)
@@ -5,7 +5,13 @@
     },
     "pnfRegistrationFields": {
       "oamV4IpAddress":"10.16.123.234",
-      "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"
+      "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334",
+      "serial-number":"NOkkaaa123",
+      "equip-vendor":"equipVendor",
+      "equip-model":"equipModel",
+      "equip-type":"equipType",
+      "nf-role":"nf-role",
+      "sw-version":"swVersion"
     }
   }
 }
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_empty_addtional_fields.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_empty_addtional_fields.json
new file mode 100644 (file)
index 0000000..7ca4d0c
--- /dev/null
@@ -0,0 +1,17 @@
+{
+  "event": {
+    "commonEventHeader": {
+      "sourceName":"NOK6061ZW1"
+    },
+    "pnfRegistrationFields": {
+      "oamV4IpAddress":"10.16.123.234",
+      "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334",
+      "serial-number":"",
+      "equip-vendor":"",
+      "equip-model":"",
+      "equip-type":"",
+      "nf-role":"",
+      "sw-version":""
+    }
+  }
+}
index 1e3afa9..4942a3d 100644 (file)
@@ -5,7 +5,13 @@
     },
     "pnfRegistrationFields": {
       "oamV4IpAddress":"",
-      "oamV6IpAddress":""
+      "oamV6IpAddress":"",
+      "serial-number":"NOkkaaa123",
+      "equip-vendor":"equipVendor",
+      "equip-model":"equipModel",
+      "equip-type":"equipType",
+      "nf-role":"nf-role",
+      "sw-version":"swVersion"
     }
   }
 }
diff --git a/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_additional_fields.json b/tests/dcaegen2/prh-testcases/assets/json_events/event_with_missing_additional_fields.json
new file mode 100644 (file)
index 0000000..16963e1
--- /dev/null
@@ -0,0 +1,11 @@
+{
+  "event": {
+    "commonEventHeader": {
+      "sourceName":"NOK6061ZW1"
+    },
+    "pnfRegistrationFields": {
+      "oamV4IpAddress":"10.16.123.234",
+      "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334"
+    }
+  }
+}
index 126987f..5e2a612 100644 (file)
@@ -5,7 +5,13 @@
     },
     "pnfRegistrationFields": {
       "oamV4IpAddress":"10.18.123.234",
-      "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2a:0370:7334"
+      "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2a:0370:7334",
+      "serial-number":"NOkkaaa123",
+      "equip-vendor":"equipVendor",
+      "equip-model":"equipModel",
+      "equip-type":"equipType",
+      "nf-role":"nf-role",
+      "sw-version":"swVersion"
     }
   }
 }
index de1f576..a6a6f36 100644 (file)
@@ -5,7 +5,13 @@
     },
     "pnfRegistrationFields": {
       "oamV4IpAddress":"",
-      "oamV6IpAddress":""
+      "oamV6IpAddress":"",
+      "serial-number":"NOkkaaa123",
+      "equip-vendor":"equipVendor",
+      "equip-model":"equipModel",
+      "equip-type":"equipType",
+      "nf-role":"nf-role",
+      "sw-version":"swVersion"
     }
   }
 }
index 4838f1b..6d9eadf 100644 (file)
@@ -5,7 +5,13 @@
     },
     "pnfRegistrationFields": {
       "oamV4IpAddress":"",
-      "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2f:0370:7334"
+      "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8b2f:0370:7334",
+      "serial-number":"NOkkaaa123",
+      "equip-vendor":"equipVendor",
+      "equip-model":"equipModel",
+      "equip-type":"equipType",
+      "nf-role":"nf-role",
+      "sw-version":"swVersion"
     }
   }
 }
index 04ab7ce..9fac5b1 100644 (file)
@@ -5,7 +5,13 @@
     },
     "pnfRegistrationFields": {
       "oamV4IpAddress":"10.17.163.234",
-      "oamV6IpAddress":""
+      "oamV6IpAddress":"",
+      "serial-number":"NOkkaaa123",
+      "equip-vendor":"equipVendor",
+      "equip-model":"equipModel",
+      "equip-type":"equipType",
+      "nf-role":"nf-role",
+      "sw-version":"swVersion"
     }
   }
 }
index 0aa0372..a416bb7 100644 (file)
@@ -4,7 +4,13 @@
       "sourceName":"NOK6061ZW9"
     },
     "pnfRegistrationFields": {
-      "oamV4IpAddress":"10.17.123.24"
+      "oamV4IpAddress":"10.17.123.24",
+      "serial-number":"NOkkaaa123",
+      "equip-vendor":"equipVendor",
+      "equip-model":"equipModel",
+      "equip-type":"equipType",
+      "nf-role":"nf-role",
+      "sw-version":"swVersion"
     }
   }
 }
index c87e188..08d9a49 100644 (file)
@@ -6,6 +6,12 @@
     "pnfRegistrationFields": {
       "oamV4IpAddress":"10.16.123.234",
       "oamV6IpAddress":"2001:0db8:85a3:0000:0000:8a2e:0370:7334",
+      "serial-number":"NOkkaaa123",
+      "equip-vendor":"equipVendor",
+      "equip-model":"equipModel",
+      "equip-type":"equipType",
+      "nf-role":"nf-role",
+      "sw-version":"swVersion"
     }
   }
 }
index dc58936..d413be5 100644 (file)
@@ -24,8 +24,14 @@ class PrhLibrary(object):
         json_to_python = json.loads(json_file)
         ipv4 = json_to_python.get("event").get("pnfRegistrationFields").get("oamV4IpAddress")
         ipv6 = json_to_python.get("event").get("pnfRegistrationFields").get("oamV6IpAddress") if "oamV6IpAddress" in json_to_python["event"]["pnfRegistrationFields"] else ""
+        serial_number = json_to_python.get("event").get("pnfRegistrationFields").get("serial-number") if "serial-number" in json_to_python["event"]["pnfRegistrationFields"] else ""
+        equip_vendor = json_to_python.get("event").get("pnfRegistrationFields").get("equip-vendor") if "equip-vendor" in json_to_python["event"]["pnfRegistrationFields"] else ""
+        equip_model = json_to_python.get("event").get("pnfRegistrationFields").get("equip-model") if "equip-model" in json_to_python["event"]["pnfRegistrationFields"] else ""
+        equip_type = json_to_python.get("event").get("pnfRegistrationFields").get("equip-type") if "equip-type" in json_to_python["event"]["pnfRegistrationFields"] else ""
+        nf_role = json_to_python.get("event").get("pnfRegistrationFields").get("nf-role") if "nf-role" in json_to_python["event"]["pnfRegistrationFields"] else ""
+        sw_version = json_to_python.get("event").get("pnfRegistrationFields").get("sw-version") if "sw-version" in json_to_python["event"]["pnfRegistrationFields"] else ""
         correlation_id = json_to_python.get("event").get("commonEventHeader").get("sourceName")
-        str_json = '{"correlationId":"' + correlation_id + '","ipaddress-v4-oam":"' + ipv4 + '","ipaddress-v6-oam":"' + ipv6 + '"}'
+        str_json = '{"correlationId":"' + correlation_id + '","ipaddress-v4-oam":"' + ipv4 + '","ipaddress-v6-oam":"' + ipv6 + '","serial-number":"' + serial_number + '","equip-vendor":"' + equip_vendor + '","equip-model":"' + equip_model + '","equip-type":"' + equip_type + '","nf-role":"' + nf_role + '","sw-version":"' + sw_version + '"}'
         python_to_json = json.dumps(str_json)
         return python_to_json.replace("\\", "")[1:-1]
 
index 3753b3d..7ea2473 100644 (file)
@@ -8,8 +8,8 @@ Library     json
 Healthcheck
      [Documentation]    Runs Policy Api Health check
      ${auth}=    Create List    healthcheck    zb!XztG34 
-     Log    Creating session http://${POLICY_API_IP}:6969
-     ${session}=    Create Session      policy  http://${POLICY_API_IP}:6969   auth=${auth}
+     Log    Creating session https://${POLICY_API_IP}:6969
+     ${session}=    Create Session      policy  https://${POLICY_API_IP}:6969   auth=${auth}
      ${headers}=  Create Dictionary     Accept=application/json    Content-Type=application/json
      ${resp}=   Get Request     policy  /healthcheck     headers=${headers}
      Log    Received response from policy ${resp.text}
@@ -19,10 +19,10 @@ Healthcheck
 Statistics
      [Documentation]    Runs Policy Api Statistics
      ${auth}=    Create List    healthcheck    zb!XztG34 
-     Log    Creating session http://${POLICY_API_IP}:6969
-     ${session}=    Create Session      policy  http://${POLICY_API_IP}:6969   auth=${auth}
+     Log    Creating session https://${POLICY_API_IP}:6969
+     ${session}=    Create Session      policy  https://${POLICY_API_IP}:6969   auth=${auth}
      ${headers}=  Create Dictionary     Accept=application/json    Content-Type=application/json
      ${resp}=   Get Request     policy  /statistics     headers=${headers}
      Log    Received response from policy ${resp.text}
      Should Be Equal As Strings    ${resp.status_code}     200
-     Should Be Equal As Strings    ${resp.json()['code']}  200
\ No newline at end of file
+     Should Be Equal As Strings    ${resp.json()['code']}  200
index 1b9fa21..2ee1180 100644 (file)
@@ -8,8 +8,8 @@ Library     json
 Healthcheck
      [Documentation]    Runs Policy Distribution Health check
      ${auth}=    Create List    healthcheck    zb!XztG34 
-     Log    Creating session http://${POLICY_DISTRIBUTION_IP}:6969
-     ${session}=    Create Session      policy  http://${POLICY_DISTRIBUTION_IP}:6969   auth=${auth}
+     Log    Creating session https://${POLICY_DISTRIBUTION_IP}:6969
+     ${session}=    Create Session      policy  https://${POLICY_DISTRIBUTION_IP}:6969   auth=${auth}
      ${headers}=  Create Dictionary     Accept=application/json    Content-Type=application/json
      ${resp}=   Get Request     policy  /healthcheck     headers=${headers}
      Log    Received response from policy ${resp.text}
diff --git a/tests/policy/pap/pap-test.robot b/tests/policy/pap/pap-test.robot
new file mode 100644 (file)
index 0000000..7dca5b4
--- /dev/null
@@ -0,0 +1,28 @@
+*** Settings ***
+Library     Collections
+Library     RequestsLibrary
+Library     OperatingSystem
+Library     json
+
+*** Test Cases ***
+Healthcheck
+     [Documentation]    Runs Policy PAP Health check
+     ${auth}=    Create List    healthcheck    zb!XztG34
+     Log    Creating session https://${POLICY_PAP_IP}:6969
+     ${session}=    Create Session      policy  https://${POLICY_PAP_IP}:6969   auth=${auth}
+     ${headers}=  Create Dictionary     Accept=application/json    Content-Type=application/json
+     ${resp}=   Get Request     policy  /healthcheck     headers=${headers}
+     Log    Received response from policy ${resp.text}
+     Should Be Equal As Strings    ${resp.status_code}     200
+     Should Be Equal As Strings    ${resp.json()['code']}  200
+
+Statistics
+     [Documentation]    Runs Policy PAP Statistics
+     ${auth}=    Create List    healthcheck    zb!XztG34
+     Log    Creating session https://${POLICY_PAP_IP}:6969
+     ${session}=    Create Session      policy  https://${POLICY_PAP_IP}:6969   auth=${auth}
+     ${headers}=  Create Dictionary     Accept=application/json    Content-Type=application/json
+     ${resp}=   Get Request     policy  /statistics     headers=${headers}
+     Log    Received response from policy ${resp.text}
+     Should Be Equal As Strings    ${resp.status_code}     200
+     Should Be Equal As Strings    ${resp.json()['code']}  200
index fcc1cc7..4b85e6b 100644 (file)
@@ -8,7 +8,7 @@ Resource          resources/bulkpm_keywords.robot
 
 
 *** Variables ***
-${VESC_URL}                              http://%{VESC_IP}:8080
+${VESC_URL}                              http://%{VESC_IP}:%{VESC_PORT}
 ${GLOBAL_APPLICATION_ID}                 robot-ves
 ${VES_ANY_EVENT_PATH}                    /eventListener/v7
 ${HEADER_STRING}                         content-type=application/json
@@ -17,9 +17,13 @@ ${EVENT_DATA_FILE}                       %{WORKSPACE}/tests/usecases/5G-bulkpm/a
 ${TARGETURL_TOPICS}                      http://${DMAAP_MR_IP}:3904/topics
 ${TARGETURL_SUBSCR}                      http://${DMAAP_MR_IP}:3904/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12?timeout=1000
 ${CLI_EXEC_CLI}                          curl -k https://${DR_PROV_IP}:8443/internal/prov
-${CLI_EXEC_CLI_DFC}                      docker exec dfc /bin/sh -c "ls /target | grep .gz"
 ${CLI_EXEC_CLI_FILECONSUMER}             docker exec fileconsumer-node /bin/sh -c "ls /opt/app/subscriber/delivery | grep .gz"
 
+${CLI_EXEC_CLI_FILECONSUMER_CP}          docker cp fileconsumer-node:/opt/app/subscriber/delivery/xNF.pm.xml.gz.M %{WORKSPACE}
+${CLI_EXEC_RENAME_METADATA}              mv %{WORKSPACE}/xNF.pm.xml.gz.M  %{WORKSPACE}/metadata.json
+${metadataSchemaPath}                    %{WORKSPACE}/tests/usecases/5G-bulkpm/assets/metadata.schema.json
+${metadataJsonPath}                      %{WORKSPACE}/metadata.json
+
 *** Test Cases ***
 
 Send VES File Ready Event to VES Collector
@@ -55,7 +59,7 @@ Check VES Notification Topic is existing in Message Router
 Verify Downloaded PM file from xNF exist on Data File Collector
     [Tags]                          Bulk_PM_E2E_03
     [Documentation]                 Check the PM XML file exists on the data file collector
-    ${cli_cmd_output}=              Run Process                     ${CLI_EXEC_CLI_DFC}                 shell=yes
+    ${cli_cmd_output}=              Run Process                     %{CLI_EXEC_CLI_DFC}                 shell=yes
     Log                             ${cli_cmd_output.stdout}
     Should Be Equal As Strings      ${cli_cmd_output.rc}            0
     Should Contain                  ${cli_cmd_output.stdout}        xNF.pm.xml.gz
@@ -78,3 +82,15 @@ Verify Fileconsumer Receive PM file from Data Router
     Log                             ${cli_cmd_output.stdout}
     Should Be Equal As Strings      ${cli_cmd_output.rc}            0
     Should Contain                  ${cli_cmd_output.stdout}        xNF.pm.xml.gz
+
+Verify File Consumer Receive valid metadata from Data Router
+    [Tags]                          Bulk_PM_E2E_06
+    [Documentation]                 Check PM XML file is delivered to the FileConsumer Simulator with valid metadata
+    ${cli_cmd_output}=              Run Process                     ${CLI_EXEC_CLI_FILECONSUMER}        shell=yes
+    Log                             ${cli_cmd_output.stdout}
+    Should Be Equal As Strings      ${cli_cmd_output.rc}            0
+    Should Contain                  ${cli_cmd_output.stdout}        xNF.pm.xml.gz.M
+    ${cli_cmd_output}=              Run Process                     ${CLI_EXEC_CLI_FILECONSUMER_CP}     shell=yes
+    ${cli_cmd_output}=              Run Process                     ${CLI_EXEC_RENAME_METADATA}         shell=yes
+    ${validation_result}=           Validate                        ${metadataSchemaPath}    ${metadataJsonPath}
+    Should Be Equal As Strings      ${validation_result}            0
index 4064ea3..d5d8fd0 100644 (file)
@@ -21,7 +21,7 @@
             "arrayOfNamedHashMap": [
                 { "name": "xNF.pm.xml.gz",
                   "hashMap":{
-                            "location": "sftp://admin:admin@sftpserver:22/xNF.pm.xml.gz",
+                            "location": "sftp://admin:admin@sftpserver:sftpport/xNF.pm.xml.gz",
                             "compression": "gzip",
                             "fileFormatType": "org.3GPP.32.435#measCollec",
                             "fileFormatVersion": "V10"
diff --git a/tests/usecases/5G-bulkpm/assets/metadata.schema.json b/tests/usecases/5G-bulkpm/assets/metadata.schema.json
new file mode 100644 (file)
index 0000000..a41b354
--- /dev/null
@@ -0,0 +1,74 @@
+{
+    "$schema":  "http://json-schema.org/draft-07/schema",
+    "$id": "metadata.schema.json",
+    "title": "DataRouter PM File Metadata",
+    "description": "Metadata for 3GPP PM files that are placed on the DMaaP Data Router by the Data File Collector (VES 7.1)",
+    "type": "object",
+
+    "properties": {
+        "productName": {
+            "description": "from the FileReady event eventName",
+            "type": "string"
+        },
+
+        "vendorName": {
+            "description": "from the FileReady event eventName",
+            "type": "string"
+        },
+
+        "lastEpochMicrosec": {
+            "description": "the latest unix epoch time associated with the FileReady event",
+            "type": "string"
+        },
+
+        "sourceName": {
+            "description": "the name of the entity experiencing the event",
+            "type": "string"
+        },
+
+        "startEpochMicrosec": {
+            "description": "the earliest unix epoch time associated with the FileReady event",
+            "type": "string"
+        },
+
+        "timeZoneOffset": {
+            "description": "the timezone offset from UTC",
+            "type": "string",
+            "pattern": "^(?:(?:[a-zA-Z]{3})[+-](?:[01][0-9]|2[0-3]).[0-5][0-9])$"
+        },
+
+        "location": {
+            "description": "follows the format <protocol>://<ip address>:<port>/<path>/<filename>, the IP address is the node ip address, the port of the protocol server",
+            "type": "string"
+        },
+
+        "compression": {
+            "description": "specifies if the file is compressed",
+            "type": "string",
+            "enum": [ "gzip" ]
+        },
+
+        "fileFormatType": {
+            "description": "the file format",
+            "type": "string"
+        },
+
+        "fileFormatVersion": {
+            "description": "the version of the file format",
+            "type": "string"
+        }
+    },
+
+    "required": [
+        "productName",
+        "vendorName",
+        "lastEpochMicrosec",
+        "sourceName",
+        "startEpochMicrosec",
+        "timeZoneOffset",
+        "location",
+        "compression",
+        "fileFormatType",
+        "fileFormatVersion"
+    ]
+}
diff --git a/tests/usecases/5G-bulkpm/resources/JsonValidatorLibrary.py b/tests/usecases/5G-bulkpm/resources/JsonValidatorLibrary.py
new file mode 100644 (file)
index 0000000..12d5d85
--- /dev/null
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+
+import sys
+import logging
+from simplejson import load
+from jsonschema import validate, ValidationError, SchemaError
+
+
+class JsonValidatorLibrary(object):
+
+    def __init__(self):
+        pass
+
+    def validate(self, schemaPath, jsonPath):
+        logging.info("Schema path: " + schemaPath)
+        logging.info("JSON path: " + jsonPath)
+        schema = None
+        data = None
+        try:
+            schema = load(open(schemaPath, 'r'))
+            data = load(open(jsonPath, 'r'))
+        except (IOError, ValueError, OSError) as e:
+            logging.error(str(e))
+            return 1
+
+        try:
+            validate(data, schema)
+        except (ValidationError, SchemaError) as e:
+            logging.error(e.message)
+            return 1
+
+        # logger.log("JSON validation successful")
+        print("JSON validation successful")
+        return 0
+
+if __name__ == '__main__':
+    lib = JsonValidatorLibrary()
+    # sys.exit(JsonValidatorLibrary().validate(sys.argv[1], sys.argv[2]))
index 6859ea0..9ef56c8 100644 (file)
@@ -2,6 +2,7 @@
 Documentation     The main interface for interacting with VES. It handles low level stuff like managing the http request library and VES required fields
 Library              RequestsLibrary
 Library                  ../resources/xNFLibrary.py
+Library           ../resources/JsonValidatorLibrary.py
 Library           OperatingSystem
 Library           Collections
 Library           requests
diff --git a/tests/vvp/sanity/__init__.robot b/tests/vvp/sanity/__init__.robot
deleted file mode 100644 (file)
index 6bc0362..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-*** Settings ***
-Documentation    VVP - HealthCheck
diff --git a/tests/vvp/sanity/test1.robot b/tests/vvp/sanity/test1.robot
deleted file mode 100644 (file)
index 27612fd..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-*** Settings ***
-Library           Collections
-Library           OperatingSystem
-Library           RequestsLibrary
-Library           json
-
-# http://localhost:9090/vvp/v1/engmgr/vendors
-# vvp-engagementmgr
-
-*** Test Cases ***
-Get Requests health check ok
-    [Tags]    get
-    CreateSession    vvp-engagementmgr    http://localhost:9090
-    ${headers}=    Create Dictionary    Accept=application/json    Content-Type=application/json
-    ${resp}=    Get Request    vvp-engagementmgr    /vvp/v1/engmgr/vendors    headers=&{headers}
-    Should Be Equal As Strings    ${resp.status_code}    200
-    @{ITEMS}=    Copy List    ${resp.json()}
-    : FOR    ${ELEMENT}    IN    @{ITEMS}
-    \    Log    ${ELEMENT['uuid']} ${ELEMENT['name']}