From: Morgan Richomme Date: Mon, 8 Mar 2021 08:48:41 +0000 (+0000) Subject: Merge "CSIT for NST selection enhancements" X-Git-Url: https://gerrit.onap.org/r/gitweb?a=commitdiff_plain;h=aa686f94515f61d89652a587b92447c8f3dbe2d2;hp=ac6c8501503d272c691b71a98078cc3fab1cc8c9;p=integration%2Fcsit.git Merge "CSIT for NST selection enhancements" --- diff --git a/plans/ccsdk-oran/polmansuite/config/application_configuration.json.nosdnc b/plans/ccsdk-oran/polmansuite/config/application_configuration.json.nosdnc deleted file mode 100644 index deb88a08..00000000 --- a/plans/ccsdk-oran/polmansuite/config/application_configuration.json.nosdnc +++ /dev/null @@ -1,39 +0,0 @@ -{ - "config":{ - "//description":"Application configuration", - "ric":[ - { - "name":"ric1", - "baseUrl":"https://a1-sim-OSC:8185/", - "managedElementIds":[ - "kista_1", - "kista_2" - ] - }, - { - "name":"ric2", - "baseUrl":"https://a1-sim-STD-v2:8185/", - "managedElementIds":[ - "kista_1", - "kista_2" - ] - } - ], - "streams_publishes":{ - "dmaap_publisher":{ - "type":"message_router", - "dmaap_info":{ - "topic_url":"http://dmaap-mr:3904/events/A1-POLICY-AGENT-WRITE" - } - } - }, - "streams_subscribes":{ - "dmaap_subscriber":{ - "type":"message_router", - "dmaap_info":{ - "topic_url":"http://dmaap-mr:3904/events/A1-POLICY-AGENT-READ/users/policy-agent?timeout=15000&limit=100" - } - } - } - } - } \ No newline at end of file diff --git a/plans/ccsdk-oran/polmansuite/config/application_configuration.json.sdnc b/plans/ccsdk-oran/polmansuite/config/application_configuration.json.sdnc deleted file mode 100644 index 0f05de17..00000000 --- a/plans/ccsdk-oran/polmansuite/config/application_configuration.json.sdnc +++ /dev/null @@ -1,48 +0,0 @@ -{ - "config":{ - "controller": [ - { - "name": "controller1", - "baseUrl": "https://a1-controller:8443", - "userName": "admin", - "password": "Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U" - } - ], - "ric":[ - { - "name":"ric1", - "controller": "controller1", - "baseUrl":"https://a1-sim-OSC:8185/", - "managedElementIds":[ - "kista_1", - "kista_2" - ] - }, - { - "name":"ric2", - "controller": "controller1", - "baseUrl":"https://a1-sim-STD-v2:8185/", - "managedElementIds":[ - "kista_1", - "kista_2" - ] - } - ], - "streams_publishes":{ - "dmaap_publisher":{ - "type":"message_router", - "dmaap_info":{ - "topic_url":"http://dmaap-mr:3904/events/A1-POLICY-AGENT-WRITE" - } - } - }, - "streams_subscribes":{ - "dmaap_subscriber":{ - "type":"message_router", - "dmaap_info":{ - "topic_url":"http://dmaap-mr:3904/events/A1-POLICY-AGENT-READ/users/policy-agent?timeout=15000&limit=100" - } - } - } - } - } diff --git a/plans/ccsdk-oran/polmansuite/data/preparePmsData.sh b/plans/ccsdk-oran/polmansuite/data/preparePmsData.sh deleted file mode 100755 index 9644a41d..00000000 --- a/plans/ccsdk-oran/polmansuite/data/preparePmsData.sh +++ /dev/null @@ -1,157 +0,0 @@ -#!/bin/bash - -# ============LICENSE_START=============================================== -# Copyright (C) 2021 Nordix Foundation. All rights reserved. -# ======================================================================== -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END================================================= -# - -# The scripts in data/ will generate some dummy data in the running system. -# It will create: -# one policy type in a1-sim-OSC -# one service in policy agent -# one policy in a1-sim-OSC -# one policy in a1-sim-STD - -# Run command: -# ./preparePmsData.sh [policy-agent port] [a1-sim-OSC port] [a1-sim-STD port] [http/https] - -policy_agent_port=${1:-8081} -a1_sim_OSC_port=${2:-30001} -a1_sim_STD_port=${3:-30005} -httpx=${4:-"http"} -SHELL_FOLDER=$(cd "$(dirname "$0")";pwd) - -echo "using policy_agent port: "$policy_agent_port -echo "using a1-sim-OSC port: "$a1_sim_OSC_port -echo "using a1-sim-STD port: "$a1_sim_STD_port -echo "using protocol: "$httpx -echo -e "\n" - -checkRes (){ - if [ "$res" != "$expect" ]; then - echo "$res is not expected! exit!" - exit 1; - fi -} - -echo "policy agent status:" -curlString="curl -skw %{http_code} $httpx://localhost:$policy_agent_port/status" -res=$($curlString) -echo "$res" -expect="hunky dory200" -checkRes -echo -e "\n" - -echo "ric1 version:" -curlString="curl -skw %{http_code} $httpx://localhost:$a1_sim_OSC_port/counter/interface" -res=$($curlString) -echo "$res" -expect="OSC_2.1.0200" -checkRes -echo -e "\n" - -echo "ric2 version:" -curlString="curl -skw %{http_code} $httpx://localhost:$a1_sim_STD_port/counter/interface" -res=$($curlString) -echo "$res" -expect="STD_2.0.0200" -checkRes -echo -e "\n" - -echo "create policy type 1 to ric1:" -curlString="curl -X PUT -skw %{http_code} $httpx://localhost:$a1_sim_OSC_port/policytype?id=1 -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/OSC/policy_type.json" -res=$($curlString) -echo "$res" -expect="Policy type 1 is OK.201" -checkRes -echo -e "\n" - -echo "create policy type 2 to ric2:" -curlString="curl -skw %{http_code} $httpx://localhost:$a1_sim_STD_port/policytype?id=2 -X PUT -H Accept:application/json -H Content-Type:application/json -H X-Requested-With:XMLHttpRequest --data-binary @${SHELL_FOLDER}/testdata/v2/policy_type.json" -res=$($curlString) -echo "$res" -expect="Policy type 2 is OK.201" -checkRes -echo -e "\n" - -for i in {1..30}; do - echo "policy types from policy agent:" - curlString="curl -skw %{http_code} $httpx://localhost:$policy_agent_port/a1-policy/v2/policy-types" - res=$($curlString) - echo "$res" - expect="{\"policytype_ids\":[\"1\",\"2\"]}200" - if [ "$res" == "$expect" ]; then - echo -e "\n" - break; - else - sleep $i - fi -done - -echo "create service ric-registration to policy agent:" -curlString="curl -k -X PUT -sw %{http_code} -H accept:application/json -H Content-Type:application/json "$httpx://localhost:$policy_agent_port/a1-policy/v2/services" --data-binary @${SHELL_FOLDER}/testdata/v2/service.json" -res=$($curlString) -echo "$res" -expect="201" -checkRes -echo -e "\n" - -echo "create policy aa8feaa88d944d919ef0e83f2172a5000 to ric1 with type 1 and service controlpanel via policy agent:" -curlString="curl -k -X PUT -sw %{http_code} -H accept:application/json -H Content-Type:application/json "$httpx://localhost:$policy_agent_port/a1-policy/v2/policies" 
--data-binary @${SHELL_FOLDER}/testdata/v2/policy_osc.json" -res=$($curlString) -echo "$res" -expect="201" -checkRes -echo -e "\n" - -echo "policy numbers from ric1:" -curlString="curl -skw %{http_code} $httpx://localhost:$a1_sim_OSC_port/counter/num_instances" -res=$($curlString) -echo "$res" -expect="1200" -checkRes -echo -e "\n" - -echo "create policy aa8feaa88d944d919ef0e83f2172a5100 to ric2 with type 2 and service controlpanel via policy agent:" -curlString="curl -k -X PUT -sw %{http_code} -H accept:application/json -H Content-Type:application/json "$httpx://localhost:$policy_agent_port/a1-policy/v2/policies" --data-binary @${SHELL_FOLDER}/testdata/v2/policy_std_v2.json" -res=$($curlString) -echo "$res" -expect="201" -checkRes -echo -e "\n" - -echo "policy numbers from ric2:" -curlString="curl -skw %{http_code} $httpx://localhost:$a1_sim_STD_port/counter/num_instances" -res=$($curlString) -echo "$res" -expect="1200" -checkRes -echo -e "\n" - -echo "policy id aa8feaa88d944d919ef0e83f2172a5000 from policy agent:" -curlString="curl -s -o /dev/null -I -w %{http_code} $httpx://localhost:$policy_agent_port/a1-policy/v2/policies/aa8feaa88d944d919ef0e83f2172a5000" -res=$($curlString) -echo "$res" -expect="200" -checkRes -echo -e "\n" - -echo "policy id aa8feaa88d944d919ef0e83f2172a5100 from policy agent:" -curlString="curl -s -o /dev/null -I -w %{http_code} $httpx://localhost:$policy_agent_port/a1-policy/v2/policies/aa8feaa88d944d919ef0e83f2172a5100" -res=$($curlString) -echo "$res" -expect="200" -checkRes -echo -e "\n" diff --git a/plans/ccsdk-oran/polmansuite/data/testdata/OSC/policy_type.json b/plans/ccsdk-oran/polmansuite/data/testdata/OSC/policy_type.json deleted file mode 100644 index aeea7733..00000000 --- a/plans/ccsdk-oran/polmansuite/data/testdata/OSC/policy_type.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "name": "pt1", - "description": "pt1 policy type", - "policy_type_id": 1, - "create_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "OSC_Type1_1.0.0", - "description": "Type 1 policy type", - "type": "object", - "properties": { - "scope": { - "type": "object", - "properties": { - "ueId": { - "type": "string" - }, - "qosId": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "ueId", - "qosId" - ] - }, - "qosObjectives": { - "type": "object", - "properties": { - "priorityLevel": { - "type": "number" - } - }, - "additionalProperties": false, - "required": [ - "priorityLevel" - ] - } - }, - "additionalProperties": false, - "required": [ - "scope", "qosObjectives" - ] - } -} diff --git a/plans/ccsdk-oran/polmansuite/data/testdata/policy.json b/plans/ccsdk-oran/polmansuite/data/testdata/policy.json deleted file mode 100644 index ac1b538b..00000000 --- a/plans/ccsdk-oran/polmansuite/data/testdata/policy.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "scope": { - "ueId": "ue3100", - "qosId": "qos3100" - }, - "qosObjectives": { - "priorityLevel": 3100 - } -} diff --git a/plans/ccsdk-oran/polmansuite/data/testdata/service.json b/plans/ccsdk-oran/polmansuite/data/testdata/service.json deleted file mode 100644 index 7bb66514..00000000 --- a/plans/ccsdk-oran/polmansuite/data/testdata/service.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "callbackUrl": "https://callback-receiver:8091/callbacks/1", - "keepAliveIntervalSeconds": "3600", - "serviceName": "service1" -} diff --git a/plans/ccsdk-oran/polmansuite/data/testdata/v2/policy_osc.json b/plans/ccsdk-oran/polmansuite/data/testdata/v2/policy_osc.json deleted file mode 100644 index 902f9111..00000000 
--- a/plans/ccsdk-oran/polmansuite/data/testdata/v2/policy_osc.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "ric_id": "ric1", - "policy_id": "aa8feaa88d944d919ef0e83f2172a5100", - "service_id": "controlpanel", - "policytype_id": "1", - "status_notification_uri": "http://callback-receiver:8090/callbacks/test", - "policy_data": { - "scope": { - "ueId": "ue5100", - "qosId": "qos5100" - }, - "qosObjectives": { - "priorityLevel": 5100 - } - } -} \ No newline at end of file diff --git a/plans/ccsdk-oran/polmansuite/data/testdata/v2/policy_std_v2.json b/plans/ccsdk-oran/polmansuite/data/testdata/v2/policy_std_v2.json deleted file mode 100644 index dcb7e38f..00000000 --- a/plans/ccsdk-oran/polmansuite/data/testdata/v2/policy_std_v2.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "ric_id": "ric2", - "policy_id": "aa8feaa88d944d919ef0e83f2172a5000", - "service_id": "controlpanel", - "policytype_id": "2", - "status_notification_uri": "http://callback-receiver:8090/callbacks/test", - "policy_data": { - "scope": { - "ueId": "ue5000", - "qosId": "qos5000" - }, - "qosObjectives": { - "priorityLevel": 5000 - } - } -} \ No newline at end of file diff --git a/plans/ccsdk-oran/polmansuite/data/testdata/v2/policy_type.json b/plans/ccsdk-oran/polmansuite/data/testdata/v2/policy_type.json deleted file mode 100644 index 931498c4..00000000 --- a/plans/ccsdk-oran/polmansuite/data/testdata/v2/policy_type.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "policySchema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "STD_QOS_0_2_0", - "description": "STD QOS policy type", - "type": "object", - "properties": { - "scope": { - "type": "object", - "properties": { - "ueId": { - "type": "string" - }, - "qosId": { - "type": "string" - } - }, - "additionalProperties": false, - "required": [ - "ueId", - "qosId" - ] - }, - "qosObjectives": { - "type": "object", - "properties": { - "priorityLevel": { - "type": "number" - } - }, - "additionalProperties": false, - "required": [ - "priorityLevel" - ] - } - } - }, - "statusSchema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "STD_QOS_0.2.0", - "description": "STD QOS policy type status", - "type": "object", - "properties": { - "enforceStatus": { - "type": "string" - }, - "enforceReason": { - "type": "string" - }, - "additionalProperties": false, - "required": [ - "enforceStatus" - ] - } - } - } \ No newline at end of file diff --git a/plans/ccsdk-oran/polmansuite/data/testdata/v2/service.json b/plans/ccsdk-oran/polmansuite/data/testdata/v2/service.json deleted file mode 100644 index d984cbab..00000000 --- a/plans/ccsdk-oran/polmansuite/data/testdata/v2/service.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "callback_url": "http://callback-receiver:8090/callbacks/ric-registration", - "keep_alive_interval_seconds": "0", - "service_id": "ric-registration" -} \ No newline at end of file diff --git a/plans/ccsdk-oran/polmansuite/docker-compose.yml b/plans/ccsdk-oran/polmansuite/docker-compose.yml deleted file mode 100644 index a50115f7..00000000 --- a/plans/ccsdk-oran/polmansuite/docker-compose.yml +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (C) 2021 Nordix Foundation. All rights reserved. -# ======================================================================== -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END================================================= -# -version: '3' - -networks: - default: - driver: bridge - -services: - policy-agent: - image: nexus3.onap.org:10003/onap/ccsdk-oran-a1policymanagementservice:1.1.1-STAGING-latest - container_name: policy-agent - networks: - default: - aliases: - - policy-agent-container - ports: - - 8081:8081 - - 8433:8433 - volumes: - - ./config/application_configuration.json:/opt/app/policy-agent/data/application_configuration.json:ro - # For using own certs instead of the default ones (built into the container), - # place them in config/ directory, update the application-policyagent.yaml file, and uncomment the following lines - # - ./config/keystore-policyagent.jks:/opt/app/policy-agent/etc/cert/keystore.jks:ro - # - ./config/truststore-policyagent.jks:/opt/app/policy-agent/etc/cert/truststore.jks:ro - # - ./config/application-policyagent.yaml:/opt/app/policy-agent/config/application.yaml:ro - - a1-sim-OSC: - image: nexus3.o-ran-sc.org:10004/o-ran-sc/a1-simulator:2.1.0 - container_name: a1-sim-OSC - networks: - - default - ports: - - 30001:8085 - - 30002:8185 - environment: - - A1_VERSION=OSC_2.1.0 - - REMOTE_HOSTS_LOGGING=1 - - ALLOW_HTTP=true - - a1-sim-STD: - image: nexus3.o-ran-sc.org:10004/o-ran-sc/a1-simulator:2.1.0 - container_name: a1-sim-STD - networks: - - default - ports: - - 30003:8085 - - 30004:8185 - environment: - - A1_VERSION=STD_1.1.3 - - REMOTE_HOSTS_LOGGING=1 - - ALLOW_HTTP=true - - a1-sim-STD-v2: - image: nexus3.o-ran-sc.org:10004/o-ran-sc/a1-simulator:2.1.0 - container_name: a1-sim-STD-v2 - networks: - - default - ports: - - 30005:8085 - - 30006:8185 - environment: - - A1_VERSION=STD_2.0.0 - - REMOTE_HOSTS_LOGGING=1 - - ALLOW_HTTP=true diff --git a/plans/ccsdk-oran/polmansuite/sdnc/config/https-props-a1controller.properties b/plans/ccsdk-oran/polmansuite/sdnc/config/https-props-a1controller.properties deleted file mode 100644 index 7c4b1c17..00000000 --- a/plans/ccsdk-oran/polmansuite/sdnc/config/https-props-a1controller.properties +++ /dev/null @@ -1,24 +0,0 @@ -# ========================LICENSE_START================================= -# O-RAN-SC -# %% -# Copyright (C) 2021 Nordix Foundation -# %% -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ========================LICENSE_END=================================== - -key-store=/etc/ssl/certs/java/keystore.jks -key-password=sdnc-a1-controller -keystore-password=sdnc-a1-controller -isTrustStoreUsed=true -trust-store=/etc/ssl/certs/java/truststore.jks -truststore-password=sdnc-a1-controller diff --git a/plans/ccsdk-oran/polmansuite/sdnc/docker-compose.yml b/plans/ccsdk-oran/polmansuite/sdnc/docker-compose.yml deleted file mode 100644 index 86dd3a54..00000000 --- a/plans/ccsdk-oran/polmansuite/sdnc/docker-compose.yml +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright (C) 2021 Nordix Foundation. All rights reserved. -# ======================================================================== -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END================================================= -# -version: '3' - -networks: - default: - driver: bridge - -services: - db: - image: mysql/mysql-server:5.6 - container_name: sdnc-db - networks: - - default - ports: - - "3306" - environment: - - MYSQL_ROOT_PASSWORD=openECOMP1.0 - - MYSQL_ROOT_HOST=% - logging: - driver: "json-file" - options: - max-size: "30m" - max-file: "5" - - a1-controller: - image: onap/sdnc-image:2.1.0 - depends_on : - - db - container_name: a1-controller - networks: - - default - entrypoint: ["/opt/onap/sdnc/bin/startODL.sh"] - ports: - - 8282:8181 - - 8443:8443 - links: - - db:dbhost - - db:sdnctldb01 - - db:sdnctldb02 - environment: - - MYSQL_ROOT_PASSWORD=openECOMP1.0 - - SDNC_DB_INIT=true - - SDNC_CONFIG_DIR=/opt/onap/sdnc/data/properties - - A1_TRUSTSTORE_PASSWORD=a1adapter - # For using own certs instead of the default ones (built into the container), - # place them in config/ directory, update the https-props-a1controller.properties file, and uncomment the following lines - #volumes: - # - ./sdnc/config/keystore-a1controller.jks:/etc/ssl/certs/java/keystore.jks:ro - # - ./sdnc/config/truststore-a1controller.jks:/etc/ssl/certs/java/truststore.jks:ro - # - ./sdnc/config/https-props-a1controller.properties:/opt/onap/sdnc/data/properties/https-props.properties:ro - logging: - driver: "json-file" - options: - max-size: "30m" - max-file: "5" diff --git a/plans/ccsdk-oran/polmansuite/setup.sh b/plans/ccsdk-oran/polmansuite/setup.sh deleted file mode 100755 index 188b84db..00000000 --- a/plans/ccsdk-oran/polmansuite/setup.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash - -# ============LICENSE_START=============================================== -# Copyright (C) 2021 Nordix Foundation. All rights reserved. -# ======================================================================== -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END================================================= - - -cd $WORKSPACE/archives - -POLMAN_PLANS=$WORKSPACE/plans/ccsdk-oran/polmansuite -ARCHIVES=$WORKSPACE/archives - -#Copy test script -cp $POLMAN_PLANS/docker-compose.yml $WORKSPACE/archives/docker-compose.yml -cp -rf $POLMAN_PLANS/config/ $WORKSPACE/archives/config/ -cp -rf $POLMAN_PLANS/data/ $WORKSPACE/archives/data/ -cp -rf $POLMAN_PLANS/test/ $WORKSPACE/archives/test/ -cp -rf $POLMAN_PLANS/sdnc/ $WORKSPACE/archives/sdnc/ - -#Make the env vars availble to the robot scripts -ROBOT_VARIABLES="-b debug.log -v ARCHIVES:${ARCHIVES}" - diff --git a/plans/ccsdk-oran/polmansuite/teardown.sh b/plans/ccsdk-oran/polmansuite/teardown.sh deleted file mode 100755 index c619d0c9..00000000 --- a/plans/ccsdk-oran/polmansuite/teardown.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -# ============LICENSE_START=============================================== -# Copyright (C) 2021 Nordix Foundation. All rights reserved. -# ======================================================================== -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END================================================= - -# All started containers stopped and removed by the test case -docker stop $(docker ps -aq) -docker system prune -f diff --git a/plans/ccsdk-oran/polmansuite/test/pms_a1sim.sh b/plans/ccsdk-oran/polmansuite/test/pms_a1sim.sh deleted file mode 100755 index f3344cf1..00000000 --- a/plans/ccsdk-oran/polmansuite/test/pms_a1sim.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash - -# ============LICENSE_START=============================================== -# Copyright (C) 2021 Nordix Foundation. All rights reserved. -# ======================================================================== -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============LICENSE_END================================================= - -SHELL_FOLDER=$(cd "$(dirname "$0")";pwd) -docker stop $(docker ps -aq) -docker system prune -f - -cd ${SHELL_FOLDER}/../config -cp application_configuration.json.nosdnc application_configuration.json - -cd ${SHELL_FOLDER}/../ -docker-compose up -d - -checkStatus(){ - for i in {1..20}; do - res=$($1) - echo "$res" - expect=$2 - if [ "$res" == "$expect" ]; then - echo -e "$3 is alive!\n" - break; - else - sleep $i - fi - done -} -# Healthcheck docker containers - -# check SIM1 status -echo "check SIM1 status:" -checkStatus "curl -skw %{http_code} http://localhost:30001/" "OK200" "SIM1" - -# check SIM2 status -echo "check SIM2 status:" -checkStatus "curl -skw %{http_code} http://localhost:30003/" "OK200" "SIM2" - -# check SIM3 status -echo "check SIM3 status:" -checkStatus "curl -skw %{http_code} http://localhost:30005/" "OK200" "SIM3" - -# check PMS status -echo "check PMS status:" -checkStatus "curl -skw %{http_code} http://localhost:8081/status" "hunky dory200" "PMS" - -cd ${SHELL_FOLDER}/../data -./preparePmsData.sh - diff --git a/plans/ccsdk-oran/polmansuite/test/pms_a1sim_sdnc.sh b/plans/ccsdk-oran/polmansuite/test/pms_a1sim_sdnc.sh deleted file mode 100755 index 5bf81b3f..00000000 --- a/plans/ccsdk-oran/polmansuite/test/pms_a1sim_sdnc.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -# ============LICENSE_START=============================================== -# Copyright (C) 2021 Nordix Foundation. All rights reserved. -# ======================================================================== -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============LICENSE_END================================================= - -SHELL_FOLDER=$(cd "$(dirname "$0")";pwd) -docker stop $(docker ps -aq) -docker system prune -f - -cd ${SHELL_FOLDER}/../config -cp application_configuration.json.sdnc application_configuration.json - -cd ${SHELL_FOLDER}/../ -docker-compose -f docker-compose.yml -f sdnc/docker-compose.yml up -d - -checkStatus(){ - for i in {1..20}; do - res=$($1) - echo "$res" - expect=$2 - if [ "$res" == "$expect" ]; then - echo -e "$3 is alive!\n" - break; - else - sleep $i - fi - done -} -# Healthcheck docker containers - -# check SIM1 status -echo "check SIM1 status:" -checkStatus "curl -skw %{http_code} http://localhost:30001/" "OK200" "SIM1" - -# check SIM2 status -echo "check SIM2 status:" -checkStatus "curl -skw %{http_code} http://localhost:30003/" "OK200" "SIM2" - -# check SIM3 status -echo "check SIM3 status:" -checkStatus "curl -skw %{http_code} http://localhost:30005/" "OK200" "SIM3" - -# check PMS status -echo "check PMS status:" -checkStatus "curl -skw %{http_code} http://localhost:8081/status" "hunky dory200" "PMS" - -# check SDNC status -echo "check SDNC status:" -checkStatus "curl -s -o /dev/null -I -w %{http_code} http://localhost:8282/apidoc/explorer/" "200" "SDNC" - -cd ${SHELL_FOLDER}/../data -./preparePmsData.sh - diff --git a/plans/ccsdk-oran/polmansuite/testplan.txt b/plans/ccsdk-oran/polmansuite/testplan.txt deleted file mode 100644 index 29191bd8..00000000 --- a/plans/ccsdk-oran/polmansuite/testplan.txt +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash - -# ============LICENSE_START=============================================== -# Copyright (C) 2020 Nordix Foundation. All rights reserved. -# ======================================================================== -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============LICENSE_END================================================= - - -# Test suites are relative paths under [integration/csit.git]/tests/. -# Place the suites in run order. -# Temporary change to trigger verify job. -ccsdk-oran/polmansuite - diff --git a/plans/dcaegen2-collectors-datafile/Functional-suite/setup.sh b/plans/dcaegen2-collectors-datafile/Functional-suite/setup.sh index 02572d2a..f04b2303 100644 --- a/plans/dcaegen2-collectors-datafile/Functional-suite/setup.sh +++ b/plans/dcaegen2-collectors-datafile/Functional-suite/setup.sh @@ -59,7 +59,7 @@ cd ../ftpes-sftp-server docker build -t ftpes_vsftpd:latest -f Dockerfile-ftpes . cd ../http-https-server -docker build -t http_httpd:latest -f Dockerfile-http . +docker build -t http_https_httpd:latest -f Dockerfile-http-https . #All containers will be started and stopped via the robot tests. 
diff --git a/plans/dcaegen2-collectors-datafile/Functional-suite/testplan.txt b/plans/dcaegen2-collectors-datafile/Functional-suite/testplan.txt index 065deb10..25a7d8c2 100755 --- a/plans/dcaegen2-collectors-datafile/Functional-suite/testplan.txt +++ b/plans/dcaegen2-collectors-datafile/Functional-suite/testplan.txt @@ -3,3 +3,4 @@ dcaegen2-collectors-datafile/testsuites/Functional-Single-File-Ftp-suite dcaegen2-collectors-datafile/testsuites/Functional-Single-File-Http-suite dcaegen2-collectors-datafile/testsuites/Strict-Host-Checking-suite +dcaegen2-collectors-datafile/testsuites/HTTP-Various-Connection-Types-suite diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/addSubscriber.json b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/addSubscriber.json new file mode 100644 index 00000000..0666a7d5 --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/addSubscriber.json @@ -0,0 +1,20 @@ +{ + "delivery":{ + "url":"http://dcae-pm-mapper:8081/delivery", + "user":"username", + "password":"password", + "use100":true + }, + "follow_redirect":false, + "metadataOnly":false, + "suspend":false, + "groupid":0, + "links":{ + "self": "https://dmaap-dr-prov/subscribe/1", + "log": "https://dmaap-dr-prov/feedlog/1", + "feed": "https://dmaap-dr-prov/feed/1" + }, + "subscriber":"pmmapper", + "decompress":true, + "privilegedSubscriber": true + } \ No newline at end of file diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/cbs.json b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/cbs.json new file mode 100644 index 00000000..a29956fb --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/cbs.json @@ -0,0 +1,7 @@ +{ + "ID": "cbs", + "Name": "config_binding_service", + "Tags": ["cbs"], + "Address": "ipaddress", + "Port": 10000 +} \ No newline at end of file diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/cert.jks b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/cert.jks new file mode 100644 index 00000000..33dc9a31 Binary files /dev/null and b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/cert.jks differ diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config.json b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config.json new file mode 100644 index 00000000..3f1009d4 --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config.json @@ -0,0 +1,35 @@ +{ + "pm-mapper-filter": { "filters":[]}, + "key_store_path": "/opt/app/pm-mapper/etc/certs/cert.jks", + "key_store_pass_path": "/opt/app/pm-mapper/etc/certs/jks.pass", + "trust_store_path": "/opt/app/pm-mapper/etc/certs/trust.jks", + "trust_store_pass_path": "/opt/app/pm-mapper/etc/certs/trust.pass", + "dmaap_dr_delete_endpoint": "https://dmaap-dr-node:8443/delete", + "dmaap_dr_feed_name": "1", + "aaf_identity": "aaf_admin@people.osaaf.org", + "aaf_password": "demo123456!", + "enable_http": true, + "streams_publishes": { + "dmaap_publisher": { + "type": "message_router", + "dmaap_info": { + "topic_url": "http://message-router:3904/events/org.onap.dmaap.mr.VES_PM", + "client_role": "org.onap.dcae.pmPublisher", + "location": "csit-pmmapper", + "client_id": "1562763644939" + } + } + }, + "streams_subscribes": { + "dmaap_subscriber": { + "type": "data_router", + "dmaap_info": { + "username": "username", + "password": "password", + "location": "csit-pmmapper", + "delivery_url": 
"http://dcae-pm-mapper:8081/delivery", + "subscriber_id": 1 + } + } + } +} \ No newline at end of file diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/createFeed.json b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/createFeed.json new file mode 100644 index 00000000..f93633dc --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/createFeed.json @@ -0,0 +1,18 @@ +{ + "name": "PM Mapper Feed", + "version": "m1.0", + "description": "PM Mapper Feed", + "business_description": "PM Mapper", + "suspend": false, + "deleted": false, + "changeowner": true, + "authorization": { + "classification": "unclassified", + "endpoint_addrs": [], + "endpoint_ids": [ + { + "password": "pmmapper", + "id": "pmmapper" + }] + } + } \ No newline at end of file diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/jks.pass b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/jks.pass new file mode 100644 index 00000000..ae8f7e72 --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/jks.pass @@ -0,0 +1 @@ +Er1tmip;T4w[%1}YE?x{fN9v \ No newline at end of file diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/mrserver.js b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/mrserver.js new file mode 100644 index 00000000..cc845712 --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/mrserver.js @@ -0,0 +1,28 @@ +var httpServer = function () { + var http = require('http'), + url = require('url'), + fs = require('fs'), + + start = function (port) { + var server = http.createServer(function (req, res) { + processHttpRequest(res); + }); + server.listen(port, function () { + console.log('Listening on ' + port + '...'); + }); + }, + + processHttpRequest = function (res) { + res.writeHead(200, {'Content-Type': 'text/plain'}); + console.log('received message'); + setTimeout(() => { + res.end('Published Successfully.\n'); + }, 100); + }; + + return { + start: start + } +}(); + +httpServer.start(3904); diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/node.properties b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/node.properties new file mode 100644 index 00000000..7abaf60a --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/node.properties @@ -0,0 +1,89 @@ +#------------------------------------------------------------------------------- +# ============LICENSE_START================================================== +# * org.onap.dmaap +# * =========================================================================== +# * Copyright � 2017 AT&T Intellectual Property. All rights reserved. +# * =========================================================================== +# * Modifications Copyright (C) 2021 Nokia Intellectual Property +# * =========================================================================== +# * Licensed under the Apache License, Version 2.0 (the "License"); +# * you may not use this file except in compliance with the License. +# * You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# * See the License for the specific language governing permissions and +# * limitations under the License. +# * ============LICENSE_END==================================================== +# * +# * ECOMP is a trademark and service mark of AT&T Intellectual Property. +# * +#------------------------------------------------------------------------------- +# +# Configuration parameters set at startup for the DataRouter node +# +# URL to retrieve dynamic configuration +ProvisioningURL = https://dmaap-dr-prov:8443/internal/prov +# +# URL to upload PUB/DEL/EXP logs +LogUploadURL = https://dmaap-dr-prov:8443/internal/logs +# +# The port number for http as seen within the server +IntHttpPort = 8080 +# +# The port number for https as seen within the server +IntHttpsPort = 8443 +# +# The external port number for https taking port mapping into account +ExtHttpsPort = 443 +# +# The minimum interval between fetches of the dynamic configuration from the provisioning server +MinProvFetchInterval = 10000 +# +# The minimum interval between saves of the redirection data file +MinRedirSaveInterval = 10000 +# +# The path to the directory where log files are stored +LogDir = /opt/app/datartr/logs +# +# The retention interval (in days) for log files +LogRetention = 30 +# +# The path to the directories where data and meta data files are stored +SpoolDir = /opt/app/datartr/spool +# +# The path to the redirection data file +RedirectionFile = etc/redirections.dat +# +# The type of keystore for https +KeyStoreType = PKCS12 +# +# The type of truststore for https +TrustStoreType = jks +# +# The path to the file used to trigger an orderly shutdown +QuiesceFile = etc/SHUTDOWN +# +# The key used to generate passwords for node to node transfers +NodeAuthKey = Node123! +# +# DR_NODE DEFAULT ENABLED TLS PROTOCOLS +NodeHttpsProtocols = TLSv1.1|TLSv1.2 +# +# AAF type to generate permission string +AAFType = org.onap.dmaap-dr.feed +# +# AAF default instance to generate permission string - default should be legacy +AAFInstance = legacy +# +# AAF action to generate permission string - default should be publish +AAFAction = publish +# +# AAF CADI enabled flag +CadiEnabled = false +# +# AAF Props file path +AAFPropsFilePath = /opt/app/osaaf/local/org.onap.dmaap-dr.props diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/provserver.properties b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/provserver.properties new file mode 100644 index 00000000..cd333efb --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/provserver.properties @@ -0,0 +1,62 @@ +#------------------------------------------------------------------------------- +# ============LICENSE_START================================================== +# * org.onap.dmaap +# * =========================================================================== +# * Copyright � 2017 AT&T Intellectual Property. All rights reserved. +# * =========================================================================== +# * Modifications Copyright (C) 2021 Nokia Intellectual Property +# * =========================================================================== +# * Licensed under the Apache License, Version 2.0 (the "License"); +# * you may not use this file except in compliance with the License. 
+# * You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. +# * ============LICENSE_END==================================================== +# * +# * ECOMP is a trademark and service mark of AT&T Intellectual Property. +# * +#------------------------------------------------------------------------------- + +#Jetty Server properties +org.onap.dmaap.datarouter.provserver.http.port = 8080 +org.onap.dmaap.datarouter.provserver.https.port = 8443 +org.onap.dmaap.datarouter.provserver.https.relaxation = true + +org.onap.dmaap.datarouter.provserver.aafprops.path = /opt/app/osaaf/local/org.onap.dmaap-dr.props + +org.onap.dmaap.datarouter.provserver.accesslog.dir = /opt/app/datartr/logs +org.onap.dmaap.datarouter.provserver.spooldir = /opt/app/datartr/spool +org.onap.dmaap.datarouter.provserver.dbscripts = /opt/app/datartr/etc/misc +org.onap.dmaap.datarouter.provserver.logretention = 30 + +#DMAAP-597 (Tech Dept) REST request source IP auth +# relaxation to accommodate OOM kubernetes deploy +org.onap.dmaap.datarouter.provserver.isaddressauthenabled = false + +#Localhost address config +org.onap.dmaap.datarouter.provserver.localhost = 127.0.0.1 + +# Database access +org.onap.dmaap.datarouter.db.driver = org.mariadb.jdbc.Driver +org.onap.dmaap.datarouter.db.url = jdbc:mariadb://datarouter-mariadb:3306/datarouter +org.onap.dmaap.datarouter.db.login = datarouter +org.onap.dmaap.datarouter.db.password = datarouter + +# PROV - DEFAULT ENABLED TLS PROTOCOLS +org.onap.dmaap.datarouter.provserver.https.include.protocols = TLSv1.1|TLSv1.2 + +# AAF config +org.onap.dmaap.datarouter.provserver.cadi.enabled = false + +org.onap.dmaap.datarouter.provserver.passwordencryption = PasswordEncryptionKey#@$%^&1234# +org.onap.dmaap.datarouter.provserver.aaf.feed.type = org.onap.dmaap-dr.feed +org.onap.dmaap.datarouter.provserver.aaf.sub.type = org.onap.dmaap-dr.sub +org.onap.dmaap.datarouter.provserver.aaf.instance = legacy +org.onap.dmaap.datarouter.provserver.aaf.action.publish = publish +org.onap.dmaap.datarouter.provserver.aaf.action.subscribe = subscribe diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/trust.jks b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/trust.jks new file mode 100644 index 00000000..679c95a5 Binary files /dev/null and b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/trust.jks differ diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/trust.pass b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/trust.pass new file mode 100644 index 00000000..d3d01b0a --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/trust.pass @@ -0,0 +1 @@ +583Ls;XF(qDQu3p!L22gyh1t \ No newline at end of file diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/docker-compose.yml b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/docker-compose.yml new file mode 100644 index 00000000..66946ea0 --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/docker-compose.yml @@ -0,0 +1,119 @@ +version: '3.3' + +services: + datarouter-prov: + image: 
nexus3.onap.org:10001/onap/dmaap/datarouter-prov + container_name: datarouter-prov + hostname: dmaap-dr-prov + ports: + - "443:8443" + - "8443:8443" + - "8080:8080" + volumes: + - /var/tmp/provserver.properties:/opt/app/datartr/etc/provserver.properties + depends_on: + - mariadb + healthcheck: + test: ["CMD", "curl", "-f", "http://dmaap-dr-prov:8080/internal/prov"] + interval: 10s + timeout: 10s + retries: 5 + networks: + pmmapper-network: + ipv4_address: $DR_PROV_IP + extra_hosts: + - "dmaap-dr-node:$DR_NODE_IP" + - "dcae-pm-mapper:$PMMAPPER_IP" + + datarouter-node: + image: nexus3.onap.org:10001/onap/dmaap/datarouter-node + container_name: datarouter-node + hostname: dmaap-dr-node + ports: + - "9443:8443" + - "9090:8080" + volumes: + - /var/tmp/node.properties:/opt/app/datartr/etc/node.properties + depends_on: + - datarouter-prov + networks: + pmmapper-network: + ipv4_address: $DR_NODE_IP + extra_hosts: + - "dmaap-dr-prov:$DR_PROV_IP" + - "dcae-pm-mapper:$PMMAPPER_IP" + + node: + image: nexus3.onap.org:10001/node:10-slim + container_name: mr-simulator + volumes: + - /var/tmp/mrserver.js:/tmp/mrserver.js + command: + nodejs /tmp/mrserver.js + networks: + pmmapper-network: + ipv4_address: $NODE_IP + + mariadb: + image: nexus3.onap.org:10001/mariadb:10.2.14 + container_name: mariadb + ports: + - "3306:3306" + environment: + MYSQL_ROOT_PASSWORD: datarouter + MYSQL_DATABASE: datarouter + MYSQL_USER: datarouter + MYSQL_PASSWORD: datarouter + healthcheck: + test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost", "--silent"] + interval: 10s + timeout: 30s + retries: 5 + networks: + pmmapper-network: + ipv4_address: $MARIADB_IP + + consul: + container_name: consul + image: nexus3.onap.org:10001/consul:latest + networks: + pmmapper-network: + ipv4_address: $CONSUL_IP + + cbs: + container_name: cbs + image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding.app-app:latest + environment: + CONSUL_HOST: $CONSUL_IP + networks: + pmmapper-network: + ipv4_address: $CBS_IP + + pmmapper: + container_name: pmmapper + image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.services.pm-mapper:latest + ports: + - "8081:8081" + volumes: + - /var/tmp/:/opt/app/pm-mapper/etc/certs/ + depends_on: + - datarouter-prov + environment: + CONFIG_BINDING_SERVICE_SERVICE_HOST: $CBS_IP + CONFIG_BINDING_SERVICE_SERVICE_PORT: 10000 + HOSTNAME: pmmapper + networks: + pmmapper-network: + ipv4_address: $PMMAPPER_IP + extra_hosts: + - "dmaap-dr-node:$DR_NODE_IP" + - "message-router:$NODE_IP" + + +networks: + pmmapper-network: + driver: bridge + ipam: + config: + - subnet: 172.18.0.0/16 + diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/setup.sh b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/setup.sh new file mode 100644 index 00000000..f314055e --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/setup.sh @@ -0,0 +1,101 @@ +#!/bin/bash +# Place the scripts in run order: +source ${SCRIPTS}/common_functions.sh + +docker login -u docker -p docker nexus3.onap.org:10001 + +TEST_PLANS_DIR=$WORKSPACE/plans/dcaegen2-pmmapper/files-processing-config-pmmapper + +export GATEWAY_IP=172.18.0.1 +export DR_NODE_IP=172.18.0.2 +export DR_PROV_IP=172.18.0.3 +export CONSUL_IP=172.18.0.4 +export CBS_IP=172.18.0.5 +export MARIADB_IP=172.18.0.6 +export NODE_IP=172.18.0.7 +export PMMAPPER_IP=172.18.0.8 + +for asset in provserver.properties node.properties cbs.json mrserver.js cert.jks jks.pass trust.jks trust.pass; do + cp $TEST_PLANS_DIR/assets/${asset} /var/tmp/ +done + +sed -i 
's/datarouter-mariadb/'$MARIADB_IP'/g' /var/tmp/provserver.properties +sed -i 's/ipaddress//g' /var/tmp/cbs.json + +# ------------------------------------ +#Prepare enviroment for client +#install docker sdk +echo "Uninstall docker-py and reinstall docker." +pip uninstall -y docker-py +pip uninstall -y docker +pip install -U docker==2.7.0 + +docker-compose -f $TEST_PLANS_DIR/docker-compose.yml up -d mariadb consul cbs node + +echo "Waiting for MariaDB to come up healthy..." +for i in {1..30}; do + mariadb_state=$(docker inspect --format='{{json .State.Health.Status}}' mariadb) + if [ $mariadb_state = '"healthy"' ] + then + break + else + sleep 2 + fi +done +[ "$mariadb_state" != '"healthy"' ] && echo "Error: MariaDB container state not healthy" && exit 1 + +docker-compose -f $TEST_PLANS_DIR/docker-compose.yml up -d datarouter-node datarouter-prov + +unset http_proxy +unset https_proxy +curl --request PUT --data @/var/tmp/cbs.json http://$CONSUL_IP:8500/v1/agent/service/register +curl 'http://'$CONSUL_IP':8500/v1/kv/pmmapper?dc=dc1' -X PUT \ + -H 'Accept: application/json' \ + -H 'Content-Type: application/json' \ + -H 'X-Requested-With: XMLHttpRequest' \ + --data @$TEST_PLANS_DIR/assets/config.json + +docker-compose -f $TEST_PLANS_DIR/docker-compose.yml up -d pmmapper +sleep 2 + +# Wait for initialization of Docker container for datarouter-node, datarouter-prov and mariadb, Consul, CBS +containers_ok=false +for i in {1..5}; do + if [ $(docker inspect --format '{{ .State.Running }}' datarouter-node) ] && \ + [ $(docker inspect --format '{{ .State.Running }}' datarouter-prov) ] && \ + [ $(docker inspect --format '{{ .State.Running }}' mariadb) ] && \ + [ $(docker inspect --format '{{ .State.Running }}' mr-simulator) ] && \ + [ $(docker inspect --format '{{ .State.Running }}' consul) ] && \ + [ $(docker inspect --format '{{ .State.Running }}' cbs) ] && \ + [ $(docker inspect --format '{{ .State.Running }}' pmmapper) ] + then + echo "All required docker containers are up." + containers_ok=true + break + else + sleep $i + fi +done +[ "$containers_ok" = "false" ] && echo "Error: required container not running." && exit 1 + +# Data Router Configuration. 
+docker exec -i datarouter-prov sh -c \ + "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/NODES?val=dmaap-dr-node\|$GATEWAY_IP" +docker exec -i datarouter-prov sh -c \ + "curl -k -X PUT https://$DR_PROV_IP:8443/internal/api/PROV_AUTH_ADDRESSES?val=dmaap-dr-prov\|$GATEWAY_IP" + +# Create PM Mapper feed and create PM Mapper subscriber on data router +curl -v -X POST -H "Content-Type:application/vnd.dmaap-dr.feed" -H "X-DMAAP-DR-ON-BEHALF-OF:pmmapper" \ + --data-ascii @$TEST_PLANS_DIR/assets/createFeed.json \ + --post301 --location-trusted -k https://${DR_PROV_IP}:8443 +curl -v -X POST -H "Content-Type:application/vnd.dmaap-dr.subscription" -H "X-DMAAP-DR-ON-BEHALF-OF:pmmapper" \ + --data-ascii @$TEST_PLANS_DIR/assets/addSubscriber.json \ + --post301 --location-trusted -k https://${DR_PROV_IP}:8443/subscribe/1 + +docker cp pmmapper:/var/log/ONAP/dcaegen2/services/pm-mapper/pm-mapper_output.log /tmp/pmmapper.log +sleep 10 +docker exec -it datarouter-prov sh -c "curl http://dmaap-dr-node:8080/internal/fetchProv" +curl -k https://$DR_PROV_IP:8443/internal/prov + +#Pass any variables required by Robot test suites in ROBOT_VARIABLES +ROBOT_VARIABLES="-v CONSUL_IP:${CONSUL_IP} -v DR_PROV_IP:${DR_PROV_IP} -v DMAAP_MR_IP:${DMAAP_MR_IP} -v CBS_IP:${CBS_IP} -v PMMAPPER_IP:${PMMAPPER_IP} -v DR_NODE_IP:${DR_NODE_IP} -v NODE_IP:${NODE_IP}" diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/teardown.sh b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/teardown.sh new file mode 100644 index 00000000..ffa2da60 --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/teardown.sh @@ -0,0 +1,8 @@ +#!/bin/bash +echo "Starting teardown script" +TEST_PLANS_DIR=$WORKSPACE/plans/dcaegen2-pmmapper/files-processing-config-pmmapper +mkdir -p $WORKSPACE/archives +docker exec pmmapper /bin/sh -c "cat /var/log/ONAP/dcaegen2/services/pm-mapper/pm-mapper_output.log" +kill-instance.sh pmmapper +docker-compose -f $TEST_PLANS_DIR/docker-compose.yml logs > $WORKSPACE/archives/files-processing-config-pmmapper-docker-compose.log +docker-compose -f $TEST_PLANS_DIR/docker-compose.yml down -v diff --git a/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/testplan.txt b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/testplan.txt new file mode 100644 index 00000000..543973d0 --- /dev/null +++ b/plans/dcaegen2-pmmapper/files-processing-config-pmmapper/testplan.txt @@ -0,0 +1,3 @@ +# Test suites are relative paths under [integration/csit.git]/tests/. +# Place the suites in run order. +dcaegen2-pmmapper/files-processing-config-pmmapper diff --git a/plans/policy/xacml-pdp/setup.sh b/plans/policy/xacml-pdp/setup.sh index 7f557d13..6842e634 100644 --- a/plans/policy/xacml-pdp/setup.sh +++ b/plans/policy/xacml-pdp/setup.sh @@ -1,6 +1,6 @@ #!/bin/bash # ============LICENSE_START======================================================= -# Copyright (C) 2020 AT&T Intellectual Property. All rights reserved. +# Copyright (C) 2020-2021 AT&T Intellectual Property. All rights reserved. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -39,14 +39,16 @@ unset http_proxy https_proxy POLICY_API_IP=`get-instance-ip.sh policy-api` MARIADB_IP=`get-instance-ip.sh mariadb` POLICY_PDPX_IP=`get-instance-ip.sh policy-xacml-pdp` -DMAAP_IP=`get-instance-ip.sh policy.api.simpledemo.onap.org` +SIM_IP=`get-instance-ip.sh policy.api.simpledemo.onap.org` POLICY_PAP_IP=`get-instance-ip.sh policy-pap` +export SIM_IP + echo PDP IP IS ${POLICY_PDPX_IP} echo API IP IS ${POLICY_API_IP} echo PAP IP IS ${POLICY_PAP_IP} echo MARIADB IP IS ${MARIADB_IP} -echo DMAAP_IP IS ${DMAAP_IP} +echo SIM_IP IS ${SIM_IP} # wait for the app to start up ${SCRIPTS}/policy/wait_for_port.sh ${POLICY_PDPX_IP} 6969 @@ -59,3 +61,4 @@ ROBOT_VARIABLES="${ROBOT_VARIABLES} -v DATA2:${DATA2}" ROBOT_VARIABLES="${ROBOT_VARIABLES} -v POLICY_PDPX_IP:${POLICY_PDPX_IP}" ROBOT_VARIABLES="${ROBOT_VARIABLES} -v POLICY_API_IP:${POLICY_API_IP}" ROBOT_VARIABLES="${ROBOT_VARIABLES} -v POLICY_PAP_IP:${POLICY_PAP_IP}" +ROBOT_VARIABLES="${ROBOT_VARIABLES} -v SIM_IP:${SIM_IP}" diff --git a/plans/sdnc/healthcheck/setup.sh b/plans/sdnc/healthcheck/setup.sh index 99753dfb..2934cd58 100755 --- a/plans/sdnc/healthcheck/setup.sh +++ b/plans/sdnc/healthcheck/setup.sh @@ -26,6 +26,39 @@ export DMAAP_TOPIC=AUTO export DOCKER_IMAGE_VERSION=2.1-STAGING-latest export CCSDK_DOCKER_IMAGE_VERSION=1.1-STAGING-latest +# Set credentials +export MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD:-mySecretPassword} +export MYSQL_USER=${MYSQL_USER:-sdnc} +export MYSQL_PASSWORD=${MYSQL_PASSWORD:-test123} +export MYSQL_DATABASE=${MYSQL_DATABASE:-sdncdb} +export ODL_USER=${ODL_USER:-admin} +export ODL_PASSWORD=${ODL_PASSWORD:-Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U} +export ODL_ADMIN_USER=${ODL_ADMIN_USER:-${ODL_USER}} +export ODL_ADMIN_PASSWORD=${ODL_ADMIN_PASSWORD:-${ODL_PASSWORD}} +export DMAAP_USER=${DMAAP_USER:-admin} +export DMAAP_PASSWORD=${DMAAP_PASSWORD:-admin} +export DMAAP_AUTHKEY=${DMAAP_AUTHKEY:-""} +export AAI_TRUSTSTORE_PASSWORD=${AAI_TRUSTSTORE_PASSWORD:-changeit} +export AAI_CLIENT_NAME=${AAI_CLIENT_NAME:-sdnc@sdnc.onap.org} +export AAI_CLIENT_PASSWORD=${AAI_CLIENT_PASSWORD:-demo123456!} +export ANSIBLE_TRUSTSTORE_PASSWORD=${ANSIBLE_TRUSTSTURE_PASSWORD:-changeit} +export HONEYCOMB_USER=${HONEYCOMB_USER:-admin} +export HONEYCOMB_PASSWORD=${HONEYCOMB_PASSWORD:-admin} +export TRUSTSTORE_PASSWORD=${TRUSTSTORE_PASSWORD:-changeit} +export KEYSTORE_PASSWORD=${KEYSTORE_PASSWORD:-adminadmin} +export NENG_USER=${NENG_USER:-ccsdkapps} +export NENG_PASSWORD=${NENG_PASSWORD:-ccsdkapps} +export SO_USER=${SO_USER:-sdncaBpmn} +export SO_PASSWORD=${SO_PASSWORD:-password1$} +export CDS_USER=${CDS_USER:-ccsdkapps} +export CDS_PASSWORD=${CDS_PASSWORD:-ccsdkapps} +export ANSIBLE_USER=${ANSIBLE_USER:-sdnc} +export ANSIBLE_PASSWORD=${ANSIBLE_PASSWORD:-sdnc} +export SQL_CRYPTKEY=${SQL_CRYPTKEY:-fakECryptKey} +export ASDC_USER=${ASDC_USER:-sdnc} +export ASDC_PASSWORD=${ASDC_PASSWORD:-Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U} + + export MTU=$(/sbin/ifconfig | grep MTU | sed 's/.*MTU://' | sed 's/ .*//' | sort -n | head -1) if [ "$MTU" == "" ]; then diff --git a/plans/sdnc/sdnc_netconf_tls_post_deploy/sdnc-csit.env b/plans/sdnc/sdnc_netconf_tls_post_deploy/sdnc-csit.env index 45a0a204..d10b9b1c 100644 --- a/plans/sdnc/sdnc_netconf_tls_post_deploy/sdnc-csit.env +++ b/plans/sdnc/sdnc_netconf_tls_post_deploy/sdnc-csit.env @@ -2,7 +2,7 @@ GERRIT_BRANCH=master NEXUS_USERNAME=docker NEXUS_PASSWD=docker SDNC_CONTAINER_NAME=sdnc -SDNC_IMAGE_TAG=1.8.3-STAGING-latest +SDNC_IMAGE_TAG=2.1-STAGING-latest 
NEXUS_DOCKER_REPO=nexus3.onap.org:10001 CLIENT_CONTAINER_NAME=CertServiceClient SDNC_CERT_PATH=${SCRIPTS}/sdnc/sdnc/certs @@ -13,4 +13,36 @@ AAF_CERTSERVICE_SCRIPTS_PATH=${SCRIPTS}/sdnc/certservice/scripts TEMP_DIR_PATH=${WORKSPACE}/tests/sdnc/sdnc_netconf_tls_post_deploy/tmp NETCONF_CONFIG_PATH=${SCRIPTS}/sdnc/netconf-pnp-simulator/netconf-config AAF_INITIAL_CERTS=${WORKSPACE}/plans/sdnc/sdnc_netconf_tls_post_deploy/certs -AAF_CERTSERVICE_CONFIG_PATH=${SCRIPTS}/sdnc/certservice/config/cmpServers.json \ No newline at end of file +AAF_CERTSERVICE_CONFIG_PATH=${SCRIPTS}/sdnc/certservice/config/cmpServers.json + +# Set vars with default credentials +export MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD:-mySecretPassword} +export MYSQL_USER=${MYSQL_USER:-sdnc} +export MYSQL_PASSWORD=${MYSQL_PASSWORD:-test123} +export MYSQL_DATABASE=${MYSQL_DATABASE:-sdncdb} +export ODL_USER=${ODL_USER:-admin} +export ODL_PASSWORD=${ODL_PASSWORD:-Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U} +export ODL_ADMIN_USER=${ODL_ADMIN_USER:-${ODL_USER}} +export ODL_ADMIN_PASSWORD=${ODL_ADMIN_PASSWORD:-${ODL_PASSWORD}} +export DMAAP_USER=${DMAAP_USER:-admin} +export DMAAP_PASSWORD=${DMAAP_PASSWORD:-admin} +export DMAAP_AUTHKEY=${DMAAP_AUTHKEY:-""} +export AAI_TRUSTSTORE_PASSWORD=${AAI_TRUSTSTORE_PASSWORD:-changeit} +export AAI_CLIENT_NAME=${AAI_CLIENT_NAME:-sdnc@sdnc.onap.org} +export AAI_CLIENT_PASSWORD=${AAI_CLIENT_PASSWORD:-demo123456!} +export ANSIBLE_TRUSTSTORE_PASSWORD=${ANSIBLE_TRUSTSTURE_PASSWORD:-changeit} +export HONEYCOMB_USER=${HONEYCOMB_USER:-admin} +export HONEYCOMB_PASSWORD=${HONEYCOMB_PASSWORD:-admin} +export TRUSTSTORE_PASSWORD=${TRUSTSTORE_PASSWORD:-changeit} +export KEYSTORE_PASSWORD=${KEYSTORE_PASSWORD:-adminadmin} +export NENG_USER=${NENG_USER:-ccsdkapps} +export NENG_PASSWORD=${NENG_PASSWORD:-ccsdkapps} +export SO_USER=${SO_USER:-sdncaBpmn} +export SO_PASSWORD=${SO_PASSWORD:-password1$} +export CDS_USER=${CDS_USER:-ccsdkapps} +export CDS_PASSWORD=${CDS_PASSWORD:-ccsdkapps} +export ANSIBLE_USER=${ANSIBLE_USER:-sdnc} +export ANSIBLE_PASSWORD=${ANSIBLE_PASSWORD:-sdnc} +export SQL_CRYPTKEY=${SQL_CRYPTKEY:-fakECryptKey} +export ASDC_USER=${ASDC_USER:-sdnc} +export ASDC_PASSWORD=${ASDC_PASSWORD:-Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U} diff --git a/plans/sdnc/sdnc_netconf_tls_post_deploy/setup.sh b/plans/sdnc/sdnc_netconf_tls_post_deploy/setup.sh index a2020aee..6f4e547e 100644 --- a/plans/sdnc/sdnc_netconf_tls_post_deploy/setup.sh +++ b/plans/sdnc/sdnc_netconf_tls_post_deploy/setup.sh @@ -2,6 +2,7 @@ # # ============LICENSE_START======================================================= # Copyright (C) 2020 Nordix Foundation. +# Modification copyright (C) 2021 Samsung Electronics, Co., Ltd. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -29,9 +30,6 @@ chmod +x "${WORKSPACE}"/tests/sdnc/sdnc_netconf_tls_post_deploy/libraries/config # Export temp directory export TEMP_DIR_PATH=${TEMP_DIR_PATH} -# Create temp directory to bind with docker containers -mkdir -m 755 -p "${WORKSPACE}"/tests/sdnc/sdnc_netconf_tls_post_deploy/certs -mkdir -m 755 -p "${WORKSPACE}"/tests/sdnc/sdnc_netconf_tls_post_deploy/cert-data export MTU=$(/sbin/ifconfig | grep MTU | sed 's/.*MTU://' | sed 's/ .*//' | sort -n | head -1) @@ -59,55 +57,57 @@ pip install pyjks # Disable Proxy - for local run unset http_proxy https_proxy -# Export AAF Certservice config path -export AAF_INITIAL_CERTS -export EJBCA_CERTPROFILE_PATH -export AAF_CERTSERVICE_CONFIG_PATH -export AAF_CERTSERVICE_SCRIPTS_PATH -export CERT_PROFILE=${EJBCA_CERTPROFILE_PATH} -export SCRIPTS_PATH=${AAF_CERTSERVICE_SCRIPTS_PATH} -export CONFIGURATION_PATH=${AAF_CERTSERVICE_CONFIG_PATH} - -# Generate Keystores, Truststores, Certificates and Keys -make all -C ./certs/ - -cp "${WORKSPACE}"/plans/sdnc/sdnc_netconf_tls_post_deploy/certs/root.crt "${WORKSPACE}"/tests/sdnc/sdnc_netconf_tls_post_deploy/certs/root.crt -openssl pkcs12 -in "${WORKSPACE}"/plans/sdnc/sdnc_netconf_tls_post_deploy/certs/certServiceServer-keystore.p12 -clcerts -nokeys -password pass:secret | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p' >"${WORKSPACE}"/tests/sdnc/sdnc_netconf_tls_post_deploy/certs/certServiceServer.crt -openssl pkcs12 -in "${WORKSPACE}"/plans/sdnc/sdnc_netconf_tls_post_deploy/certs/certServiceServer-keystore.p12 -nocerts -nodes -password pass:secret | sed -ne '/-BEGIN PRIVATE KEY-/,/-END PRIVATE KEY-/p' >"${WORKSPACE}"/tests/sdnc/sdnc_netconf_tls_post_deploy/certs/certServiceServer.key - -echo "Generated KeyStores, Server Certificate and Key" - -# Start EJBCA, AAF-CertService Containers with docker-compose and configuration from docker-compose.yml -docker-compose -f "${SCRIPTS}"/sdnc/certservice/docker-compose.yml up -d - -# Check if AAF-Certservice Service is healthy and ready -AAFCERT_IP='none' -for i in {1..9}; do - AAFCERT_IP=$(get-instance-ip.sh aaf-cert-service) - RESP_CODE=$(curl -s https://localhost:8443/actuator/health --cacert ./certs/root.crt --cert-type p12 --cert ./certs/certServiceServer-keystore.p12 --pass secret | - python2 -c 'import json,sys;obj=json.load(sys.stdin);print obj["status"]') - if [[ "${RESP_CODE}" == "UP" ]]; then - echo "AAF Cert Service is Ready." - export AAFCERT_IP=${AAFCERT_IP} - docker exec aafcert-ejbca /opt/primekey/scripts/ejbca-configuration.sh - break - fi - echo "Waiting for AAF Cert Service to Start Up..." - sleep 2m -done +###################### Netconf Simulator Setup ###################### -if [[ "${AAFCERT_IP}" == "none" || "${AAFCERT_IP}" == '' || "${RESP_CODE}" != "UP" ]]; then - echo "AAF CertService not started Could cause problems for testing activities...!" 
+# Get integration/simulators +if [ -d ${SCRIPTS}/sdnc/pnf-simulator ] +then + rm -rf ${SCRIPTS}/sdnc/pnf-simulator fi +mkdir ${SCRIPTS}/sdnc/pnf-simulator +git clone "https://gerrit.onap.org/r/integration/simulators/pnf-simulator" ${SCRIPTS}/sdnc/pnf-simulator + +# Fix docker-compose to add nexus repo for onap dockers +mv ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/docker-compose.yml ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/docker-compose.yml.orig +cat ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/docker-compose.yml.orig | sed -e "s/image: onap/image: nexus3.onap.org:10001\/onap/" > ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/docker-compose.yml + +# Remove carriage returns (if any) from netopeer start script +mv ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/netconf/initialize_netopeer.sh ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/netconf/initialize_netopeer.sh.orig +cat ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/netconf/initialize_netopeer.sh.orig | sed -e "s/\r$//g" > ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/netconf/initialize_netopeer.sh +chmod 755 ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/netconf/initialize_netopeer.sh + + +# Start Netconf Simulator Container with docker-compose and configuration from docker-compose.yml +docker-compose -f "${SCRIPTS}"/sdnc/pnf-simulator/netconfsimulator/docker-compose.yml up -d + +# Add test user in netopeer container +sleep 60 +docker exec netconfsimulator_netopeer_1 useradd --system test + ############################## SDNC Setup ############################## +# Copy client certs from netconf simulator to SDNC certs directory +mkdir /tmp/keys0 +cp ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/tls/client.crt /tmp/keys0 +cp ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/tls/client.key /tmp/keys0 +cp ${SCRIPTS}/sdnc/pnf-simulator/netconfsimulator/tls/ca.crt /tmp/keys0/trustedCertificates.crt +cwd=$(pwd) +cd /tmp +zip -r $SDNC_CERT_PATH/keys0.zip keys0 +rm -rf /tmp/keys0 + # Export Mariadb, SDNC tmp, cert directory path export SDNC_CERT_PATH=${SDNC_CERT_PATH} docker pull "${NEXUS_DOCKER_REPO}"/onap/sdnc-image:"${SDNC_IMAGE_TAG}" docker tag "${NEXUS_DOCKER_REPO}"/onap/sdnc-image:"${SDNC_IMAGE_TAG}" onap/sdnc-image:latest +# Fix permissions on certs directory to guarantee directory is read/ +# writable and that files are readable +chmod ugo+rwx ${SCRIPTS}/sdnc/sdnc/certs +chmod ugo+r ${SCRIPTS}/sdnc/sdnc/certs/* + # Start Mariadb, SDNC Containers with docker-compose and configuration from docker-compose.yml docker-compose -f "${SCRIPTS}"/sdnc/sdnc/docker-compose.yml up -d @@ -120,47 +120,65 @@ for i in {1..10}; do break fi echo "Waiting for SDNC Service to Start Up..." - sleep 2m + sleep 30s done if [[ "${SDNC_IP}" == 'none' || "${SDNC_IP}" == '' || "${RESP_CODE}" != '200' ]]; then - echo "SDNC Service not started Could cause problems for testing activities...!" + echo "SDNC Service not started, setup failed" + exit 1 fi # Check if SDNC-ODL Karaf Session started -for i in {1..15}; do - EXEC_RESP=$(docker exec -it sdnc /opt/opendaylight/current/bin/client system:start-level) - if grep -q 'Level 100' <<<"${EXEC_RESP}"; then - echo "SDNC-ODL Karaf Session Started." - break +TIME_OUT=300 +INTERVAL=10 +TIME=0 +while [ "$TIME" -lt "$TIME_OUT" ]; do + + docker exec sdnc cat /opt/opendaylight/data/log/karaf.log | grep 'warp coils' + + if [ $? == 0 ] ; then + echo SDNC karaf started in $TIME seconds + break; fi - echo "Waiting for SDNC-ODL Karaf Session to Start Up..." 
-    sleep 2m
+
+    echo Sleep: $INTERVAL seconds before testing if SDNC is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
+    sleep $INTERVAL
+    TIME=$(($TIME+$INTERVAL))
done
-if ! grep -q 'Level 100' <<<"${EXEC_RESP}"; then
-    echo "SDNC-ODL Karaf Session not Started, Could cause problems for testing activities...!"
+if [ "$TIME" -ge "$TIME_OUT" ]; then
+    echo TIME OUT: karaf session not started in $TIME_OUT seconds, setup failed
+    exit 1;
fi
-echo "Sleeping 5 minutes"
-sleep 5m
+# Check if certificate installation is done
+TIME_OUT=300
+INTERVAL=10
+TIME=0
+while [ "$TIME" -lt "$TIME_OUT" ]; do
-###################### Netconf-PNP-Simulator Setup ######################
+    docker-compose -f "${SCRIPTS}"/sdnc/sdnc/docker-compose.yml logs sdnc | grep 'Everything OK in Certificate Installation'
-# Export netconf-pnp simulator conf path
-export NETCONF_CONFIG_PATH=${NETCONF_CONFIG_PATH}
+    if [ $? == 0 ] ; then
+        echo SDNC certificate installation done in $TIME seconds
+        break;
+    fi
-# Start Netconf-Pnp-Simulator Container with docker-compose and configuration from docker-compose.yml
-docker-compose -f "${SCRIPTS}"/sdnc/netconf-pnp-simulator/docker-compose.yml up -d
+    echo Sleep: $INTERVAL seconds before testing if SDNC is up. Total wait time up now is: $TIME seconds. Timeout is: $TIME_OUT seconds
+    sleep $INTERVAL
+    TIME=$(($TIME+$INTERVAL))
+done
+
+if [ "$TIME" -ge "$TIME_OUT" ]; then
+    echo TIME OUT: certificate installation not done in $TIME_OUT seconds, setup failed
+    exit 1;
+fi
# Update default Networking bridge IP in mount.json file
sed -i "s/pnfaddr/${LOCAL_IP}/g" "${REQUEST_DATA_PATH}"/mount.xml
#########################################################################
-echo "Sleeping additional for 3 minutes to give application time to finish"
-sleep 3m
-
# Export SDNC, AAF-Certservice-Cient, Netconf-Pnp-Simulator Continer Names
export REQUEST_DATA_PATH="${REQUEST_DATA_PATH}"
export SDNC_CONTAINER_NAME="${SDNC_CONTAINER_NAME}"
diff --git a/plans/sdnc/sdnc_netconf_tls_post_deploy/teardown.sh b/plans/sdnc/sdnc_netconf_tls_post_deploy/teardown.sh
index b780ed49..bbf4d075 100644
--- a/plans/sdnc/sdnc_netconf_tls_post_deploy/teardown.sh
+++ b/plans/sdnc/sdnc_netconf_tls_post_deploy/teardown.sh
@@ -15,11 +15,8 @@ # limitations under the License.
# -docker-compose -f "${SCRIPTS}"/sdnc/certservice/docker-compose.yml down -v + docker-compose -f "${SCRIPTS}"/sdnc/sdnc/docker-compose.yml down -v -docker-compose -f "${SCRIPTS}"/sdnc/netconf-pnp-simulator/docker-compose.yml down -v +docker-compose -f "${SCRIPTS}"/sdnc/pnf-simulator/netconfsimulator/docker-compose.yml down -v -make clear -C "${WORKSPACE}"/plans/sdnc/sdnc_netconf_tls_post_deploy/certs -rm -rf "${WORKSPACE}"/tests/sdnc/sdnc_netconf_tls_post_deploy/certs -rm -rf "${WORKSPACE}"/tests/sdnc/sdnc_netconf_tls_post_deploy/cert-data \ No newline at end of file diff --git a/plans/so/integration-etsi-testing/config/aai-simulator-populate-data/customer.json b/plans/so/integration-etsi-testing/config/aai-simulator-populate-data/customer.json index 6c53c056..931e31eb 100644 --- a/plans/so/integration-etsi-testing/config/aai-simulator-populate-data/customer.json +++ b/plans/so/integration-etsi-testing/config/aai-simulator-populate-data/customer.json @@ -67,6 +67,38 @@ } ] } + }, + { + "service-type": "NetworkService", + "relationship-list": { + "relationship": [ + { + "related-to": "tenant", + "relationship-label": "org.onap.relationships.inventory.Uses", + "related-link": "/aai/v14/cloud-infrastructure/cloud-regions/cloud-region/CloudOwner/xyzcloud/tenants/tenant/693c7729b2364a26a3ca602e6f66187d", + "relationship-data": [ + { + "relationship-key": "cloud-region.cloud-owner", + "relationship-value": "CloudOwner" + }, + { + "relationship-key": "cloud-region.cloud-region-id", + "relationship-value": "xyzcloud" + }, + { + "relationship-key": "tenant.tenant-id", + "relationship-value": "693c7729b2364a26a3ca602e6f66187d" + } + ], + "related-to-property": [ + { + "property-key": "tenant.tenant-name", + "property-value": "admin" + } + ] + } + ] + } } ] } diff --git a/plans/so/integration-etsi-testing/config/camunda-sql/mariadb_engine_7.10.0.sql b/plans/so/integration-etsi-testing/config/camunda-sql/mariadb_engine_7.10.0.sql new file mode 100644 index 00000000..b8062107 --- /dev/null +++ b/plans/so/integration-etsi-testing/config/camunda-sql/mariadb_engine_7.10.0.sql @@ -0,0 +1,1361 @@ +-- +-- Copyright © 2012 - 2018 camunda services GmbH and various authors (info@camunda.com) +-- +-- Licensed under the Apache License, Version 2.0 (the "License"); +-- you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +USE `camundabpmn`; + + +create table ACT_GE_PROPERTY ( + NAME_ varchar(64), + VALUE_ varchar(300), + REV_ integer, + primary key (NAME_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +insert into ACT_GE_PROPERTY +values ('schema.version', 'fox', 1); + +insert into ACT_GE_PROPERTY +values ('schema.history', 'create(fox)', 1); + +insert into ACT_GE_PROPERTY +values ('next.dbid', '1', 1); + +insert into ACT_GE_PROPERTY +values ('deployment.lock', '0', 1); + +insert into ACT_GE_PROPERTY +values ('history.cleanup.job.lock', '0', 1); + +insert into ACT_GE_PROPERTY +values ('startup.lock', '0', 1); + +insert into ACT_GE_PROPERTY +values ('telemetry.lock', '0', 1); + +insert into ACT_GE_PROPERTY +values ('installationId.lock', '0', 1); + +create table ACT_GE_BYTEARRAY ( + ID_ varchar(64), + REV_ integer, + NAME_ varchar(255), + DEPLOYMENT_ID_ varchar(64), + BYTES_ LONGBLOB, + GENERATED_ TINYINT, + TENANT_ID_ varchar(64), + TYPE_ integer, + CREATE_TIME_ datetime(3), + ROOT_PROC_INST_ID_ varchar(64), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_GE_SCHEMA_LOG ( + ID_ varchar(64), + TIMESTAMP_ datetime(3), + VERSION_ varchar(255), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +insert into ACT_GE_SCHEMA_LOG +values ('0', CURRENT_TIMESTAMP, '7.14.0'); + +create table ACT_RE_DEPLOYMENT ( + ID_ varchar(64), + NAME_ varchar(255), + DEPLOY_TIME_ datetime(3), + SOURCE_ varchar(255), + TENANT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_EXECUTION ( + ID_ varchar(64), + REV_ integer, + ROOT_PROC_INST_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + BUSINESS_KEY_ varchar(255), + PARENT_ID_ varchar(64), + PROC_DEF_ID_ varchar(64), + SUPER_EXEC_ varchar(64), + SUPER_CASE_EXEC_ varchar(64), + CASE_INST_ID_ varchar(64), + ACT_ID_ varchar(255), + ACT_INST_ID_ varchar(64), + IS_ACTIVE_ TINYINT, + IS_CONCURRENT_ TINYINT, + IS_SCOPE_ TINYINT, + IS_EVENT_SCOPE_ TINYINT, + SUSPENSION_STATE_ integer, + CACHED_ENT_STATE_ integer, + SEQUENCE_COUNTER_ bigint, + TENANT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_JOB ( + ID_ varchar(64) NOT NULL, + REV_ integer, + TYPE_ varchar(255) NOT NULL, + LOCK_EXP_TIME_ datetime(3) NULL, + LOCK_OWNER_ varchar(255), + EXCLUSIVE_ boolean, + EXECUTION_ID_ varchar(64), + PROCESS_INSTANCE_ID_ varchar(64), + PROCESS_DEF_ID_ varchar(64), + PROCESS_DEF_KEY_ varchar(255), + RETRIES_ integer, + EXCEPTION_STACK_ID_ varchar(64), + EXCEPTION_MSG_ varchar(4000), + FAILED_ACT_ID_ varchar(255), + DUEDATE_ datetime(3) NULL, + REPEAT_ varchar(255), + REPEAT_OFFSET_ bigint DEFAULT 0, + HANDLER_TYPE_ varchar(255), + HANDLER_CFG_ varchar(4000), + DEPLOYMENT_ID_ varchar(64), + SUSPENSION_STATE_ integer NOT NULL DEFAULT 1, + JOB_DEF_ID_ varchar(64), + PRIORITY_ bigint NOT NULL DEFAULT 0, + SEQUENCE_COUNTER_ bigint, + TENANT_ID_ varchar(64), + CREATE_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_JOBDEF ( + ID_ varchar(64) NOT NULL, + REV_ integer, + PROC_DEF_ID_ varchar(64), + PROC_DEF_KEY_ varchar(255), + ACT_ID_ varchar(255), + JOB_TYPE_ varchar(255) NOT NULL, + JOB_CONFIGURATION_ varchar(255), + SUSPENSION_STATE_ integer, + JOB_PRIORITY_ bigint, + TENANT_ID_ varchar(64), + DEPLOYMENT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE 
utf8_bin; + +create table ACT_RE_PROCDEF ( + ID_ varchar(64) not null, + REV_ integer, + CATEGORY_ varchar(255), + NAME_ varchar(255), + KEY_ varchar(255) not null, + VERSION_ integer not null, + DEPLOYMENT_ID_ varchar(64), + RESOURCE_NAME_ varchar(4000), + DGRM_RESOURCE_NAME_ varchar(4000), + HAS_START_FORM_KEY_ TINYINT, + SUSPENSION_STATE_ integer, + TENANT_ID_ varchar(64), + VERSION_TAG_ varchar(64), + HISTORY_TTL_ integer, + STARTABLE_ boolean NOT NULL default TRUE, + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_TASK ( + ID_ varchar(64), + REV_ integer, + EXECUTION_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + PROC_DEF_ID_ varchar(64), + CASE_EXECUTION_ID_ varchar(64), + CASE_INST_ID_ varchar(64), + CASE_DEF_ID_ varchar(64), + NAME_ varchar(255), + PARENT_TASK_ID_ varchar(64), + DESCRIPTION_ varchar(4000), + TASK_DEF_KEY_ varchar(255), + OWNER_ varchar(255), + ASSIGNEE_ varchar(255), + DELEGATION_ varchar(64), + PRIORITY_ integer, + CREATE_TIME_ datetime(3), + DUE_DATE_ datetime(3), + FOLLOW_UP_DATE_ datetime(3), + SUSPENSION_STATE_ integer, + TENANT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_IDENTITYLINK ( + ID_ varchar(64), + REV_ integer, + GROUP_ID_ varchar(255), + TYPE_ varchar(255), + USER_ID_ varchar(255), + TASK_ID_ varchar(64), + PROC_DEF_ID_ varchar(64), + TENANT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_VARIABLE ( + ID_ varchar(64) not null, + REV_ integer, + TYPE_ varchar(255) not null, + NAME_ varchar(255) not null, + EXECUTION_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + PROC_DEF_ID_ varchar(64), + CASE_EXECUTION_ID_ varchar(64), + CASE_INST_ID_ varchar(64), + TASK_ID_ varchar(64), + BATCH_ID_ varchar(64), + BYTEARRAY_ID_ varchar(64), + DOUBLE_ double, + LONG_ bigint, + TEXT_ varchar(10000), + TEXT2_ varchar(4000), + VAR_SCOPE_ varchar(64) not null, + SEQUENCE_COUNTER_ bigint, + IS_CONCURRENT_LOCAL_ TINYINT, + TENANT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_EVENT_SUBSCR ( + ID_ varchar(64) not null, + REV_ integer, + EVENT_TYPE_ varchar(255) not null, + EVENT_NAME_ varchar(255), + EXECUTION_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + ACTIVITY_ID_ varchar(255), + CONFIGURATION_ varchar(255), + CREATED_ datetime(3) not null, + TENANT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_INCIDENT ( + ID_ varchar(64) not null, + REV_ integer not null, + INCIDENT_TIMESTAMP_ datetime(3) not null, + INCIDENT_MSG_ varchar(4000), + INCIDENT_TYPE_ varchar(255) not null, + EXECUTION_ID_ varchar(64), + ACTIVITY_ID_ varchar(255), + FAILED_ACTIVITY_ID_ varchar(255), + PROC_INST_ID_ varchar(64), + PROC_DEF_ID_ varchar(64), + CAUSE_INCIDENT_ID_ varchar(64), + ROOT_CAUSE_INCIDENT_ID_ varchar(64), + CONFIGURATION_ varchar(255), + TENANT_ID_ varchar(64), + JOB_DEF_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_AUTHORIZATION ( + ID_ varchar(64) not null, + REV_ integer not null, + TYPE_ integer not null, + GROUP_ID_ varchar(255), + USER_ID_ varchar(255), + RESOURCE_TYPE_ integer not null, + RESOURCE_ID_ varchar(255), + PERMS_ integer, + REMOVAL_TIME_ datetime(3), + ROOT_PROC_INST_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create 
table ACT_RU_FILTER ( + ID_ varchar(64) not null, + REV_ integer not null, + RESOURCE_TYPE_ varchar(255) not null, + NAME_ varchar(255) not null, + OWNER_ varchar(255), + QUERY_ LONGTEXT not null, + PROPERTIES_ LONGTEXT, + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_METER_LOG ( + ID_ varchar(64) not null, + NAME_ varchar(64) not null, + REPORTER_ varchar(255), + VALUE_ bigint, + TIMESTAMP_ datetime(3), + MILLISECONDS_ bigint DEFAULT 0, + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_EXT_TASK ( + ID_ varchar(64) not null, + REV_ integer not null, + WORKER_ID_ varchar(255), + TOPIC_NAME_ varchar(255), + RETRIES_ integer, + ERROR_MSG_ varchar(4000), + ERROR_DETAILS_ID_ varchar(64), + LOCK_EXP_TIME_ datetime(3) NULL, + SUSPENSION_STATE_ integer, + EXECUTION_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + PROC_DEF_ID_ varchar(64), + PROC_DEF_KEY_ varchar(255), + ACT_ID_ varchar(255), + ACT_INST_ID_ varchar(64), + TENANT_ID_ varchar(64), + PRIORITY_ bigint NOT NULL DEFAULT 0, + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_RU_BATCH ( + ID_ varchar(64) not null, + REV_ integer not null, + TYPE_ varchar(255), + TOTAL_JOBS_ integer, + JOBS_CREATED_ integer, + JOBS_PER_SEED_ integer, + INVOCATIONS_PER_JOB_ integer, + SEED_JOB_DEF_ID_ varchar(64), + BATCH_JOB_DEF_ID_ varchar(64), + MONITOR_JOB_DEF_ID_ varchar(64), + SUSPENSION_STATE_ integer, + CONFIGURATION_ varchar(255), + TENANT_ID_ varchar(64), + CREATE_USER_ID_ varchar(255), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create index ACT_IDX_EXEC_ROOT_PI on ACT_RU_EXECUTION(ROOT_PROC_INST_ID_); +create index ACT_IDX_EXEC_BUSKEY on ACT_RU_EXECUTION(BUSINESS_KEY_); +create index ACT_IDX_EXEC_TENANT_ID on ACT_RU_EXECUTION(TENANT_ID_); +create index ACT_IDX_TASK_CREATE on ACT_RU_TASK(CREATE_TIME_); +create index ACT_IDX_TASK_ASSIGNEE on ACT_RU_TASK(ASSIGNEE_); +create index ACT_IDX_TASK_OWNER on ACT_RU_TASK(OWNER_); +create index ACT_IDX_TASK_TENANT_ID on ACT_RU_TASK(TENANT_ID_); +create index ACT_IDX_IDENT_LNK_USER on ACT_RU_IDENTITYLINK(USER_ID_); +create index ACT_IDX_IDENT_LNK_GROUP on ACT_RU_IDENTITYLINK(GROUP_ID_); +create index ACT_IDX_EVENT_SUBSCR_CONFIG_ on ACT_RU_EVENT_SUBSCR(CONFIGURATION_); +create index ACT_IDX_EVENT_SUBSCR_TENANT_ID on ACT_RU_EVENT_SUBSCR(TENANT_ID_); + +create index ACT_IDX_VARIABLE_TASK_ID on ACT_RU_VARIABLE(TASK_ID_); +create index ACT_IDX_VARIABLE_TENANT_ID on ACT_RU_VARIABLE(TENANT_ID_); +create index ACT_IDX_VARIABLE_TASK_NAME_TYPE on ACT_RU_VARIABLE(TASK_ID_, NAME_, TYPE_); + +create index ACT_IDX_ATHRZ_PROCEDEF on ACT_RU_IDENTITYLINK(PROC_DEF_ID_); +create index ACT_IDX_INC_CONFIGURATION on ACT_RU_INCIDENT(CONFIGURATION_); +create index ACT_IDX_INC_TENANT_ID on ACT_RU_INCIDENT(TENANT_ID_); +-- CAM-5914 +create index ACT_IDX_JOB_EXECUTION_ID on ACT_RU_JOB(EXECUTION_ID_); +-- this index needs to be limited in mariadb see CAM-6938 +create index ACT_IDX_JOB_HANDLER on ACT_RU_JOB(HANDLER_TYPE_(100),HANDLER_CFG_(155)); +create index ACT_IDX_JOB_PROCINST on ACT_RU_JOB(PROCESS_INSTANCE_ID_); +create index ACT_IDX_JOB_TENANT_ID on ACT_RU_JOB(TENANT_ID_); +create index ACT_IDX_JOBDEF_TENANT_ID on ACT_RU_JOBDEF(TENANT_ID_); + +-- new metric milliseconds column +CREATE INDEX ACT_IDX_METER_LOG_MS ON ACT_RU_METER_LOG(MILLISECONDS_); +CREATE INDEX ACT_IDX_METER_LOG_NAME_MS ON ACT_RU_METER_LOG(NAME_, MILLISECONDS_); +CREATE INDEX 
ACT_IDX_METER_LOG_REPORT ON ACT_RU_METER_LOG(NAME_, REPORTER_, MILLISECONDS_); + +-- old metric timestamp column +CREATE INDEX ACT_IDX_METER_LOG_TIME ON ACT_RU_METER_LOG(TIMESTAMP_); +CREATE INDEX ACT_IDX_METER_LOG ON ACT_RU_METER_LOG(NAME_, TIMESTAMP_); + +create index ACT_IDX_EXT_TASK_TOPIC on ACT_RU_EXT_TASK(TOPIC_NAME_); +create index ACT_IDX_EXT_TASK_TENANT_ID on ACT_RU_EXT_TASK(TENANT_ID_); +create index ACT_IDX_EXT_TASK_PRIORITY ON ACT_RU_EXT_TASK(PRIORITY_); +create index ACT_IDX_EXT_TASK_ERR_DETAILS ON ACT_RU_EXT_TASK(ERROR_DETAILS_ID_); +create index ACT_IDX_AUTH_GROUP_ID ON ACT_RU_AUTHORIZATION(GROUP_ID_); +create index ACT_IDX_JOB_JOB_DEF_ID on ACT_RU_JOB(JOB_DEF_ID_); + +alter table ACT_GE_BYTEARRAY + add constraint ACT_FK_BYTEARR_DEPL + foreign key (DEPLOYMENT_ID_) + references ACT_RE_DEPLOYMENT (ID_); + +alter table ACT_RU_EXECUTION + add constraint ACT_FK_EXE_PROCINST + foreign key (PROC_INST_ID_) + references ACT_RU_EXECUTION (ID_) on delete cascade on update cascade; + +alter table ACT_RU_EXECUTION + add constraint ACT_FK_EXE_PARENT + foreign key (PARENT_ID_) + references ACT_RU_EXECUTION (ID_); + +alter table ACT_RU_EXECUTION + add constraint ACT_FK_EXE_SUPER + foreign key (SUPER_EXEC_) + references ACT_RU_EXECUTION (ID_); + +alter table ACT_RU_EXECUTION + add constraint ACT_FK_EXE_PROCDEF + foreign key (PROC_DEF_ID_) + references ACT_RE_PROCDEF (ID_); + +alter table ACT_RU_IDENTITYLINK + add constraint ACT_FK_TSKASS_TASK + foreign key (TASK_ID_) + references ACT_RU_TASK (ID_); + +alter table ACT_RU_IDENTITYLINK + add constraint ACT_FK_ATHRZ_PROCEDEF + foreign key (PROC_DEF_ID_) + references ACT_RE_PROCDEF(ID_); + +alter table ACT_RU_TASK + add constraint ACT_FK_TASK_EXE + foreign key (EXECUTION_ID_) + references ACT_RU_EXECUTION (ID_); + +alter table ACT_RU_TASK + add constraint ACT_FK_TASK_PROCINST + foreign key (PROC_INST_ID_) + references ACT_RU_EXECUTION (ID_); + +alter table ACT_RU_TASK + add constraint ACT_FK_TASK_PROCDEF + foreign key (PROC_DEF_ID_) + references ACT_RE_PROCDEF (ID_); + +alter table ACT_RU_VARIABLE + add constraint ACT_FK_VAR_EXE + foreign key (EXECUTION_ID_) + references ACT_RU_EXECUTION (ID_); + +alter table ACT_RU_VARIABLE + add constraint ACT_FK_VAR_PROCINST + foreign key (PROC_INST_ID_) + references ACT_RU_EXECUTION(ID_); + +alter table ACT_RU_VARIABLE + add constraint ACT_FK_VAR_BYTEARRAY + foreign key (BYTEARRAY_ID_) + references ACT_GE_BYTEARRAY (ID_); + +alter table ACT_RU_JOB + add constraint ACT_FK_JOB_EXCEPTION + foreign key (EXCEPTION_STACK_ID_) + references ACT_GE_BYTEARRAY (ID_); + +alter table ACT_RU_EVENT_SUBSCR + add constraint ACT_FK_EVENT_EXEC + foreign key (EXECUTION_ID_) + references ACT_RU_EXECUTION(ID_); + +alter table ACT_RU_INCIDENT + add constraint ACT_FK_INC_EXE + foreign key (EXECUTION_ID_) + references ACT_RU_EXECUTION (ID_); + +alter table ACT_RU_INCIDENT + add constraint ACT_FK_INC_PROCINST + foreign key (PROC_INST_ID_) + references ACT_RU_EXECUTION (ID_); + +alter table ACT_RU_INCIDENT + add constraint ACT_FK_INC_PROCDEF + foreign key (PROC_DEF_ID_) + references ACT_RE_PROCDEF (ID_); + +alter table ACT_RU_INCIDENT + add constraint ACT_FK_INC_CAUSE + foreign key (CAUSE_INCIDENT_ID_) + references ACT_RU_INCIDENT (ID_) on delete cascade on update cascade; + +alter table ACT_RU_INCIDENT + add constraint ACT_FK_INC_RCAUSE + foreign key (ROOT_CAUSE_INCIDENT_ID_) + references ACT_RU_INCIDENT (ID_) on delete cascade on update cascade; + +alter table ACT_RU_EXT_TASK + add constraint ACT_FK_EXT_TASK_ERROR_DETAILS + foreign 
key (ERROR_DETAILS_ID_) + references ACT_GE_BYTEARRAY (ID_); + +create index ACT_IDX_INC_JOB_DEF on ACT_RU_INCIDENT(JOB_DEF_ID_); +alter table ACT_RU_INCIDENT + add constraint ACT_FK_INC_JOB_DEF + foreign key (JOB_DEF_ID_) + references ACT_RU_JOBDEF (ID_); + +alter table ACT_RU_AUTHORIZATION + add constraint ACT_UNIQ_AUTH_USER + unique (USER_ID_,TYPE_,RESOURCE_TYPE_,RESOURCE_ID_); + +alter table ACT_RU_AUTHORIZATION + add constraint ACT_UNIQ_AUTH_GROUP + unique (GROUP_ID_,TYPE_,RESOURCE_TYPE_,RESOURCE_ID_); + +alter table ACT_RU_VARIABLE + add constraint ACT_UNIQ_VARIABLE + unique (VAR_SCOPE_, NAME_); + +alter table ACT_RU_EXT_TASK + add constraint ACT_FK_EXT_TASK_EXE + foreign key (EXECUTION_ID_) + references ACT_RU_EXECUTION (ID_); + +create index ACT_IDX_BATCH_SEED_JOB_DEF ON ACT_RU_BATCH(SEED_JOB_DEF_ID_); +alter table ACT_RU_BATCH + add constraint ACT_FK_BATCH_SEED_JOB_DEF + foreign key (SEED_JOB_DEF_ID_) + references ACT_RU_JOBDEF (ID_); + +create index ACT_IDX_BATCH_MONITOR_JOB_DEF ON ACT_RU_BATCH(MONITOR_JOB_DEF_ID_); +alter table ACT_RU_BATCH + add constraint ACT_FK_BATCH_MONITOR_JOB_DEF + foreign key (MONITOR_JOB_DEF_ID_) + references ACT_RU_JOBDEF (ID_); + +create index ACT_IDX_BATCH_JOB_DEF ON ACT_RU_BATCH(BATCH_JOB_DEF_ID_); +alter table ACT_RU_BATCH + add constraint ACT_FK_BATCH_JOB_DEF + foreign key (BATCH_JOB_DEF_ID_) + references ACT_RU_JOBDEF (ID_); + +create index ACT_IDX_BATCH_ID ON ACT_RU_VARIABLE(BATCH_ID_); +alter table ACT_RU_VARIABLE + add constraint ACT_FK_VAR_BATCH + foreign key (BATCH_ID_) + references ACT_RU_BATCH (ID_); + +-- indexes for deadlock problems - https://app.camunda.com/jira/browse/CAM-2567 -- +create index ACT_IDX_INC_CAUSEINCID on ACT_RU_INCIDENT(CAUSE_INCIDENT_ID_); +create index ACT_IDX_INC_EXID on ACT_RU_INCIDENT(EXECUTION_ID_); +create index ACT_IDX_INC_PROCDEFID on ACT_RU_INCIDENT(PROC_DEF_ID_); +create index ACT_IDX_INC_PROCINSTID on ACT_RU_INCIDENT(PROC_INST_ID_); +create index ACT_IDX_INC_ROOTCAUSEINCID on ACT_RU_INCIDENT(ROOT_CAUSE_INCIDENT_ID_); +-- index for deadlock problem - https://app.camunda.com/jira/browse/CAM-4440 -- +create index ACT_IDX_AUTH_RESOURCE_ID on ACT_RU_AUTHORIZATION(RESOURCE_ID_); +-- index to prevent deadlock on fk constraint - https://app.camunda.com/jira/browse/CAM-5440 -- +create index ACT_IDX_EXT_TASK_EXEC on ACT_RU_EXT_TASK(EXECUTION_ID_); + +-- indexes to improve deployment +create index ACT_IDX_BYTEARRAY_ROOT_PI on ACT_GE_BYTEARRAY(ROOT_PROC_INST_ID_); +create index ACT_IDX_BYTEARRAY_RM_TIME on ACT_GE_BYTEARRAY(REMOVAL_TIME_); +create index ACT_IDX_BYTEARRAY_NAME on ACT_GE_BYTEARRAY(NAME_); +create index ACT_IDX_DEPLOYMENT_NAME on ACT_RE_DEPLOYMENT(NAME_); +create index ACT_IDX_DEPLOYMENT_TENANT_ID on ACT_RE_DEPLOYMENT(TENANT_ID_); +create index ACT_IDX_JOBDEF_PROC_DEF_ID ON ACT_RU_JOBDEF(PROC_DEF_ID_); +create index ACT_IDX_JOB_HANDLER_TYPE ON ACT_RU_JOB(HANDLER_TYPE_); +create index ACT_IDX_EVENT_SUBSCR_EVT_NAME ON ACT_RU_EVENT_SUBSCR(EVENT_NAME_); +create index ACT_IDX_PROCDEF_DEPLOYMENT_ID ON ACT_RE_PROCDEF(DEPLOYMENT_ID_); +create index ACT_IDX_PROCDEF_TENANT_ID ON ACT_RE_PROCDEF(TENANT_ID_); +create index ACT_IDX_PROCDEF_VER_TAG ON ACT_RE_PROCDEF(VERSION_TAG_); + +-- indices for history cleanup: https://jira.camunda.com/browse/CAM-11616 +create index ACT_IDX_AUTH_ROOT_PI on ACT_RU_AUTHORIZATION(ROOT_PROC_INST_ID_); +create index ACT_IDX_AUTH_RM_TIME on ACT_RU_AUTHORIZATION(REMOVAL_TIME_); +-- +-- Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH +-- under one or more contributor 
license agreements. See the NOTICE file +-- distributed with this work for additional information regarding copyright +-- ownership. Camunda licenses this file to you under the Apache License, +-- Version 2.0; you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +-- create case definition table -- +create table ACT_RE_CASE_DEF ( + ID_ varchar(64) not null, + REV_ integer, + CATEGORY_ varchar(255), + NAME_ varchar(255), + KEY_ varchar(255) not null, + VERSION_ integer not null, + DEPLOYMENT_ID_ varchar(64), + RESOURCE_NAME_ varchar(4000), + DGRM_RESOURCE_NAME_ varchar(4000), + TENANT_ID_ varchar(64), + HISTORY_TTL_ integer, + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +-- create case execution table -- +create table ACT_RU_CASE_EXECUTION ( + ID_ varchar(64) NOT NULL, + REV_ integer, + CASE_INST_ID_ varchar(64), + SUPER_CASE_EXEC_ varchar(64), + SUPER_EXEC_ varchar(64), + BUSINESS_KEY_ varchar(255), + PARENT_ID_ varchar(64), + CASE_DEF_ID_ varchar(64), + ACT_ID_ varchar(255), + PREV_STATE_ integer, + CURRENT_STATE_ integer, + REQUIRED_ boolean, + TENANT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +-- create case sentry part table -- + +create table ACT_RU_CASE_SENTRY_PART ( + ID_ varchar(64) NOT NULL, + REV_ integer, + CASE_INST_ID_ varchar(64), + CASE_EXEC_ID_ varchar(64), + SENTRY_ID_ varchar(255), + TYPE_ varchar(255), + SOURCE_CASE_EXEC_ID_ varchar(64), + STANDARD_EVENT_ varchar(255), + SOURCE_ varchar(255), + VARIABLE_EVENT_ varchar(255), + VARIABLE_NAME_ varchar(255), + SATISFIED_ boolean, + TENANT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +-- create index on business key -- +create index ACT_IDX_CASE_EXEC_BUSKEY on ACT_RU_CASE_EXECUTION(BUSINESS_KEY_); + +-- https://app.camunda.com/jira/browse/CAM-9165 +create index ACT_IDX_CASE_EXE_CASE_INST on ACT_RU_CASE_EXECUTION(CASE_INST_ID_); + +-- create foreign key constraints on ACT_RU_CASE_EXECUTION -- +alter table ACT_RU_CASE_EXECUTION + add constraint ACT_FK_CASE_EXE_CASE_INST + foreign key (CASE_INST_ID_) + references ACT_RU_CASE_EXECUTION(ID_) on delete cascade on update cascade; + +alter table ACT_RU_CASE_EXECUTION + add constraint ACT_FK_CASE_EXE_PARENT + foreign key (PARENT_ID_) + references ACT_RU_CASE_EXECUTION(ID_); + +alter table ACT_RU_CASE_EXECUTION + add constraint ACT_FK_CASE_EXE_CASE_DEF + foreign key (CASE_DEF_ID_) + references ACT_RE_CASE_DEF(ID_); + +-- create foreign key constraints on ACT_RU_VARIABLE -- +alter table ACT_RU_VARIABLE + add constraint ACT_FK_VAR_CASE_EXE + foreign key (CASE_EXECUTION_ID_) + references ACT_RU_CASE_EXECUTION(ID_); + +alter table ACT_RU_VARIABLE + add constraint ACT_FK_VAR_CASE_INST + foreign key (CASE_INST_ID_) + references ACT_RU_CASE_EXECUTION(ID_); + +-- create foreign key constraints on ACT_RU_TASK -- +alter table ACT_RU_TASK + add constraint ACT_FK_TASK_CASE_EXE + foreign key (CASE_EXECUTION_ID_) + references ACT_RU_CASE_EXECUTION(ID_); + +alter table ACT_RU_TASK + add constraint ACT_FK_TASK_CASE_DEF + foreign key 
(CASE_DEF_ID_) + references ACT_RE_CASE_DEF(ID_); + +-- create foreign key constraints on ACT_RU_CASE_SENTRY_PART -- +alter table ACT_RU_CASE_SENTRY_PART + add constraint ACT_FK_CASE_SENTRY_CASE_INST + foreign key (CASE_INST_ID_) + references ACT_RU_CASE_EXECUTION(ID_); + +alter table ACT_RU_CASE_SENTRY_PART + add constraint ACT_FK_CASE_SENTRY_CASE_EXEC + foreign key (CASE_EXEC_ID_) + references ACT_RU_CASE_EXECUTION(ID_); + +create index ACT_IDX_CASE_DEF_TENANT_ID on ACT_RE_CASE_DEF(TENANT_ID_); +create index ACT_IDX_CASE_EXEC_TENANT_ID on ACT_RU_CASE_EXECUTION(TENANT_ID_); +-- +-- Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH +-- under one or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information regarding copyright +-- ownership. Camunda licenses this file to you under the Apache License, +-- Version 2.0; you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +-- create decision definition table -- +create table ACT_RE_DECISION_DEF ( + ID_ varchar(64) not null, + REV_ integer, + CATEGORY_ varchar(255), + NAME_ varchar(255), + KEY_ varchar(255) not null, + VERSION_ integer not null, + DEPLOYMENT_ID_ varchar(64), + RESOURCE_NAME_ varchar(4000), + DGRM_RESOURCE_NAME_ varchar(4000), + DEC_REQ_ID_ varchar(64), + DEC_REQ_KEY_ varchar(255), + TENANT_ID_ varchar(64), + HISTORY_TTL_ integer, + VERSION_TAG_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +-- create decision requirements definition table -- +create table ACT_RE_DECISION_REQ_DEF ( + ID_ varchar(64) NOT NULL, + REV_ integer, + CATEGORY_ varchar(255), + NAME_ varchar(255), + KEY_ varchar(255) NOT NULL, + VERSION_ integer NOT NULL, + DEPLOYMENT_ID_ varchar(64), + RESOURCE_NAME_ varchar(4000), + DGRM_RESOURCE_NAME_ varchar(4000), + TENANT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +alter table ACT_RE_DECISION_DEF + add constraint ACT_FK_DEC_REQ + foreign key (DEC_REQ_ID_) + references ACT_RE_DECISION_REQ_DEF(ID_); + +create index ACT_IDX_DEC_DEF_TENANT_ID on ACT_RE_DECISION_DEF(TENANT_ID_); +create index ACT_IDX_DEC_DEF_REQ_ID on ACT_RE_DECISION_DEF(DEC_REQ_ID_); +create index ACT_IDX_DEC_REQ_DEF_TENANT_ID on ACT_RE_DECISION_REQ_DEF(TENANT_ID_); +-- +-- Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH +-- under one or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information regarding copyright +-- ownership. Camunda licenses this file to you under the Apache License, +-- Version 2.0; you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+-- See the License for the specific language governing permissions and +-- limitations under the License. +-- + +create table ACT_HI_PROCINST ( + ID_ varchar(64) not null, + PROC_INST_ID_ varchar(64) not null, + BUSINESS_KEY_ varchar(255), + PROC_DEF_KEY_ varchar(255), + PROC_DEF_ID_ varchar(64) not null, + START_TIME_ datetime(3) not null, + END_TIME_ datetime(3), + REMOVAL_TIME_ datetime(3), + DURATION_ bigint, + START_USER_ID_ varchar(255), + START_ACT_ID_ varchar(255), + END_ACT_ID_ varchar(255), + SUPER_PROCESS_INSTANCE_ID_ varchar(64), + ROOT_PROC_INST_ID_ varchar(64), + SUPER_CASE_INSTANCE_ID_ varchar(64), + CASE_INST_ID_ varchar(64), + DELETE_REASON_ varchar(4000), + TENANT_ID_ varchar(64), + STATE_ varchar(255), + primary key (ID_), + unique (PROC_INST_ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_ACTINST ( + ID_ varchar(64) not null, + PARENT_ACT_INST_ID_ varchar(64), + PROC_DEF_KEY_ varchar(255), + PROC_DEF_ID_ varchar(64) not null, + ROOT_PROC_INST_ID_ varchar(64), + PROC_INST_ID_ varchar(64) not null, + EXECUTION_ID_ varchar(64) not null, + ACT_ID_ varchar(255) not null, + TASK_ID_ varchar(64), + CALL_PROC_INST_ID_ varchar(64), + CALL_CASE_INST_ID_ varchar(64), + ACT_NAME_ varchar(255), + ACT_TYPE_ varchar(255) not null, + ASSIGNEE_ varchar(255), + START_TIME_ datetime(3) not null, + END_TIME_ datetime(3), + DURATION_ bigint, + ACT_INST_STATE_ integer, + SEQUENCE_COUNTER_ bigint, + TENANT_ID_ varchar(64), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_TASKINST ( + ID_ varchar(64) not null, + TASK_DEF_KEY_ varchar(255), + PROC_DEF_KEY_ varchar(255), + PROC_DEF_ID_ varchar(64), + ROOT_PROC_INST_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + EXECUTION_ID_ varchar(64), + CASE_DEF_KEY_ varchar(255), + CASE_DEF_ID_ varchar(64), + CASE_INST_ID_ varchar(64), + CASE_EXECUTION_ID_ varchar(64), + ACT_INST_ID_ varchar(64), + NAME_ varchar(255), + PARENT_TASK_ID_ varchar(64), + DESCRIPTION_ varchar(4000), + OWNER_ varchar(255), + ASSIGNEE_ varchar(255), + START_TIME_ datetime(3) not null, + END_TIME_ datetime(3), + DURATION_ bigint, + DELETE_REASON_ varchar(4000), + PRIORITY_ integer, + DUE_DATE_ datetime(3), + FOLLOW_UP_DATE_ datetime(3), + TENANT_ID_ varchar(64), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_VARINST ( + ID_ varchar(64) not null, + PROC_DEF_KEY_ varchar(255), + PROC_DEF_ID_ varchar(64), + ROOT_PROC_INST_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + EXECUTION_ID_ varchar(64), + ACT_INST_ID_ varchar(64), + CASE_DEF_KEY_ varchar(255), + CASE_DEF_ID_ varchar(64), + CASE_INST_ID_ varchar(64), + CASE_EXECUTION_ID_ varchar(64), + TASK_ID_ varchar(64), + NAME_ varchar(255) not null, + VAR_TYPE_ varchar(100), + CREATE_TIME_ datetime(3), + REV_ integer, + BYTEARRAY_ID_ varchar(64), + DOUBLE_ double, + LONG_ bigint, + TEXT_ varchar(10000), + TEXT2_ varchar(4000), + TENANT_ID_ varchar(64), + STATE_ varchar(20), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_DETAIL ( + ID_ varchar(64) not null, + TYPE_ varchar(255) not null, + PROC_DEF_KEY_ varchar(255), + PROC_DEF_ID_ varchar(64), + ROOT_PROC_INST_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + EXECUTION_ID_ varchar(64), + CASE_DEF_KEY_ varchar(255), + CASE_DEF_ID_ varchar(64), + CASE_INST_ID_ varchar(64), + CASE_EXECUTION_ID_ varchar(64), + TASK_ID_ 
varchar(64), + ACT_INST_ID_ varchar(64), + VAR_INST_ID_ varchar(64), + NAME_ varchar(255) not null, + VAR_TYPE_ varchar(255), + REV_ integer, + TIME_ datetime(3) not null, + BYTEARRAY_ID_ varchar(64), + DOUBLE_ double, + LONG_ bigint, + TEXT_ varchar(10000), + TEXT2_ varchar(4000), + SEQUENCE_COUNTER_ bigint, + TENANT_ID_ varchar(64), + OPERATION_ID_ varchar(64), + REMOVAL_TIME_ datetime(3), + INITIAL_ boolean, + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_IDENTITYLINK ( + ID_ varchar(64) not null, + TIMESTAMP_ timestamp(3) not null, + TYPE_ varchar(255), + USER_ID_ varchar(255), + GROUP_ID_ varchar(255), + TASK_ID_ varchar(64), + ROOT_PROC_INST_ID_ varchar(64), + PROC_DEF_ID_ varchar(64), + OPERATION_TYPE_ varchar(64), + ASSIGNER_ID_ varchar(64), + PROC_DEF_KEY_ varchar(255), + TENANT_ID_ varchar(64), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_COMMENT ( + ID_ varchar(64) not null, + TYPE_ varchar(255), + TIME_ datetime(3) not null, + USER_ID_ varchar(255), + TASK_ID_ varchar(64), + ROOT_PROC_INST_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + ACTION_ varchar(255), + MESSAGE_ varchar(4000), + FULL_MSG_ LONGBLOB, + TENANT_ID_ varchar(64), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_ATTACHMENT ( + ID_ varchar(64) not null, + REV_ integer, + USER_ID_ varchar(255), + NAME_ varchar(255), + DESCRIPTION_ varchar(4000), + TYPE_ varchar(255), + TASK_ID_ varchar(64), + ROOT_PROC_INST_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + URL_ varchar(4000), + CONTENT_ID_ varchar(64), + TENANT_ID_ varchar(64), + CREATE_TIME_ datetime(3), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_OP_LOG ( + ID_ varchar(64) not null, + DEPLOYMENT_ID_ varchar(64), + PROC_DEF_ID_ varchar(64), + PROC_DEF_KEY_ varchar(255), + ROOT_PROC_INST_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + EXECUTION_ID_ varchar(64), + CASE_DEF_ID_ varchar(64), + CASE_INST_ID_ varchar(64), + CASE_EXECUTION_ID_ varchar(64), + TASK_ID_ varchar(64), + JOB_ID_ varchar(64), + JOB_DEF_ID_ varchar(64), + BATCH_ID_ varchar(64), + USER_ID_ varchar(255), + TIMESTAMP_ timestamp(3) not null, + OPERATION_TYPE_ varchar(64), + OPERATION_ID_ varchar(64), + ENTITY_TYPE_ varchar(30), + PROPERTY_ varchar(64), + ORG_VALUE_ varchar(4000), + NEW_VALUE_ varchar(4000), + TENANT_ID_ varchar(64), + REMOVAL_TIME_ datetime(3), + CATEGORY_ varchar(64), + EXTERNAL_TASK_ID_ varchar(64), + ANNOTATION_ varchar(4000), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_INCIDENT ( + ID_ varchar(64) not null, + PROC_DEF_KEY_ varchar(255), + PROC_DEF_ID_ varchar(64), + ROOT_PROC_INST_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + EXECUTION_ID_ varchar(64), + CREATE_TIME_ timestamp(3) not null, + END_TIME_ timestamp(3) null, + INCIDENT_MSG_ varchar(4000), + INCIDENT_TYPE_ varchar(255) not null, + ACTIVITY_ID_ varchar(255), + FAILED_ACTIVITY_ID_ varchar(255), + CAUSE_INCIDENT_ID_ varchar(64), + ROOT_CAUSE_INCIDENT_ID_ varchar(64), + CONFIGURATION_ varchar(255), + HISTORY_CONFIGURATION_ varchar(255), + INCIDENT_STATE_ integer, + TENANT_ID_ varchar(64), + JOB_DEF_ID_ varchar(64), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_JOB_LOG ( + ID_ varchar(64) not 
null, + TIMESTAMP_ datetime(3) not null, + JOB_ID_ varchar(64) not null, + JOB_DUEDATE_ datetime(3) NULL, + JOB_RETRIES_ integer, + JOB_PRIORITY_ bigint NOT NULL DEFAULT 0, + JOB_EXCEPTION_MSG_ varchar(4000), + JOB_EXCEPTION_STACK_ID_ varchar(64), + JOB_STATE_ integer, + JOB_DEF_ID_ varchar(64), + JOB_DEF_TYPE_ varchar(255), + JOB_DEF_CONFIGURATION_ varchar(255), + ACT_ID_ varchar(255), + FAILED_ACT_ID_ varchar(255), + ROOT_PROC_INST_ID_ varchar(64), + EXECUTION_ID_ varchar(64), + PROCESS_INSTANCE_ID_ varchar(64), + PROCESS_DEF_ID_ varchar(64), + PROCESS_DEF_KEY_ varchar(255), + DEPLOYMENT_ID_ varchar(64), + SEQUENCE_COUNTER_ bigint, + TENANT_ID_ varchar(64), + HOSTNAME_ varchar(255), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_BATCH ( + ID_ varchar(64) not null, + TYPE_ varchar(255), + TOTAL_JOBS_ integer, + JOBS_PER_SEED_ integer, + INVOCATIONS_PER_JOB_ integer, + SEED_JOB_DEF_ID_ varchar(64), + MONITOR_JOB_DEF_ID_ varchar(64), + BATCH_JOB_DEF_ID_ varchar(64), + TENANT_ID_ varchar(64), + CREATE_USER_ID_ varchar(255), + START_TIME_ datetime(3) not null, + END_TIME_ datetime(3), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_EXT_TASK_LOG ( + ID_ varchar(64) not null, + TIMESTAMP_ timestamp(3) not null, + EXT_TASK_ID_ varchar(64) not null, + RETRIES_ integer, + TOPIC_NAME_ varchar(255), + WORKER_ID_ varchar(255), + PRIORITY_ bigint NOT NULL DEFAULT 0, + ERROR_MSG_ varchar(4000), + ERROR_DETAILS_ID_ varchar(64), + ACT_ID_ varchar(255), + ACT_INST_ID_ varchar(64), + EXECUTION_ID_ varchar(64), + ROOT_PROC_INST_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + PROC_DEF_ID_ varchar(64), + PROC_DEF_KEY_ varchar(255), + TENANT_ID_ varchar(64), + STATE_ integer, + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create index ACT_IDX_HI_PRO_INST_END on ACT_HI_PROCINST(END_TIME_); +create index ACT_IDX_HI_PRO_I_BUSKEY on ACT_HI_PROCINST(BUSINESS_KEY_); +create index ACT_IDX_HI_PRO_INST_TENANT_ID on ACT_HI_PROCINST(TENANT_ID_); +create index ACT_IDX_HI_PRO_INST_PROC_DEF_KEY on ACT_HI_PROCINST(PROC_DEF_KEY_); +create index ACT_IDX_HI_PRO_INST_PROC_TIME on ACT_HI_PROCINST(START_TIME_, END_TIME_); +create index ACT_IDX_HI_PI_PDEFID_END_TIME on ACT_HI_PROCINST(PROC_DEF_ID_, END_TIME_); +create index ACT_IDX_HI_PRO_INST_ROOT_PI on ACT_HI_PROCINST(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_PRO_INST_RM_TIME on ACT_HI_PROCINST(REMOVAL_TIME_); + +create index ACT_IDX_HI_ACTINST_ROOT_PI on ACT_HI_ACTINST(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_ACT_INST_START_END on ACT_HI_ACTINST(START_TIME_, END_TIME_); +create index ACT_IDX_HI_ACT_INST_END on ACT_HI_ACTINST(END_TIME_); +create index ACT_IDX_HI_ACT_INST_PROCINST on ACT_HI_ACTINST(PROC_INST_ID_, ACT_ID_); +create index ACT_IDX_HI_ACT_INST_COMP on ACT_HI_ACTINST(EXECUTION_ID_, ACT_ID_, END_TIME_, ID_); +create index ACT_IDX_HI_ACT_INST_STATS on ACT_HI_ACTINST(PROC_DEF_ID_, PROC_INST_ID_, ACT_ID_, END_TIME_, ACT_INST_STATE_); +create index ACT_IDX_HI_ACT_INST_TENANT_ID on ACT_HI_ACTINST(TENANT_ID_); +create index ACT_IDX_HI_ACT_INST_PROC_DEF_KEY on ACT_HI_ACTINST(PROC_DEF_KEY_); +create index ACT_IDX_HI_AI_PDEFID_END_TIME on ACT_HI_ACTINST(PROC_DEF_ID_, END_TIME_); +create index ACT_IDX_HI_ACT_INST_RM_TIME on ACT_HI_ACTINST(REMOVAL_TIME_); + +create index ACT_IDX_HI_TASKINST_ROOT_PI on ACT_HI_TASKINST(ROOT_PROC_INST_ID_); +create index 
ACT_IDX_HI_TASK_INST_TENANT_ID on ACT_HI_TASKINST(TENANT_ID_); +create index ACT_IDX_HI_TASK_INST_PROC_DEF_KEY on ACT_HI_TASKINST(PROC_DEF_KEY_); +create index ACT_IDX_HI_TASKINST_PROCINST on ACT_HI_TASKINST(PROC_INST_ID_); +create index ACT_IDX_HI_TASKINSTID_PROCINST on ACT_HI_TASKINST(ID_,PROC_INST_ID_); +create index ACT_IDX_HI_TASK_INST_RM_TIME on ACT_HI_TASKINST(REMOVAL_TIME_); +create index ACT_IDX_HI_TASK_INST_START on ACT_HI_TASKINST(START_TIME_); +create index ACT_IDX_HI_TASK_INST_END on ACT_HI_TASKINST(END_TIME_); + +create index ACT_IDX_HI_DETAIL_ROOT_PI on ACT_HI_DETAIL(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_DETAIL_PROC_INST on ACT_HI_DETAIL(PROC_INST_ID_); +create index ACT_IDX_HI_DETAIL_ACT_INST on ACT_HI_DETAIL(ACT_INST_ID_); +create index ACT_IDX_HI_DETAIL_CASE_INST on ACT_HI_DETAIL(CASE_INST_ID_); +create index ACT_IDX_HI_DETAIL_CASE_EXEC on ACT_HI_DETAIL(CASE_EXECUTION_ID_); +create index ACT_IDX_HI_DETAIL_TIME on ACT_HI_DETAIL(TIME_); +create index ACT_IDX_HI_DETAIL_NAME on ACT_HI_DETAIL(NAME_); +create index ACT_IDX_HI_DETAIL_TASK_ID on ACT_HI_DETAIL(TASK_ID_); +create index ACT_IDX_HI_DETAIL_TENANT_ID on ACT_HI_DETAIL(TENANT_ID_); +create index ACT_IDX_HI_DETAIL_PROC_DEF_KEY on ACT_HI_DETAIL(PROC_DEF_KEY_); +create index ACT_IDX_HI_DETAIL_BYTEAR on ACT_HI_DETAIL(BYTEARRAY_ID_); +create index ACT_IDX_HI_DETAIL_RM_TIME on ACT_HI_DETAIL(REMOVAL_TIME_); +create index ACT_IDX_HI_DETAIL_TASK_BYTEAR on ACT_HI_DETAIL(BYTEARRAY_ID_, TASK_ID_); +create index ACT_IDX_HI_DETAIL_VAR_INST_ID on ACT_HI_DETAIL(VAR_INST_ID_); + +create index ACT_IDX_HI_IDENT_LNK_ROOT_PI on ACT_HI_IDENTITYLINK(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_IDENT_LNK_USER on ACT_HI_IDENTITYLINK(USER_ID_); +create index ACT_IDX_HI_IDENT_LNK_GROUP on ACT_HI_IDENTITYLINK(GROUP_ID_); +create index ACT_IDX_HI_IDENT_LNK_TENANT_ID on ACT_HI_IDENTITYLINK(TENANT_ID_); +create index ACT_IDX_HI_IDENT_LNK_PROC_DEF_KEY on ACT_HI_IDENTITYLINK(PROC_DEF_KEY_); +create index ACT_IDX_HI_IDENT_LINK_TASK on ACT_HI_IDENTITYLINK(TASK_ID_); +create index ACT_IDX_HI_IDENT_LINK_RM_TIME on ACT_HI_IDENTITYLINK(REMOVAL_TIME_); +create index ACT_IDX_HI_IDENT_LNK_TIMESTAMP on ACT_HI_IDENTITYLINK(TIMESTAMP_); + +create index ACT_IDX_HI_VARINST_ROOT_PI on ACT_HI_VARINST(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_PROCVAR_PROC_INST on ACT_HI_VARINST(PROC_INST_ID_); +create index ACT_IDX_HI_PROCVAR_NAME_TYPE on ACT_HI_VARINST(NAME_, VAR_TYPE_); +create index ACT_IDX_HI_CASEVAR_CASE_INST on ACT_HI_VARINST(CASE_INST_ID_); +create index ACT_IDX_HI_VAR_INST_TENANT_ID on ACT_HI_VARINST(TENANT_ID_); +create index ACT_IDX_HI_VAR_INST_PROC_DEF_KEY on ACT_HI_VARINST(PROC_DEF_KEY_); +create index ACT_IDX_HI_VARINST_BYTEAR on ACT_HI_VARINST(BYTEARRAY_ID_); +create index ACT_IDX_HI_VARINST_RM_TIME on ACT_HI_VARINST(REMOVAL_TIME_); +create index ACT_IDX_HI_VAR_PI_NAME_TYPE on ACT_HI_VARINST(PROC_INST_ID_, NAME_, VAR_TYPE_); + +create index ACT_IDX_HI_INCIDENT_TENANT_ID on ACT_HI_INCIDENT(TENANT_ID_); +create index ACT_IDX_HI_INCIDENT_PROC_DEF_KEY on ACT_HI_INCIDENT(PROC_DEF_KEY_); +create index ACT_IDX_HI_INCIDENT_ROOT_PI on ACT_HI_INCIDENT(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_INCIDENT_PROCINST on ACT_HI_INCIDENT(PROC_INST_ID_); +create index ACT_IDX_HI_INCIDENT_RM_TIME on ACT_HI_INCIDENT(REMOVAL_TIME_); +create index ACT_IDX_HI_INCIDENT_CREATE_TIME on ACT_HI_INCIDENT(CREATE_TIME_); +create index ACT_IDX_HI_INCIDENT_END_TIME on ACT_HI_INCIDENT(END_TIME_); + +create index ACT_IDX_HI_JOB_LOG_ROOT_PI on 
ACT_HI_JOB_LOG(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_JOB_LOG_PROCINST on ACT_HI_JOB_LOG(PROCESS_INSTANCE_ID_); +create index ACT_IDX_HI_JOB_LOG_PROCDEF on ACT_HI_JOB_LOG(PROCESS_DEF_ID_); +create index ACT_IDX_HI_JOB_LOG_TENANT_ID on ACT_HI_JOB_LOG(TENANT_ID_); +create index ACT_IDX_HI_JOB_LOG_JOB_DEF_ID on ACT_HI_JOB_LOG(JOB_DEF_ID_); +create index ACT_IDX_HI_JOB_LOG_PROC_DEF_KEY on ACT_HI_JOB_LOG(PROCESS_DEF_KEY_); +create index ACT_IDX_HI_JOB_LOG_EX_STACK on ACT_HI_JOB_LOG(JOB_EXCEPTION_STACK_ID_); +create index ACT_IDX_HI_JOB_LOG_RM_TIME on ACT_HI_JOB_LOG(REMOVAL_TIME_); +create index ACT_IDX_HI_JOB_LOG_JOB_CONF on ACT_HI_JOB_LOG(JOB_DEF_CONFIGURATION_); + +create index ACT_HI_BAT_RM_TIME on ACT_HI_BATCH(REMOVAL_TIME_); + +create index ACT_HI_EXT_TASK_LOG_ROOT_PI on ACT_HI_EXT_TASK_LOG(ROOT_PROC_INST_ID_); +create index ACT_HI_EXT_TASK_LOG_PROCINST on ACT_HI_EXT_TASK_LOG(PROC_INST_ID_); +create index ACT_HI_EXT_TASK_LOG_PROCDEF on ACT_HI_EXT_TASK_LOG(PROC_DEF_ID_); +create index ACT_HI_EXT_TASK_LOG_PROC_DEF_KEY on ACT_HI_EXT_TASK_LOG(PROC_DEF_KEY_); +create index ACT_HI_EXT_TASK_LOG_TENANT_ID on ACT_HI_EXT_TASK_LOG(TENANT_ID_); +create index ACT_IDX_HI_EXTTASKLOG_ERRORDET on ACT_HI_EXT_TASK_LOG(ERROR_DETAILS_ID_); +create index ACT_HI_EXT_TASK_LOG_RM_TIME on ACT_HI_EXT_TASK_LOG(REMOVAL_TIME_); + +create index ACT_IDX_HI_OP_LOG_ROOT_PI on ACT_HI_OP_LOG(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_OP_LOG_PROCINST on ACT_HI_OP_LOG(PROC_INST_ID_); +create index ACT_IDX_HI_OP_LOG_PROCDEF on ACT_HI_OP_LOG(PROC_DEF_ID_); +create index ACT_IDX_HI_OP_LOG_TASK on ACT_HI_OP_LOG(TASK_ID_); +create index ACT_IDX_HI_OP_LOG_RM_TIME on ACT_HI_OP_LOG(REMOVAL_TIME_); +create index ACT_IDX_HI_OP_LOG_TIMESTAMP on ACT_HI_OP_LOG(TIMESTAMP_); +create index ACT_IDX_HI_OP_LOG_USER_ID on ACT_HI_OP_LOG(USER_ID_); +create index ACT_IDX_HI_OP_LOG_OP_TYPE on ACT_HI_OP_LOG(OPERATION_TYPE_); +create index ACT_IDX_HI_OP_LOG_ENTITY_TYPE on ACT_HI_OP_LOG(ENTITY_TYPE_); + +create index ACT_IDX_HI_COMMENT_TASK on ACT_HI_COMMENT(TASK_ID_); +create index ACT_IDX_HI_COMMENT_ROOT_PI on ACT_HI_COMMENT(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_COMMENT_PROCINST on ACT_HI_COMMENT(PROC_INST_ID_); +create index ACT_IDX_HI_COMMENT_RM_TIME on ACT_HI_COMMENT(REMOVAL_TIME_); + +create index ACT_IDX_HI_ATTACHMENT_CONTENT on ACT_HI_ATTACHMENT(CONTENT_ID_); +create index ACT_IDX_HI_ATTACHMENT_ROOT_PI on ACT_HI_ATTACHMENT(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_ATTACHMENT_PROCINST on ACT_HI_ATTACHMENT(PROC_INST_ID_); +create index ACT_IDX_HI_ATTACHMENT_TASK on ACT_HI_ATTACHMENT(TASK_ID_); +create index ACT_IDX_HI_ATTACHMENT_RM_TIME on ACT_HI_ATTACHMENT(REMOVAL_TIME_); +-- +-- Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH +-- under one or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information regarding copyright +-- ownership. Camunda licenses this file to you under the Apache License, +-- Version 2.0; you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +create table ACT_HI_CASEINST ( + ID_ varchar(64) not null, + CASE_INST_ID_ varchar(64) not null, + BUSINESS_KEY_ varchar(255), + CASE_DEF_ID_ varchar(64) not null, + CREATE_TIME_ datetime(3) not null, + CLOSE_TIME_ datetime(3), + DURATION_ bigint, + STATE_ integer, + CREATE_USER_ID_ varchar(255), + SUPER_CASE_INSTANCE_ID_ varchar(64), + SUPER_PROCESS_INSTANCE_ID_ varchar(64), + TENANT_ID_ varchar(64), + primary key (ID_), + unique (CASE_INST_ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create table ACT_HI_CASEACTINST ( + ID_ varchar(64) not null, + PARENT_ACT_INST_ID_ varchar(64), + CASE_DEF_ID_ varchar(64) not null, + CASE_INST_ID_ varchar(64) not null, + CASE_ACT_ID_ varchar(255) not null, + TASK_ID_ varchar(64), + CALL_PROC_INST_ID_ varchar(64), + CALL_CASE_INST_ID_ varchar(64), + CASE_ACT_NAME_ varchar(255), + CASE_ACT_TYPE_ varchar(255), + CREATE_TIME_ datetime(3) not null, + END_TIME_ datetime(3), + DURATION_ bigint, + STATE_ integer, + REQUIRED_ boolean, + TENANT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +create index ACT_IDX_HI_CAS_I_CLOSE on ACT_HI_CASEINST(CLOSE_TIME_); +create index ACT_IDX_HI_CAS_I_BUSKEY on ACT_HI_CASEINST(BUSINESS_KEY_); +create index ACT_IDX_HI_CAS_I_TENANT_ID on ACT_HI_CASEINST(TENANT_ID_); +create index ACT_IDX_HI_CAS_A_I_CREATE on ACT_HI_CASEACTINST(CREATE_TIME_); +create index ACT_IDX_HI_CAS_A_I_END on ACT_HI_CASEACTINST(END_TIME_); +create index ACT_IDX_HI_CAS_A_I_COMP on ACT_HI_CASEACTINST(CASE_ACT_ID_, END_TIME_, ID_); +create index ACT_IDX_HI_CAS_A_I_CASEINST on ACT_HI_CASEACTINST(CASE_INST_ID_, CASE_ACT_ID_); +create index ACT_IDX_HI_CAS_A_I_TENANT_ID on ACT_HI_CASEACTINST(TENANT_ID_); +-- +-- Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH +-- under one or more contributor license agreements. See the NOTICE file +-- distributed with this work for additional information regarding copyright +-- ownership. Camunda licenses this file to you under the Apache License, +-- Version 2.0; you may not use this file except in compliance with the License. +-- You may obtain a copy of the License at +-- +-- http://www.apache.org/licenses/LICENSE-2.0 +-- +-- Unless required by applicable law or agreed to in writing, software +-- distributed under the License is distributed on an "AS IS" BASIS, +-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +-- See the License for the specific language governing permissions and +-- limitations under the License. 
+-- + +-- create history decision instance table -- +create table ACT_HI_DECINST ( + ID_ varchar(64) NOT NULL, + DEC_DEF_ID_ varchar(64) NOT NULL, + DEC_DEF_KEY_ varchar(255) NOT NULL, + DEC_DEF_NAME_ varchar(255), + PROC_DEF_KEY_ varchar(255), + PROC_DEF_ID_ varchar(64), + PROC_INST_ID_ varchar(64), + CASE_DEF_KEY_ varchar(255), + CASE_DEF_ID_ varchar(64), + CASE_INST_ID_ varchar(64), + ACT_INST_ID_ varchar(64), + ACT_ID_ varchar(255), + EVAL_TIME_ datetime(3) not null, + REMOVAL_TIME_ datetime(3), + COLLECT_VALUE_ double, + USER_ID_ varchar(255), + ROOT_DEC_INST_ID_ varchar(64), + ROOT_PROC_INST_ID_ varchar(64), + DEC_REQ_ID_ varchar(64), + DEC_REQ_KEY_ varchar(255), + TENANT_ID_ varchar(64), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +-- create history decision input table -- +create table ACT_HI_DEC_IN ( + ID_ varchar(64) NOT NULL, + DEC_INST_ID_ varchar(64) NOT NULL, + CLAUSE_ID_ varchar(64), + CLAUSE_NAME_ varchar(255), + VAR_TYPE_ varchar(100), + BYTEARRAY_ID_ varchar(64), + DOUBLE_ double, + LONG_ bigint, + TEXT_ varchar(10000), + TEXT2_ varchar(4000), + TENANT_ID_ varchar(64), + CREATE_TIME_ datetime(3), + ROOT_PROC_INST_ID_ varchar(64), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + +-- create history decision output table -- +create table ACT_HI_DEC_OUT ( + ID_ varchar(64) NOT NULL, + DEC_INST_ID_ varchar(64) NOT NULL, + CLAUSE_ID_ varchar(64), + CLAUSE_NAME_ varchar(255), + RULE_ID_ varchar(64), + RULE_ORDER_ integer, + VAR_NAME_ varchar(255), + VAR_TYPE_ varchar(100), + BYTEARRAY_ID_ varchar(64), + DOUBLE_ double, + LONG_ bigint, + TEXT_ varchar(10000), + TEXT2_ varchar(4000), + TENANT_ID_ varchar(64), + CREATE_TIME_ datetime(3), + ROOT_PROC_INST_ID_ varchar(64), + REMOVAL_TIME_ datetime(3), + primary key (ID_) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE utf8_bin; + + +create index ACT_IDX_HI_DEC_INST_ID on ACT_HI_DECINST(DEC_DEF_ID_); +create index ACT_IDX_HI_DEC_INST_KEY on ACT_HI_DECINST(DEC_DEF_KEY_); +create index ACT_IDX_HI_DEC_INST_PI on ACT_HI_DECINST(PROC_INST_ID_); +create index ACT_IDX_HI_DEC_INST_CI on ACT_HI_DECINST(CASE_INST_ID_); +create index ACT_IDX_HI_DEC_INST_ACT on ACT_HI_DECINST(ACT_ID_); +create index ACT_IDX_HI_DEC_INST_ACT_INST on ACT_HI_DECINST(ACT_INST_ID_); +create index ACT_IDX_HI_DEC_INST_TIME on ACT_HI_DECINST(EVAL_TIME_); +create index ACT_IDX_HI_DEC_INST_TENANT_ID on ACT_HI_DECINST(TENANT_ID_); +create index ACT_IDX_HI_DEC_INST_ROOT_ID on ACT_HI_DECINST(ROOT_DEC_INST_ID_); +create index ACT_IDX_HI_DEC_INST_REQ_ID on ACT_HI_DECINST(DEC_REQ_ID_); +create index ACT_IDX_HI_DEC_INST_REQ_KEY on ACT_HI_DECINST(DEC_REQ_KEY_); +create index ACT_IDX_HI_DEC_INST_ROOT_PI on ACT_HI_DECINST(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_DEC_INST_RM_TIME on ACT_HI_DECINST(REMOVAL_TIME_); + + +create index ACT_IDX_HI_DEC_IN_INST on ACT_HI_DEC_IN(DEC_INST_ID_); +create index ACT_IDX_HI_DEC_IN_CLAUSE on ACT_HI_DEC_IN(DEC_INST_ID_, CLAUSE_ID_); +create index ACT_IDX_HI_DEC_IN_ROOT_PI on ACT_HI_DEC_IN(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_DEC_IN_RM_TIME on ACT_HI_DEC_IN(REMOVAL_TIME_); + +create index ACT_IDX_HI_DEC_OUT_INST on ACT_HI_DEC_OUT(DEC_INST_ID_); +create index ACT_IDX_HI_DEC_OUT_RULE on ACT_HI_DEC_OUT(RULE_ORDER_, CLAUSE_ID_); +create index ACT_IDX_HI_DEC_OUT_ROOT_PI on ACT_HI_DEC_OUT(ROOT_PROC_INST_ID_); +create index ACT_IDX_HI_DEC_OUT_RM_TIME on ACT_HI_DEC_OUT(REMOVAL_TIME_); + diff --git 
a/plans/so/integration-etsi-testing/config/distribution-test-zip/macro_zipped_sdc_csar.tar.gz b/plans/so/integration-etsi-testing/config/distribution-test-zip/macro_zipped_sdc_csar.tar.gz new file mode 100755 index 00000000..f826c2eb Binary files /dev/null and b/plans/so/integration-etsi-testing/config/distribution-test-zip/macro_zipped_sdc_csar.tar.gz differ diff --git a/plans/so/integration-etsi-testing/config/env b/plans/so/integration-etsi-testing/config/env index c0d8210a..1b6a88f8 100644 --- a/plans/so/integration-etsi-testing/config/env +++ b/plans/so/integration-etsi-testing/config/env @@ -5,3 +5,7 @@ TIME_OUT_DEFAULT_VALUE_SEC=1200 PROJECT_NAME=etsiintegrationtesting DEFAULT_NETWORK_NAME=etsiintegrationtesting_default ETSI_CATALOG_IMAGE_VERSION=1.0.9 +SOL_003_ADAPTER_IMAGE_VERSION=1.8.1 +ETSI_NFVO_NS_LCM_IMAGE_VERSION=1.8.1 +SO_ADMIN_COCKPIT_IMAGE_VERSION=1.8.1 +MARIADB_VERSION=10.5.8 diff --git a/plans/so/integration-etsi-testing/config/override-files/bpmn-infra/onapheat/override.yaml b/plans/so/integration-etsi-testing/config/override-files/bpmn-infra/onapheat/override.yaml index 684c675e..df831c0b 100644 --- a/plans/so/integration-etsi-testing/config/override-files/bpmn-infra/onapheat/override.yaml +++ b/plans/so/integration-etsi-testing/config/override-files/bpmn-infra/onapheat/override.yaml @@ -358,7 +358,7 @@ spring: so: vnfm: adapter: - url: http://so-vnfm-adapter:9092/so/vnfm-adapter/v1/ + url: http://so-etsi-sol003-adapter.onap:9092/so/vnfm-adapter/v1/ auth: Basic dm5mbTpwYXNzd29yZDEk org: onap: diff --git a/plans/so/integration-etsi-testing/config/override-files/openstack-adapter/onapheat/override.yaml b/plans/so/integration-etsi-testing/config/override-files/openstack-adapter/onapheat/override.yaml new file mode 100644 index 00000000..7e2afa85 --- /dev/null +++ b/plans/so/integration-etsi-testing/config/override-files/openstack-adapter/onapheat/override.yaml @@ -0,0 +1,147 @@ +server: + port: 8087 + +spring: + datasource: + hikari: + jdbcUrl: jdbc:mariadb://mariadb:3306/catalogdb + username: cataloguser + password: catalog123 + driver-class-name: org.mariadb.jdbc.Driver + pool-name: catdb-pool + registerMbeans: false + + security: + usercredentials: + - + username: sdnc + password: '$2a$10$Fh9ffgPw2vnmsghsRD3ZauBL1aKXebigbq3BB1RPWtE62UDILsjke' + role: SDNC-Client + - + username: sitecontrol + password: '$2a$10$Fh9ffgPw2vnmsghsRD3ZauBL1aKXebigbq3BB1RPWtE62UDILsjke' + role: SiteControl-Client + - + username: bpel + password: '$2a$10$Fh9ffgPw2vnmsghsRD3ZauBL1aKXebigbq3BB1RPWtE62UDILsjke' + role: BPEL-Client + - + username: sniro + password: '$2a$10$Fh9ffgPw2vnmsghsRD3ZauBL1aKXebigbq3BB1RPWtE62UDILsjke' + role: SNIRO-Client + - + username: apih + password: '$2a$10$Fh9ffgPw2vnmsghsRD3ZauBL1aKXebigbq3BB1RPWtE62UDILsjke' + role: MSO-Client + - + username: mso_admin + password: '$2a$10$Fh9ffgPw2vnmsghsRD3ZauBL1aKXebigbq3BB1RPWtE62UDILsjke' + role: ACTUATOR + + +org: + onap: + so: + adapters: + default_keystone_url_version: /v2.0 + default_keystone_reg_ex: "/[vV][0-9]" + vnf: + bpelauth: D1A67FA93B6A6419132D0F83CC771AF774FD3C60853C50C22C8C6FC5088CC79E9E81EDE9EA39F22B2F66A0068E + checkRequiredParameters: true + addGetFilesOnVolumeReq: false + sockettimeout: 30 + connecttimeout: 30 + retrycount: 5 + retryinterval: -15 + retrylist: 408,429,500,502,503,504,900 + valet_enabled: false + fail_requests_on_valet_failure: false + network: + bpelauth: D1A67FA93B6A6419132D0F83CC771AF774FD3C60853C50C22C8C6FC5088CC79E9E81EDE9EA39F22B2F66A0068E + sockettimeout: 5 + connecttimeout: 5 + 
retrycount: 5 + retryinterval: -15 + retrylist: 408,429,500,502,503,504,900 + encryptionKey: 07a7159d3bf51a0e53be7a8f89699be7 + tenant: + default_keystone_url_version: /v2.0 + default_keystone_reg_ex: "/[vV][0-9]" + default_tenant_description: Tenant + default_region_type: single + default_user_role: admin + default_success_status_string: Success + default_no_regions_status_string: no regions + default_quota_value: 10 + set_default_quota: false + +ecomp: + mso: + adapters: + po: + retryCodes: 504 + retryDelay: 5 + retryCount: 3 + pollTimeout: 7500 + pollInterval: 15 + +mso: + adapters: + requestDb: + endpoint: http://so-request-db-adapter.onap:8083 + auth: Basic YnBlbDpwYXNzd29yZDEk + auth: BEA8637716A7EB617DF472BA6552D22F68C1CB17B0D094D77DDA562F4ADAAC4457CAB848E1A4 + msoKey: 07a7159d3bf51a0e53be7a8f89699be7 + logPath: ./logs/openstack + msb-ip: multicloud-simulator + msb-port: 9996 + msb-scheme: http + workflow: + endpoint: http://bpmn-infra:8081/sobpmnengine + config: + cadi: + aafId: poBpmn + catalog: + db: + spring: + endpoint: http://catalog-db-adapter:8082 + db: + auth: Basic YnBlbDpwYXNzd29yZDEk + site-name: localDevEnv + async: + core-pool-size: 50 + max-pool-size: 50 + queue-capacity: 500 + +cloud_config: + identity_services: + RAX_KEYSTONE: + identity_url: "https://identity.api.rackspacecloud.com/v2.0" + mso_id: "RACKSPACE_ACCOUNT_ID" + mso_pass: "RACKSPACE_ACCOUNT_APIKEY" + admin_tenant: "service" + member_role: "admin" + tenant_metadata: true + identity_server_type: "KEYSTONE" + identity_authentication_type: "RACKSPACE_APIKEY" + cloud_sites: + Dallas: + region_id: "DFW" + clli: "DFW" + aic_version: "2.5" + identity_service_id: "RAX_KEYSTONE" + Northern Virginia: + region_id: "IAD" + clli: "IAD" + aic_version: "2.5" + identity_service_id: "RAX_KEYSTONE" + Chicago: + region_id: "ORD" + clli: "ORD" + aic_version: "2.5" + identity_service_id: "RAX_KEYSTONE" + DEFAULT: + region_id: "DFW" + clli: "DFW" + aic_version: "2.5" + identity_service_id: "RAX_KEYSTONE" diff --git a/plans/so/integration-etsi-testing/config/override-files/so-etsi-nfvo-ns-lcm/onapheat/override.yaml b/plans/so/integration-etsi-testing/config/override-files/so-etsi-nfvo-ns-lcm/onapheat/override.yaml new file mode 100644 index 00000000..8b1a72c6 --- /dev/null +++ b/plans/so/integration-etsi-testing/config/override-files/so-etsi-nfvo-ns-lcm/onapheat/override.yaml @@ -0,0 +1,47 @@ +aai: + auth: 221187EFA3AD4E33600DE0488F287099934CE65C3D0697BCECC00BB58E784E07CD74A24581DC31DBC086FF63DF116378776E9BE3D1325885 + version: v15 + endpoint: https://aai-simulator:9993 +spring: + datasource: + hikari: + camunda: + jdbcUrl: jdbc:mariadb://mariadb:3306/camundabpmn + username: so_user + password: so_User123 + driver-class-name: org.mariadb.jdbc.Driver + pool-name: bpmn-pool + registerMbeans: true + nfvo: + jdbcUrl: jdbc:mariadb://mariadb:3306/nfvo + username: so_admin + password: so_Admin123 + driver-class-name: org.mariadb.jdbc.Driver + pool-name: nfvo-pool + registerMbeans: true + security: + usercredentials: + - username: so-etsi-nfvo-ns-lcm + password: $2a$10$Fh9ffgPw2vnmsghsRD3ZauBL1aKXebigbq3BB1RPWtE62UDILsjke + role: ETSI-NFVO-Client +server: + port: 9095 + tomcat: + max-threads: 50 +mso: + key: 07a7159d3bf51a0e53be7a8f89699be7 +so: + adapters: + sol003-adapter: + url: http://so-etsi-sol003-adapter:9092/so/vnfm-adapter/v1 + auth: Basic dm5mbTpwYXNzd29yZDEk +etsi-catalog-manager: + base: + endpoint: http://modeling-etsicatalog:8806/api +camunda: + bpm: + history-level: full + job-execution: + max-pool-size: 30 + 
core-pool-size: 3 + deployment-aware: true diff --git a/plans/so/integration-etsi-testing/config/override-files/so-vnfm-adapter/onapheat/override.yaml b/plans/so/integration-etsi-testing/config/override-files/so-vnfm-adapter/onapheat/override.yaml index 99c9dbd0..ee939021 100644 --- a/plans/so/integration-etsi-testing/config/override-files/so-vnfm-adapter/onapheat/override.yaml +++ b/plans/so/integration-etsi-testing/config/override-files/so-vnfm-adapter/onapheat/override.yaml @@ -28,7 +28,7 @@ sdc: endpoint: http://sdc-simulator:9991/ toscametapath: Artifacts/Deployment/OTHER/TOSCA.meta vnfmadapter: - endpoint: http://so-vnfm-adapter:9092 + endpoint: http://so-etsi-sol003-adapter.onap:9092 etsi-catalog-manager: vnfpkgm: endpoint: http://modeling-etsicatalog:8806/api/vnfpkgm/v1 diff --git a/plans/so/integration-etsi-testing/config/override-files/vnfm-simulator/onapheat/override.yaml b/plans/so/integration-etsi-testing/config/override-files/vnfm-simulator/onapheat/override.yaml index b57d0e85..3adfe78d 100644 --- a/plans/so/integration-etsi-testing/config/override-files/vnfm-simulator/onapheat/override.yaml +++ b/plans/so/integration-etsi-testing/config/override-files/vnfm-simulator/onapheat/override.yaml @@ -21,17 +21,17 @@ server: tomcat: max-threads: 50 ssl: - client-auth: need - key-alias: so@so.onap.org - key--store-password: '7Em3&j4.19xYiMelhD5?xbQ.' - key-store: classpath:so-vnfm-simulator.p12 - key-store-type: PKCS12 + enabled: false request: grant: - auth: twowaytls + auth: none dns: name: so-vnfm-simulator +vnfm-adapter: + base: + endpoint: http://so-etsi-sol003-adapter.onap:9092 + vnfds: vnfdlist: - vnfdid: sgsn-mme_12df452s04131 diff --git a/plans/so/integration-etsi-testing/docker-compose.local.yml b/plans/so/integration-etsi-testing/docker-compose.local.yml index 26b45a3d..1f07a032 100644 --- a/plans/so/integration-etsi-testing/docker-compose.local.yml +++ b/plans/so/integration-etsi-testing/docker-compose.local.yml @@ -2,7 +2,7 @@ version: '3' services: ################################################################################ mariadb: - image: mariadb:10.1.11 + image: mariadb:${MARIADB_VERSION} ################################################################################ catalog-db-adapter: image: onap/so/catalog-db-adapter:${TAG} @@ -22,11 +22,14 @@ services: api-handler-infra: image: onap/so/api-handler-infra:${TAG} ############################################################################# - so-monitoring: - image: onap/so/so-monitoring:${TAG} + so-admin-cockpit: + image: onap/so/so-admin-cockpit:${SO_ADMIN_COCKPIT_IMAGE_VERSION} ################################################################################ - so-vnfm-adapter: - image: onap/so/vnfm-adapter:${TAG} + so-etsi-sol003-adapter: + image: onap/so/so-etsi-sol003-adapter:${SOL_003_ADAPTER_IMAGE_VERSION} ################################################################################ modeling-etsicatalog: image: ${NEXUS_DOCKER_REPO_MSO}/onap/modeling/etsicatalog:${ETSI_CATALOG_IMAGE_VERSION} +################################################################################ + so-etsi-nfvo-ns-lcm: + image: onap/so/so-etsi-nfvo-ns-lcm:${ETSI_NFVO_NS_LCM_IMAGE_VERSION} diff --git a/plans/so/integration-etsi-testing/docker-compose.yml b/plans/so/integration-etsi-testing/docker-compose.yml index f1853e0e..0c4d03f0 100644 --- a/plans/so/integration-etsi-testing/docker-compose.yml +++ b/plans/so/integration-etsi-testing/docker-compose.yml @@ -2,7 +2,7 @@ version: '3' services: 
################################################################################ mariadb: - image: ${NEXUS_DOCKER_REPO_MSO}/mariadb:10.1.11 + image: ${NEXUS_DOCKER_REPO_MSO}/mariadb:${MARIADB_VERSION} ports: - "3306:3306" volumes: @@ -108,6 +108,36 @@ services: max-file: "5" user: root entrypoint: /bin/sh -c '/app/wait-for.sh -q -t "300" request-db-adapter:8083 -- "/app/start-app.sh"' +################################################################################ + openstack-adapter: + image: ${NEXUS_DOCKER_REPO_MSO}/onap/so/openstack-adapter:1.8.0-STAGING-latest + ports: + - "8087:8087" + volumes: + - ${TEST_LAB_DIR}/volumes/so/ca-certificates/onapheat:/app/ca-certificates + - ${CONFIG_DIR_PATH}/override-files/openstack-adapter/onapheat:/app/config + environment: + - APP=openstack-adapter + - JVM_ARGS=-Xms64m -Xmx512m + - DB_HOST=mariadb + - DB_PORT=3306 + - DB_USERNAME=so_user + - DB_PASSWORD=so_User123 + - DB_ADMIN_USERNAME=so_admin + - DB_ADMIN_PASSWORD=so_Admin123 + hostname: + openstack-adapter.so.testlab.onap.org + depends_on: + - mariadb + - catalog-db-adapter + - request-db-adapter + logging: + driver: "json-file" + options: + max-size: "30m" + max-file: "5" + user: root + entrypoint: /bin/sh -c '/app/wait-for.sh -q -t "300" request-db-adapter:8083 -- "/app/start-app.sh"' ################################################################################ sdc-controller: image: ${NEXUS_DOCKER_REPO_MSO}/onap/so/sdc-controller:${TAG} @@ -115,7 +145,7 @@ services: - "8085:8085" volumes: - ${TEST_LAB_DIR}/volumes/so/ca-certificates/onapheat:/app/ca-certificates - - ${CONFIG_DIR_PATH}/distribution-test-zip/zipped_sdc_csar.tar.gz:/distribution-test-zip/zipped_sdc_csar.tar.gz + - ${CONFIG_DIR_PATH}/distribution-test-zip:/distribution-test-zip - ${CONFIG_DIR_PATH}/override-files/sdc-controller/onapheat:/app/config environment: - APP=sdc-controller @@ -138,7 +168,7 @@ services: max-size: "30m" max-file: "5" user: root - entrypoint: /bin/sh -c 'mkdir -p /distribution-test-zip/unzipped && tar -xvzf /distribution-test-zip/zipped_sdc_csar.tar.gz -C /distribution-test-zip/unzipped && chmod 777 -R /distribution-test-zip/ && /app/wait-for.sh -q -t "300" request-db-adapter:8083 -- "/app/start-app.sh"' + entrypoint: /bin/sh -c 'mkdir -p /distribution-test-zip/unzipped && tar -xvzf /distribution-test-zip/zipped_sdc_csar.tar.gz -C /distribution-test-zip/unzipped && tar -xvzf /distribution-test-zip/macro_zipped_sdc_csar.tar.gz -C /distribution-test-zip/unzipped && chmod 777 -R /distribution-test-zip/ && /app/wait-for.sh -q -t "300" request-db-adapter:8083 -- "/app/start-app.sh"' ################################################################################ bpmn-infra: image: ${NEXUS_DOCKER_REPO_MSO}/onap/so/bpmn-infra:${TAG} @@ -159,6 +189,8 @@ services: - DB_ADMIN_PASSWORD=so_Admin123 hostname: bpmn-infra.so.testlab.onap.org + links: + - "so-etsi-sol003-adapter:so-etsi-sol003-adapter.onap" depends_on: - mariadb - catalog-db-adapter @@ -201,24 +233,24 @@ services: user: root entrypoint: /bin/sh -c '/app/wait-for.sh -q -t "300" request-db-adapter:8083 -- "/app/start-app.sh"' ############################################################################# - so-monitoring: - image: ${NEXUS_DOCKER_REPO_MSO}/onap/so/so-monitoring:${TAG} + so-admin-cockpit: + image: ${NEXUS_DOCKER_REPO_MSO}/onap/so/so-admin-cockpit:${SO_ADMIN_COCKPIT_IMAGE_VERSION} ports: - "30224:30224" volumes: - ${CONFIG_DIR_PATH}/override-files/so-monitoring/onapheat:/app/config environment: - - APP=so-monitoring + - APP=so-admin-cockpit - 
JVM_ARGS=-Xms64m -Xmx512m hostname: - so-monitoring.so.testlab.onap.org + so-admin-cockpit.so.testlab.onap.org depends_on: - mariadb - catalog-db-adapter - request-db-adapter ################################################################################ - so-vnfm-adapter: - image: ${NEXUS_DOCKER_REPO_MSO}/onap/so/vnfm-adapter:${TAG} + so-etsi-sol003-adapter: + image: ${NEXUS_DOCKER_REPO_MSO}/onap/so/so-etsi-sol003-adapter:${SOL_003_ADAPTER_IMAGE_VERSION} ports: - "9092:9092" volumes: @@ -226,10 +258,10 @@ services: - ${CONFIG_DIR_PATH}/certificates/so-vnfm-adapter-certs:/app/so-vnfm-adapter-certs - ${CONFIG_DIR_PATH}/certificates/truststore/root-ca.crt:/app/ca-certificates/root-ca.crt environment: - - APP=so-vnfm-adapter + - APP=so-etsi-sol003-adapter - JVM_ARGS=-Xms64m -Xmx512m hostname: - so-vnfm-adapter + so-etsi-sol003-adapter depends_on: - request-db-adapter logging: @@ -251,7 +283,7 @@ services: sdc-simulator depends_on: - mariadb - - so-vnfm-adapter + - so-etsi-sol003-adapter logging: driver: "json-file" options: @@ -269,7 +301,7 @@ services: aai-simulator depends_on: - mariadb - - so-vnfm-adapter + - so-etsi-sol003-adapter logging: driver: "json-file" options: @@ -288,9 +320,11 @@ services: - JVM_ARGS=-Xms64m -Xmx512m hostname: so-vnfm-simulator + links: + - "so-etsi-sol003-adapter:so-etsi-sol003-adapter.onap" depends_on: - mariadb - - so-vnfm-adapter + - so-etsi-sol003-adapter logging: driver: "json-file" options: @@ -310,7 +344,7 @@ services: - CATALOG_DB=catalogdb depends_on: - mariadb - - so-vnfm-adapter + - so-etsi-sol003-adapter command: - "/config/apply-workarounds.sh" ################################################################################# @@ -325,7 +359,25 @@ services: sdnc-simulator depends_on: - mariadb - - so-vnfm-adapter + - so-etsi-sol003-adapter + logging: + driver: "json-file" + options: + max-size: "30m" + max-file: "5" +################################################################################# + multicloud-simulator: + image: simulators/multicloud-simulator:latest + ports: + - "9996:9996" + environment: + - APP=MULTICLOUD-SIMULATOR + - JVM_ARGS=-Xms64m -Xmx512m + hostname: + multicloud-simulator + depends_on: + - mariadb + - so-etsi-sol003-adapter logging: driver: "json-file" options: @@ -361,6 +413,8 @@ services: - SDC_ADDR=http://sdc-simulator:9991 hostname: modeling-etsicatalog + links: + - "so-etsi-sol003-adapter:so-etsi-sol003-adapter.onap" depends_on: - mariadb - sdc-simulator @@ -370,3 +424,31 @@ services: max-size: "30m" max-file: "5" entrypoint: /bin/sh -c '/service/wait-for.sh -t 300 -h mariadb -p 3306 && /service/modeling/etsicatalog/docker/docker-entrypoint.sh' +############################################################################### + so-etsi-nfvo-ns-lcm: + image: ${NEXUS_DOCKER_REPO_MSO}/onap/so/so-etsi-nfvo-ns-lcm:${ETSI_NFVO_NS_LCM_IMAGE_VERSION} + ports: + - "9095:9095" + volumes: + - ${CONFIG_DIR_PATH}/override-files/so-etsi-nfvo-ns-lcm/onapheat:/app/config + - ${CONFIG_DIR_PATH}/certificates/truststore/root-ca.crt:/app/ca-certificates/root-ca.crt + - ${TEST_LAB_DIR}/volumes/so/ca-certificates/onapheat:/app/ca-certificates + environment: + - APP=so-etsi-nfvo-ns-lcm + - JVM_ARGS=-Xms64m -Xmx512m + - DB_HOST=mariadb + - DB_PORT=3306 + - DB_USERNAME=so_user + - DB_PASSWORD=so_User123 + - DB_ADMIN_USERNAME=so_admin + - DB_ADMIN_PASSWORD=so_Admin123 + hostname: + etsi.nfvo.ns.lcm + depends_on: + - mariadb + - aai-simulator + - sdc-simulator + - modeling-etsicatalog + - so-etsi-sol003-adapter + user: root + entrypoint: /bin/sh 
-c '/app/wait-for.sh -q -t "300" mariadb:3306 -- "/app/start-app.sh"' diff --git a/plans/so/integration-etsi-testing/settings.xml b/plans/so/integration-etsi-testing/settings.xml index 5db52989..e645b41b 100755 --- a/plans/so/integration-etsi-testing/settings.xml +++ b/plans/so/integration-etsi-testing/settings.xml @@ -95,7 +95,7 @@ central - http://repo1.maven.org/maven2/ + https://repo1.maven.org/maven2/ onap-public @@ -112,7 +112,7 @@ central - http://repo1.maven.org/maven2/ + https://repo1.maven.org/maven2/ onap-public diff --git a/plans/so/integration-etsi-testing/setup.sh b/plans/so/integration-etsi-testing/setup.sh index 98fa1e32..950571f1 100755 --- a/plans/so/integration-etsi-testing/setup.sh +++ b/plans/so/integration-etsi-testing/setup.sh @@ -33,6 +33,9 @@ TEST_LAB_DIR_PATH=$TEMP_DIR_PATH/test_lab DOCKER_COMPOSE_FILE_PATH=$SCRIPT_HOME/docker-compose.yml DOCKER_COMPOSE_LOCAL_OVERRIDE_FILE=$SCRIPT_HOME/docker-compose.local.yml TEAR_DOWN_SCRIPT=$SCRIPT_HOME/teardown.sh +CAMUNDA_SQL_SCRIPT_NAME=mariadb_engine_7.10.0.sql +CAMUNDA_SQL_SCRIPT_DIR=$CONFIG_DIR/camunda-sql +TEST_LAB_SQL_SCRIPTS_DIR=$TEST_LAB_DIR_PATH/volumes/mariadb/docker-entrypoint-initdb.d/db-sql-scripts MAVEN_DIR=$TEMP_DIR_PATH/maven INSTALLED_MAVEN_DIR=$MAVEN_DIR/$MAVEN_VERSION_DIR @@ -49,7 +52,7 @@ echo "Running $SCRIPT_HOME/$SCRIPT_NAME ..." export $(egrep -v '^#' $ENV_FILE | xargs) -MANDATORY_VARIABLES_NAMES=( "NEXUS_DOCKER_REPO_MSO" "DOCKER_ENVIRONMENT" "TAG" "TIME_OUT_DEFAULT_VALUE_SEC" "PROJECT_NAME" "DEFAULT_NETWORK_NAME", "ETSI_CATALOG_IMAGE_VERSION") +MANDATORY_VARIABLES_NAMES=( "NEXUS_DOCKER_REPO_MSO" "DOCKER_ENVIRONMENT" "TAG" "TIME_OUT_DEFAULT_VALUE_SEC" "PROJECT_NAME" "DEFAULT_NETWORK_NAME", "ETSI_CATALOG_IMAGE_VERSION", "SOL_003_ADAPTER_IMAGE_VERSION", "ETSI_NFVO_NS_LCM_IMAGE_VERSION", "MARIADB_VERSION") for var in "${MANDATORY_VARIABLES_NAMES[@]}" do @@ -136,6 +139,10 @@ fi git clone http://gerrit.onap.org/r/so/docker-config.git $TEST_LAB_DIR_PATH +echo "Replacing $CAMUNDA_SQL_SCRIPT_NAME ..." +rm -rf $TEST_LAB_SQL_SCRIPTS_DIR/$CAMUNDA_SQL_SCRIPT_NAME +cp $CAMUNDA_SQL_SCRIPT_DIR/$CAMUNDA_SQL_SCRIPT_NAME $TEST_LAB_SQL_SCRIPTS_DIR + export TEST_LAB_DIR=$TEST_LAB_DIR_PATH export CONFIG_DIR_PATH=$CONFIG_DIR @@ -175,27 +182,20 @@ if [ $? -ne 0 ]; then exit 1 fi -API_INFRA_CONTAINER_NAME="api-handler-infra" -echo "Will execute $WAIT_FOR_CONTAINER_SCRIPT to wait for $API_INFRA_CONTAINER_NAME container to start up" -$WAIT_FOR_CONTAINER_SCRIPT -c "$API_INFRA_CONTAINER_NAME" -t "300" -n "$DEFAULT_NETWORK_NAME" - -if [ $? -ne 0 ]; then - echo "ERROR: $WAIT_FOR_CONTAINER_SCRIPT failed" - echo "Will stop running docker containers . . ." - $TEAR_DOWN_SCRIPT - exit 1 -fi +PODS_NAMES=( "api-handler-infra" "modeling-etsicatalog" "so-etsi-nfvo-ns-lcm") -MODELING_ETSI_CATALOG_CONTAINER_NAME="modeling-etsicatalog" -echo "Will execute $WAIT_FOR_CONTAINER_SCRIPT to wait for $MODELING_ETSI_CATALOG_CONTAINER_NAME container to start up" -$WAIT_FOR_CONTAINER_SCRIPT -c "$MODELING_ETSI_CATALOG_CONTAINER_NAME" -t "300" -n "$DEFAULT_NETWORK_NAME" +for pod in "${PODS_NAMES[@]}" + do + echo "Will execute $WAIT_FOR_CONTAINER_SCRIPT to wait for $pod container to start up" + $WAIT_FOR_CONTAINER_SCRIPT -c "$pod" -t "300" -n "$DEFAULT_NETWORK_NAME" -if [ $? -ne 0 ]; then - echo "ERROR: $WAIT_FOR_CONTAINER_SCRIPT failed" - echo "Will stop running docker containers . . ." - $TEAR_DOWN_SCRIPT - exit 1 -fi + if [ $? -ne 0 ]; then + echo "ERROR: $WAIT_FOR_CONTAINER_SCRIPT for pod: $pod failed" + echo "Will stop running docker containers . . ." 
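Since setup.sh now exports config/env and treats SOL_003_ADAPTER_IMAGE_VERSION, ETSI_NFVO_NS_LCM_IMAGE_VERSION and MARIADB_VERSION as mandatory, it can help to confirm that those variables actually resolve into image tags before the stack is started. A minimal sanity check, assuming it is run from plans/so/integration-etsi-testing with docker-compose on the PATH (the other variables the compose file needs, such as TAG, NEXUS_DOCKER_REPO_MSO, CONFIG_DIR_PATH and TEST_LAB_DIR, are normally exported by setup.sh itself):

    export $(egrep -v '^#' config/env | xargs)
    # render the resolved compose file; mariadb and the so-etsi-* services should show concrete versions
    docker-compose -f docker-compose.yml config | grep 'image:'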
+ $TEAR_DOWN_SCRIPT + exit 1 + fi +done REPO_IP='127.0.0.1' ROBOT_VARIABLES="-v REPO_IP:${REPO_IP}" diff --git a/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/controller/GenericVnfsController.java b/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/controller/GenericVnfsController.java index 43fe47da..27a0e79d 100644 --- a/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/controller/GenericVnfsController.java +++ b/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/controller/GenericVnfsController.java @@ -22,6 +22,7 @@ package org.onap.so.aaisimulator.controller; import static org.onap.so.aaisimulator.utils.Constants.APPLICATION_MERGE_PATCH_JSON; import static org.onap.so.aaisimulator.utils.Constants.BI_DIRECTIONAL_RELATIONSHIP_LIST_URL; import static org.onap.so.aaisimulator.utils.Constants.GENERIC_VNF; +import static org.onap.so.aaisimulator.utils.Constants.VF_MODULE; import static org.onap.so.aaisimulator.utils.Constants.GENERIC_VNFS_URL; import static org.onap.so.aaisimulator.utils.Constants.RELATIONSHIP_LIST_RELATIONSHIP_URL; import static org.onap.so.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE; @@ -35,6 +36,8 @@ import javax.ws.rs.core.MediaType; import org.onap.aai.domain.yang.GenericVnf; import org.onap.aai.domain.yang.GenericVnfs; import org.onap.aai.domain.yang.Relationship; +import org.onap.aai.domain.yang.VfModule; +import org.onap.aai.domain.yang.VfModules; import org.onap.so.aaisimulator.service.providers.GenericVnfCacheServiceProvider; import org.onap.so.aaisimulator.utils.HttpServiceUtils; import org.onap.so.aaisimulator.utils.RequestErrorResponseUtils; @@ -47,6 +50,7 @@ import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.DeleteMapping; import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PatchMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.PutMapping; @@ -211,5 +215,66 @@ public class GenericVnfsController { return getRequestErrorResponseEntity(request, GENERIC_VNF); } + + + @GetMapping(value = "/generic-vnf/{vnf-id}/vf-modules/vf-module/{vf-module-id}", produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) + public ResponseEntity getVfModule(@PathVariable("vnf-id") final String vnfId, @PathVariable("vf-module-id") final String vfModuleId, + @RequestParam(name = "depth", required = false) final Integer depth, + @RequestParam(name = "resultIndex", required = false) final Integer resultIndex, + @RequestParam(name = "resultSize", required = false) final Integer resultSize, + @RequestParam(name = "format", required = false) final String format, final HttpServletRequest request) { + LOGGER.info( + "Will get VfModule for 'vf-module-id': {} with depth: {}, resultIndex: {}, resultSize:{}, format: {} ...", + vnfId, vfModuleId, depth, resultIndex, resultSize, format); + + final Optional optional = cacheServiceProvider.getVfModule(vnfId, vfModuleId); + + if (optional.isPresent()) { + final VfModule vfModule = optional.get(); + LOGGER.info("found VfModule {} in cache", vfModule); + return ResponseEntity.ok(vfModule); + } + + LOGGER.error( + "Unable to find VfModule in cache for 
'vf-module-id': {} with depth: {}, resultIndex: {}, resultSize:{}, format:{} ...", + vnfId, vfModuleId, depth, resultIndex, resultSize, format); + return getRequestErrorResponseEntity(request, VF_MODULE); + + } + + + + @PutMapping(value = "/generic-vnf/{vnf-id}/vf-modules/vf-module/{vf-module-id}", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}, + produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) + public ResponseEntity putVfModule(@RequestBody final VfModule vfModule, + @PathVariable("vnf-id") final String vnfId, @PathVariable("vf-module-id") final String vfModuleId, final HttpServletRequest request) { + LOGGER.info("Will add VfModule to cache with 'vf-module-id': {} ...", vfModuleId); + + cacheServiceProvider.putVfModule(vnfId, vfModuleId, vfModule); + return ResponseEntity.accepted().build(); + } + + @PostMapping(value = "/generic-vnf/{vnf-id}/vf-modules/vf-module/{vf-module-id}", + consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML, APPLICATION_MERGE_PATCH_JSON}, + produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) + public ResponseEntity patchVfModule(@RequestBody final VfModule vfModule, + @PathVariable("vnf-id") final String vnfId, @PathVariable("vf-module-id") final String vfModuleId, + @RequestHeader(value = X_HTTP_METHOD_OVERRIDE, required = false) final String xHttpHeaderOverride, + final HttpServletRequest request) { + + LOGGER.info("Will post VfModule to cache with 'vf-module-id': {} and '{}': {} ...", vfModuleId, X_HTTP_METHOD_OVERRIDE, + xHttpHeaderOverride); + + if (HttpMethod.PATCH.toString().equalsIgnoreCase(xHttpHeaderOverride)) { + if (cacheServiceProvider.patchVfModule(vnfId, vfModuleId, vfModule)) { + return ResponseEntity.accepted().build(); + } + LOGGER.error("Unable to apply patch to VmModule using 'vf-module-id': {} ... ", vfModule); + return getRequestErrorResponseEntity(request, VF_MODULE); + } + LOGGER.error("{} not supported ... 
", xHttpHeaderOverride); + + return getRequestErrorResponseEntity(request, VF_MODULE); + } } diff --git a/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java b/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java index 901c2594..0ee2d00c 100644 --- a/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java +++ b/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/service/providers/GenericVnfCacheServiceProvider.java @@ -23,6 +23,9 @@ import java.util.List; import java.util.Optional; import org.onap.aai.domain.yang.GenericVnf; import org.onap.aai.domain.yang.Relationship; +import org.onap.aai.domain.yang.VfModule; +import org.onap.aai.domain.yang.VfModules; +import org.onap.aai.domain.yang.v10.VolumeGroup; import org.springframework.http.HttpHeaders; /** @@ -49,5 +52,9 @@ public interface GenericVnfCacheServiceProvider extends Clearable { boolean deleteGenericVnf(final String vnfId, final String resourceVersion); + void putVfModule(String vnfId, String vfModuleId, VfModule vfModule); + Optional getVfModule(final String vnfId, final String vfModuleId); + + boolean patchVfModule(String vnfId, String vfModuleId, VfModule vfModule); } diff --git a/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java b/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java index e7a42106..5a0423b5 100644 --- a/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java +++ b/plans/so/integration-etsi-testing/so-simulators/aai-simulator/src/main/java/org/onap/so/aaisimulator/service/providers/GenericVnfCacheServiceProviderImpl.java @@ -24,6 +24,7 @@ import static org.onap.so.aaisimulator.utils.Constants.COMPOSED_OF; import static org.onap.so.aaisimulator.utils.Constants.GENERIC_VNF; import static org.onap.so.aaisimulator.utils.Constants.GENERIC_VNF_VNF_ID; import static org.onap.so.aaisimulator.utils.Constants.GENERIC_VNF_VNF_NAME; +import static org.onap.so.aaisimulator.utils.Constants.X_HTTP_METHOD_OVERRIDE; import static org.onap.so.aaisimulator.utils.HttpServiceUtils.getBiDirectionalRelationShipListRelatedLink; import static org.onap.so.aaisimulator.utils.HttpServiceUtils.getRelationShipListRelatedLink; import static org.onap.so.aaisimulator.utils.HttpServiceUtils.getTargetUrl; @@ -37,6 +38,10 @@ import org.onap.aai.domain.yang.RelatedToProperty; import org.onap.aai.domain.yang.Relationship; import org.onap.aai.domain.yang.RelationshipData; import org.onap.aai.domain.yang.RelationshipList; +import org.onap.aai.domain.yang.VfModule; +import org.onap.aai.domain.yang.v10.VfModules; +import org.onap.aai.domain.yang.VolumeGroup; +import org.onap.aai.domain.yang.v10.VolumeGroups; import org.onap.so.aaisimulator.utils.ShallowBeanCopy; import org.onap.so.simulator.cache.provider.AbstractCacheServiceProvider; import org.slf4j.Logger; @@ -58,6 +63,7 @@ public class GenericVnfCacheServiceProviderImpl extends AbstractCacheServiceProv private static final Logger LOGGER = 
LoggerFactory.getLogger(GenericVnfCacheServiceProviderImpl.class); private final HttpRestServiceProvider httpRestServiceProvider; + final org.onap.aai.domain.yang.VfModules vfModules = new org.onap.aai.domain.yang.VfModules(); @Autowired public GenericVnfCacheServiceProviderImpl(final CacheManager cacheManager, @@ -255,4 +261,61 @@ public class GenericVnfCacheServiceProviderImpl extends AbstractCacheServiceProv clearCache(GENERIC_VNF_CACHE.getName()); } -} + @Override + public Optional getVfModule(final String vnfId, final String vfModuleId) { + LOGGER.info("Getting vfModule from cache for vnfId: {} and vfModuleId: {}", + vnfId, vfModuleId); + final Cache cache = getCache(GENERIC_VNF_CACHE.getName()); + final GenericVnf value = cache.get(vnfId, GenericVnf.class); + LOGGER.info("Getting vfModule from cache for vnfId: {} and vfModuleId: {}", + vnfId, vfModuleId); + if (value.getVfModules() != null) { + for (int i=0; i genericVnfOptional = getGenericVnf(vnfId); + final Cache cache = getCache(GENERIC_VNF_CACHE.getName()); + if (genericVnfOptional.isPresent()) { + final GenericVnf genericVnf = genericVnfOptional.get(); + + vfModules.getVfModule().add(vfModule); + genericVnf.setVfModules(vfModules); + cache.put(vfModuleId, vfModule); + } + } + + @Override + public boolean patchVfModule(String vnfId, String vfModuleId, VfModule vfModule) { + final Optional genericVnfOptional = getGenericVnf(vnfId); + LOGGER.info("Create vfModule for vnfId: {} and vfModuleId: {}", + vnfId, vfModuleId); + if (genericVnfOptional.isPresent()) { + final GenericVnf cachedGenericVnf = genericVnfOptional.get(); + LOGGER.info("vfModuleId is Matched"); + try { + for (int i=0; i \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + M2_HOME=`dirname "$PRG"`/.. + + # make it fully qualified + M2_HOME=`cd "$M2_HOME" && pwd` + + cd "$saveddir" + # echo Using m2 at $M2_HOME +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --unix "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --unix "$CLASSPATH"` +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$M2_HOME" ] && + M2_HOME="`(cd "$M2_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + if $darwin ; then + javaHome="`dirname \"$javaExecutable\"`" + javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + else + javaExecutable="`readlink -f \"$javaExecutable\"`" + fi + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." 
>&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." +fi + +CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project base directory +find_maven_basedir() { + + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "${wdir}" ]; then + wdir=`cd "$wdir/.."; pwd` + fi + # end of workaround + done + echo "${basedir}" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + echo "$(tr -s '\n' ' ' < "$1")" + fi +} + +BASE_DIR=`find_maven_basedir "$(pwd)"` +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit checking in binary data. +########################################################################################## +if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found .mvn/wrapper/maven-wrapper.jar" + fi +else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." + fi + if [ -n "$MVNW_REPOURL" ]; then + jarUrl="$MVNW_REPOURL/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + else + jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + fi + while IFS="=" read key value; do + case "$key" in (wrapperUrl) jarUrl="$value"; break ;; + esac + done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" + if [ "$MVNW_VERBOSE" = true ]; then + echo "Downloading from: $jarUrl" + fi + wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + if $cygwin; then + wrapperJarPath=`cygpath --path --windows "$wrapperJarPath"` + fi + + if command -v wget > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found wget ... using wget" + fi + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + wget "$jarUrl" -O "$wrapperJarPath" + else + wget --http-user=$MVNW_USERNAME --http-password=$MVNW_PASSWORD "$jarUrl" -O "$wrapperJarPath" + fi + elif command -v curl > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found curl ... using curl" + fi + if [ -z "$MVNW_USERNAME" ] || [ -z "$MVNW_PASSWORD" ]; then + curl -o "$wrapperJarPath" "$jarUrl" -f + else + curl --user $MVNW_USERNAME:$MVNW_PASSWORD -o "$wrapperJarPath" "$jarUrl" -f + fi + + else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Falling back to using Java to download" + fi + javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" + # For Cygwin, switch paths to Windows format before running javac + if $cygwin; then + javaClass=`cygpath --path --windows "$javaClass"` + fi + if [ -e "$javaClass" ]; then + if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Compiling MavenWrapperDownloader.java ..." 
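The block above falls back to compiling and running MavenWrapperDownloader.java only when neither wget nor curl is available. In a constrained build image it may be simpler to pre-seed the wrapper jar once, using the same default URL the script uses; a minimal sketch, assuming network access to repo.maven.apache.org and that it is run from the module root that contains .mvn:

    mkdir -p .mvn/wrapper
    curl -fL -o .mvn/wrapper/maven-wrapper.jar \
      https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
    ./mvnw -v    # the wrapper should now start without attempting a download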
+ fi + # Compiling the Java class + ("$JAVA_HOME/bin/javac" "$javaClass") + fi + if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + # Running the downloader + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Running MavenWrapperDownloader.java ..." + fi + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} +if [ "$MVNW_VERBOSE" = true ]; then + echo $MAVEN_PROJECTBASEDIR +fi +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --path --windows "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` +fi + +# Provide a "standardized" way to retrieve the CLI args that will +# work with both Windows and non-Windows executions. +MAVEN_CMD_LINE_ARGS="$MAVEN_CONFIG $@" +export MAVEN_CMD_LINE_ARGS + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +exec "$JAVACMD" \ + $MAVEN_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.home=${M2_HOME}" "-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ + ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/mvnw.cmd b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/mvnw.cmd new file mode 100644 index 00000000..c8d43372 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/mvnw.cmd @@ -0,0 +1,182 @@ +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM https://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. 
+@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Maven Start Up Batch script +@REM +@REM Required ENV vars: +@REM JAVA_HOME - location of a JDK home dir +@REM +@REM Optional ENV vars +@REM M2_HOME - location of maven2's installed home dir +@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands +@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a keystroke before ending +@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven +@REM e.g. to debug Maven itself, use +@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files +@REM ---------------------------------------------------------------------------- + +@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' +@echo off +@REM set title of command window +title %0 +@REM enable echoing by setting MAVEN_BATCH_ECHO to 'on' +@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% + +@REM set %HOME% to equivalent of $HOME +if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") + +@REM Execute a user defined script before this one +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre +@REM check for pre script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" +if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" +:skipRcPre + +@setlocal + +set ERROR_CODE=0 + +@REM To isolate internal variables from possible post scripts, we use another setlocal +@setlocal + +@REM ==== START VALIDATION ==== +if not "%JAVA_HOME%" == "" goto OkJHome + +echo. +echo Error: JAVA_HOME not found in your environment. >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +:OkJHome +if exist "%JAVA_HOME%\bin\java.exe" goto init + +echo. +echo Error: JAVA_HOME is set to an invalid directory. >&2 +echo JAVA_HOME = "%JAVA_HOME%" >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +@REM ==== END VALIDATION ==== + +:init + +@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". +@REM Fallback to current working directory if not found. + +set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% +IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir + +set EXEC_DIR=%CD% +set WDIR=%EXEC_DIR% +:findBaseDir +IF EXIST "%WDIR%"\.mvn goto baseDirFound +cd .. +IF "%WDIR%"=="%CD%" goto baseDirNotFound +set WDIR=%CD% +goto findBaseDir + +:baseDirFound +set MAVEN_PROJECTBASEDIR=%WDIR% +cd "%EXEC_DIR%" +goto endDetectBaseDir + +:baseDirNotFound +set MAVEN_PROJECTBASEDIR=%EXEC_DIR% +cd "%EXEC_DIR%" + +:endDetectBaseDir + +IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig + +@setlocal EnableExtensions EnableDelayedExpansion +for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! 
%%a +@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% + +:endReadAdditionalConfig + +SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" +set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" +set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + +FOR /F "tokens=1,2 delims==" %%A IN ("%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties") DO ( + IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B +) + +@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +@REM This allows using the maven wrapper in projects that prohibit checking in binary data. +if exist %WRAPPER_JAR% ( + if "%MVNW_VERBOSE%" == "true" ( + echo Found %WRAPPER_JAR% + ) +) else ( + if not "%MVNW_REPOURL%" == "" ( + SET DOWNLOAD_URL="%MVNW_REPOURL%/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar" + ) + if "%MVNW_VERBOSE%" == "true" ( + echo Couldn't find %WRAPPER_JAR%, downloading it ... + echo Downloading from: %DOWNLOAD_URL% + ) + + powershell -Command "&{"^ + "$webclient = new-object System.Net.WebClient;"^ + "if (-not ([string]::IsNullOrEmpty('%MVNW_USERNAME%') -and [string]::IsNullOrEmpty('%MVNW_PASSWORD%'))) {"^ + "$webclient.Credentials = new-object System.Net.NetworkCredential('%MVNW_USERNAME%', '%MVNW_PASSWORD%');"^ + "}"^ + "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; $webclient.DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')"^ + "}" + if "%MVNW_VERBOSE%" == "true" ( + echo Finished downloading %WRAPPER_JAR% + ) +) +@REM End of extension + +@REM Provide a "standardized" way to retrieve the CLI args that will +@REM work with both Windows and non-Windows executions. 
+set MAVEN_CMD_LINE_ARGS=%* + +%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* +if ERRORLEVEL 1 goto error +goto end + +:error +set ERROR_CODE=1 + +:end +@endlocal & set ERROR_CODE=%ERROR_CODE% + +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost +@REM check for post script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" +if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" +:skipRcPost + +@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' +if "%MAVEN_BATCH_PAUSE%" == "on" pause + +if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% + +exit /B %ERROR_CODE% diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/pom.xml b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/pom.xml new file mode 100644 index 00000000..926ea140 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/pom.xml @@ -0,0 +1,93 @@ + + + 4.0.0 + + org.onap.so.simulators + so-simulators + 1.0-SNAPSHOT + + multicloud-simulator + ${project.artifactId} + Demo project for Spring Boot + + 11 + + + + ${project.parent.groupId} + common + ${project.version} + + + org.springframework.boot + spring-boot-starter + + + + org.springframework.boot + spring-boot-starter-test + test + + + org.springframework.boot + spring-boot-starter-security + + + org.springframework.boot + spring-boot-starter-tomcat + + + + + com.googlecode.json-simple + json-simple + 1.1 + + + + org.pacesys + openstack4j-core + 3.2.0 + + + org.pacesys.openstack4j.connectors + openstack4j-httpclient + 3.2.0 + + + org.onap.so.adapters + mso-adapters-rest-interface + 1.7.1-SNAPSHOT + + + org.springframework.security.oauth.boot + spring-security-oauth2-autoconfigure + 2.1.1.RELEASE + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + org.onap.so.multicloudsimulator.MultiCloudSimulatorApplication + + + + + repackage + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/MultiCloudSimulatorApplication.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/MultiCloudSimulatorApplication.java new file mode 100644 index 00000000..949cd9ba --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/MultiCloudSimulatorApplication.java @@ -0,0 +1,13 @@ +package org.onap.so.multicloudsimulator; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +@SpringBootApplication(scanBasePackages = {"org.onap"}) +public class MultiCloudSimulatorApplication { + + public static void main(String[] args) { + SpringApplication.run(MultiCloudSimulatorApplication.class, args); + } + +} diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/InstanceResponse.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/InstanceResponse.java new file mode 100644 index 00000000..e8f9e125 --- /dev/null +++ 
b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/InstanceResponse.java @@ -0,0 +1,93 @@ + +/*- + * ============LICENSE_START======================================================= + * ONAP - SO + * ================================================================================ + * Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.so.multicloudsimulator.beans; + +import java.util.List; + + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonPropertyOrder({"id", "request", "namespace", "resources"}) +@JsonIgnoreProperties(value = "true") +public class InstanceResponse extends Response { + + @JsonProperty("id") + private String id; + @JsonProperty("request") + private MulticloudInstanceRequest request; + @JsonProperty("namespace") + private String namespace; + @JsonProperty("resources") + private List resources = null; + + public InstanceResponse(String errorMsg) { + super(errorMsg); + } + + public InstanceResponse() { + super(""); + //new Response(""); + } + @JsonProperty("id") + public String getId() { + return id; + } + + @JsonProperty("id") + public void setId(String id) { + this.id = id; + } + + @JsonProperty("request") + public MulticloudInstanceRequest getRequest() { + return request; + } + + @JsonProperty("request") + public void setRequest(MulticloudInstanceRequest request) { + this.request = request; + } + + @JsonProperty("namespace") + public String getNamespace() { + return namespace; + } + + @JsonProperty("namespace") + public void setNamespace(String namespace) { + this.namespace = namespace; + } + + @JsonProperty("resources") + public List getResources() { + return resources; + } + + @JsonProperty("resources") + public void setResources(List resources) { + this.resources = resources; + } + +} diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/MulticloudCreateResponse.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/MulticloudCreateResponse.java new file mode 100644 index 00000000..f524aa97 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/MulticloudCreateResponse.java @@ -0,0 +1,99 @@ +package org.onap.so.multicloudsimulator.beans; + +import java.io.Serializable; +import org.apache.commons.lang3.builder.ToStringBuilder; +import 
com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import com.fasterxml.jackson.databind.JsonNode; + +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonIgnoreProperties(ignoreUnknown = true) +@JsonPropertyOrder({"template_type", "workload_id", "template_response", "workload_status_reason", "workload_status"}) +public class MulticloudCreateResponse implements Serializable { + private static final long serialVersionUID = -5215028275577848311L; + + @JsonProperty("template_type") + private String templateType; + @JsonProperty("workload_id") + private String workloadId; + @JsonProperty("template_response") + private JsonNode templateResponse; + @JsonProperty("workload_status_reason") + private JsonNode workloadStatusReason; + @JsonProperty("workload_status") + private String workloadStatus; + + @JsonCreator + public MulticloudCreateResponse(@JsonProperty("template_type") String templateType, + @JsonProperty("workload_id") String workloadId, + @JsonProperty("template_response") JsonNode templateResponse) { + this.templateType = templateType; + this.workloadId = workloadId; + this.templateResponse = templateResponse; + } + public MulticloudCreateResponse() { + + } + + @JsonProperty("template_type") + public String getTemplateType() { + return templateType; + } + + @JsonProperty("template_type") + public void setTemplateType(String templateType) { + this.templateType = templateType; + } + + @JsonProperty("workload_id") + public String getWorkloadId() { + return workloadId; + } + + @JsonProperty("workload_id") + public void setWorkloadId(String workloadId) { + this.workloadId = workloadId; + } + + @JsonProperty("template_response") + public void setTemplateResponse(JsonNode templateResponse) { + this.templateResponse = templateResponse; + } + + @JsonProperty("template_response") + public JsonNode getTemplateResponse() { + return templateResponse; + } + + @JsonProperty("workload_status_reason") + public void setWorkloadStatusReason(JsonNode workloadStatusReason) { + this.workloadStatusReason = workloadStatusReason; + } + + @JsonProperty("workload_status_reason") + public JsonNode getWorkloadStatusReason() { + return workloadStatusReason; + } + + @JsonProperty("workload_status") + public String getWorkloadSstatus() { + return workloadStatus; + } + + @JsonProperty("workload_status") + public void setWorkloadStatus(String workloadStatus) { + this.workloadStatus = workloadStatus; + } + + + @Override + public String toString() { + return new ToStringBuilder(this).append("templateType", templateType).append("workloadId", workloadId) + .append("templateResponse", templateResponse) + .append("workload_status_reason", workloadStatusReason.toString()) + .append("workload_status", workloadStatus).toString(); + } +} diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/MulticloudInstanceRequest.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/MulticloudInstanceRequest.java new file mode 100644 index 00000000..c88e7f15 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/MulticloudInstanceRequest.java @@ -0,0 +1,87 @@ +package 
org.onap.so.multicloudsimulator.beans; + +import java.util.Map; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; + +@JsonIgnoreProperties(value = "true") +public class MulticloudInstanceRequest { + + @JsonProperty(value = "cloud-region") + private String cloudRegion; + + @JsonProperty(value = "rb-name") + private String rbName; + + @JsonProperty(value = "rb-version") + private String rbVersion; + + @JsonProperty(value = "profile-name") + private String profileName; + + @JsonProperty(value = "labels") + private Map labels; + + @JsonProperty(value = "override-values") + private Map overrideValues; + + @JsonProperty(value = "release-name") + private String vfModuleUuid; + + public String getCloudRegion() { + return cloudRegion; + } + + public void setCloudRegion(String cloudRegion) { + this.cloudRegion = cloudRegion; + } + + public String getRbName() { + return rbName; + } + + public void setRbName(String rbName) { + this.rbName = rbName; + } + + public String getRbVersion() { + return rbVersion; + } + + public void setRbVersion(String rbVersion) { + this.rbVersion = rbVersion; + } + + public String getProfileName() { + return profileName; + } + + public void setProfileName(String profileName) { + this.profileName = profileName; + } + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels; + } + + public Map getOverrideValues() { + return overrideValues; + } + + public void setOverrideValues(Map overrideValues) { + this.overrideValues = overrideValues; + } + + public String getVfModuleUuid() { + return vfModuleUuid; + } + + public void setVfModuleUuid(String vfModuleUuid) { + this.vfModuleUuid = vfModuleUuid; + } + +} diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/MulticloudRequest.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/MulticloudRequest.java new file mode 100644 index 00000000..c2ec1910 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/MulticloudRequest.java @@ -0,0 +1,171 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP - SO + * ================================================================================ + * Copyright (C) 2019 Intel Corp. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.so.multicloudsimulator.beans; + +import java.io.Serializable; +import org.apache.commons.lang3.builder.ToStringBuilder; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; +import com.fasterxml.jackson.databind.JsonNode; +import com.woorea.openstack.heat.model.CreateStackParam; + +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonPropertyOrder({"generic-vnf-id", "vf-module-id", "vf-module-model-invariant-id", "vf-module-model-version-id", + "vf-module-model-customization-id", "oof_directives", "sdnc_directives", "user_directives", "template_type", + "template_data"}) +public class MulticloudRequest implements Serializable { + private static final long serialVersionUID = -5215028275577848311L; + + @JsonProperty("generic-vnf-id") + private String genericVnfId; + @JsonProperty("vf-module-id") + private String vfModuleId; + @JsonProperty("vf-module-model-invariant-id") + private String vfModuleModelInvariantId; + @JsonProperty("vf-module-model-version-id") + private String vfModuleModelVersionId; + @JsonProperty("vf-module-model-customization-id") + private String vfModuleModelCustomizationId; + @JsonProperty("oof_directives") + private JsonNode oofDirectives; + @JsonProperty("sdnc_directives") + private JsonNode sdncDirectives; + @JsonProperty("user_directives") + private JsonNode userDirectives; + @JsonProperty("template_type") + private String templateType; + @JsonProperty("template_data") + private CreateStackParam templateData; + + + @JsonProperty("generic-vnf-id") + public String getGenericVnfId() { + return genericVnfId; + } + + @JsonProperty("generic-vnf-id") + public void setGenericVnfId(String genericVnfId) { + this.genericVnfId = genericVnfId; + } + + @JsonProperty("vf-module-id") + public String getVfModuleId() { + return vfModuleId; + } + + @JsonProperty("vf-module-id") + public void setVfModuleId(String vfModuleId) { + this.vfModuleId = vfModuleId; + } + + @JsonProperty("vf-module-model-invariant-id") + public String getVfModuleModelInvariantId() { + return vfModuleModelInvariantId; + } + + @JsonProperty("vf-module-model-invariant-id") + public void setVfModuleModelInvariantId(String vfModuleModelInvariantId) { + this.vfModuleModelInvariantId = vfModuleModelInvariantId; + } + + @JsonProperty("vf-module-model-version-id") + public String getVfModuleModelVersionId() { + return vfModuleModelVersionId; + } + + @JsonProperty("vf-module-model-version-id") + public void setVfModuleModelVersionId(String vfModuleModelVersionId) { + this.vfModuleModelVersionId = vfModuleModelVersionId; + } + + @JsonProperty("vf-module-model-customization-id") + public String getVfModuleModelCustomizationId() { + return vfModuleModelCustomizationId; + } + + @JsonProperty("vf-module-model-customization-id") + public void setVfModuleModelCustomizationId(String vfModuleModelCustomizationId) { + this.vfModuleModelCustomizationId = vfModuleModelCustomizationId; + } + + @JsonProperty("oof_directives") + public JsonNode getOofDirectives() { + return oofDirectives; + } + + @JsonProperty("oof_directives") + public void setOofDirectives(JsonNode oofDirectives) { + this.oofDirectives = oofDirectives; + } + + @JsonProperty("sdnc_directives") + public JsonNode getSdncDirectives() { + return sdncDirectives; + } + + @JsonProperty("sdnc_directives") + public void setSdncDirectives(JsonNode 
sdncDirectives) { + this.sdncDirectives = sdncDirectives; + } + + @JsonProperty("user_directives") + public JsonNode getUserDirectives() { + return userDirectives; + } + + @JsonProperty("user_directives") + public void setUserDirectives(JsonNode userDirectives) { + this.userDirectives = userDirectives; + } + + @JsonProperty("template_type") + public String getTemplateType() { + return templateType; + } + + @JsonProperty("template_type") + public void setTemplateType(String templateType) { + this.templateType = templateType; + } + + @JsonProperty("template_data") + public CreateStackParam getTemplateData() { + return templateData; + } + + @JsonProperty("template_data") + public void setTemplateData(CreateStackParam templateData) { + this.templateData = templateData; + } + + @Override + public String toString() { + return new ToStringBuilder(this).append("genericVnfId", genericVnfId).append("vfModuleId", vfModuleId) + .append("vfModuleModelInvariantId", vfModuleModelInvariantId) + .append("vfModuleModelVersionId", vfModuleModelVersionId) + .append("vfModuleModelCustomizationId", vfModuleModelCustomizationId) + .append("oofDirectives", oofDirectives).append("sdncDirectives", sdncDirectives) + .append("userDirectives", userDirectives).append("templateType", templateType) + .append("templateData", templateData).toString(); + } + +} diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/Resource.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/Resource.java new file mode 100644 index 00000000..368ab6d7 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/Resource.java @@ -0,0 +1,57 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP - SO + * ================================================================================ + * Copyright (C) 2017 - 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.so.multicloudsimulator.beans; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonPropertyOrder; + +@JsonInclude(JsonInclude.Include.NON_NULL) +@JsonPropertyOrder({"resource-type", "resource-link"}) +public class Resource { + + @JsonProperty("resource-type") + private String resourceType; + @JsonProperty("resource-link") + private String resourceLink; + + @JsonProperty("resource-type") + public String getResourceType() { + return resourceType; + } + + @JsonProperty("resource-type") + public void setResourceType(String resourceType) { + this.resourceType = resourceType; + } + + @JsonProperty("resource-link") + public String getResourceLink() { + return resourceLink; + } + + //@JsonProperty("resource-link") + public void setResourceLink(String resourceLink) { + this.resourceLink = resourceLink; + } + +} + diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/Response.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/Response.java new file mode 100644 index 00000000..77a6150c --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/beans/Response.java @@ -0,0 +1,42 @@ +/*- + * ============LICENSE_START======================================================= + * ONAP - SO + * ================================================================================ + * Copyright (C) 2020 Huawei Technologies Co., Ltd. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.so.multicloudsimulator.beans; + +public class Response { + + private String errorMsg; + + public Response(String errorMsg) { + this.errorMsg = errorMsg; + } + + public Response() { + } + + public String getErrorMsg() { + return errorMsg; + } + + public void setErrorMsg(String errorMsg) { + this.errorMsg = errorMsg; + } + +} diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/configration/ApplicationConfigration.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/configration/ApplicationConfigration.java new file mode 100644 index 00000000..34015ea1 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/configration/ApplicationConfigration.java @@ -0,0 +1,48 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2019 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ +package org.onap.so.multicloudsimulator.configration; + +import static org.onap.so.multicloudsimulator.utils.Constants.SERVICE_TOPOLOGY_OPERATION_CACHE; +import java.util.Arrays; +import org.springframework.cache.Cache; +import org.springframework.cache.CacheManager; +import org.springframework.cache.concurrent.ConcurrentMapCache; +import org.springframework.cache.support.SimpleCacheManager; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * @author Waqas Ikram (waqas.ikram@est.tech) + * + */ +@Configuration +public class ApplicationConfigration { + + @Bean + public CacheManager cacheManager() { + final SimpleCacheManager manager = new SimpleCacheManager(); + manager.setCaches(Arrays.asList(getCache(SERVICE_TOPOLOGY_OPERATION_CACHE))); + return manager; + } + + private Cache getCache(final String name) { + return new ConcurrentMapCache(name); + } +} diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/configration/WebSecurityConfigImpl.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/configration/WebSecurityConfigImpl.java new file mode 100644 index 00000000..a0c1f755 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/configration/WebSecurityConfigImpl.java @@ -0,0 +1,49 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2019 Nordix Foundation. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ +package org.onap.so.multicloudsimulator.configration; + +import static org.onap.so.multicloudsimulator.utils.Constants.OPERATIONS_URL; +import org.onap.so.simulator.configuration.SimulatorSecurityConfigurer; +import org.onap.so.simulator.model.UserCredentials; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; + +/** + * @author waqas.ikram@ericsson.com + * + */ +@Configuration +@EnableWebSecurity +public class WebSecurityConfigImpl extends SimulatorSecurityConfigurer { + + @Autowired + public WebSecurityConfigImpl(final UserCredentials userCredentials) { + super(userCredentials.getUsers()); + } + + @Override + protected void configure(final HttpSecurity http) throws Exception { + http.csrf().disable().authorizeRequests().antMatchers(OPERATIONS_URL + "/**/**").authenticated().and() + .httpBasic(); + } + +} diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/controller/MultiCloudController.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/controller/MultiCloudController.java new file mode 100644 index 00000000..b0e13362 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/controller/MultiCloudController.java @@ -0,0 +1,171 @@ +package org.onap.so.multicloudsimulator.controller; + +import javax.servlet.http.HttpServletRequest; +import javax.ws.rs.core.MediaType; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.json.simple.JSONArray; +import org.json.simple.JSONObject; +import org.onap.so.multicloudsimulator.beans.InstanceResponse; +import org.onap.so.multicloudsimulator.beans.MulticloudInstanceRequest; +import org.onap.so.multicloudsimulator.beans.MulticloudCreateResponse; +import org.onap.so.multicloudsimulator.beans.MulticloudRequest; +import org.onap.so.openstack.beans.HeatStatus; + +import org.springframework.http.ResponseEntity; + +import org.springframework.web.bind.annotation.*; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.URI; + +import static org.onap.so.multicloudsimulator.utils.Constants.BASE_URL; +@RestController +@RequestMapping(path = BASE_URL) +public class MultiCloudController { + + public static final String X_HTTP_METHOD_OVERRIDE = "X-HTTP-Method-Override"; + private static final Logger LOGGER = 
LoggerFactory.getLogger(MultiCloudController.class); + public MulticloudCreateResponse multicloudCreateResponse = new MulticloudCreateResponse(); + + @PostMapping(value="/v1/instance") + public ResponseEntity createInstance(@RequestBody MulticloudInstanceRequest req){ + System.out.println("MultiCloud createInstance "); + InstanceResponse InstanceResponse = new InstanceResponse(); + + return ResponseEntity.ok(InstanceResponse); + } + + @GetMapping(value = "/{cloud-owner}/{cloud-region-id}/infra_workload", produces = { + MediaType.APPLICATION_JSON }) + public ResponseEntity getInstance( + @PathVariable("cloud-owner") String cloudOwner, @PathVariable("cloud-region-id") String cloudRegionId, + @RequestParam(value = "depth", required = false, defaultValue = "0") Integer depth, + @RequestParam(name = "format", required = false) final String name, final HttpServletRequest request) throws IOException { + + LOGGER.info("found CloudOwner {} in cache", cloudOwner); + LOGGER.info("found cloudRegionId {} in cache", cloudRegionId); + LOGGER.info("found name {} in cache", name); + JSONObject json = new JSONObject(); + + json.put("template_type", "heat"); + json.put("workload_id", ""); + json.put("workload_status", "GET_COMPLETE"); + JSONObject workload = new JSONObject(); + workload.put("stacks", HeatStatus.NOTFOUND); + json.put("workload_status_reason", workload); + + return ResponseEntity.ok(json); + } + + @PostMapping(value = "/{cloud-owner}/{cloud-region-id}/infra_workload", + consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}, + produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) + public ResponseEntity postCreateInstance( + @RequestBody final MulticloudRequest inputRequest, @PathVariable("cloud-owner") final String cloudOwner, + @PathVariable("cloud-region-id") final String cloudRegionId, + @RequestHeader(value = X_HTTP_METHOD_OVERRIDE, required = false) final String xHttpHeaderOverride, + final HttpServletRequest request) throws IOException { + + LOGGER.info("input request {}: ",inputRequest.toString()); + String input = "{\n" + + " \"template_type\": \"heat\",\n" + + " \"workload_id\": \"sad_sammet\",\n" + + " \"template_response\": [\n" + + " {\n" + + " \"GVK\": {\n" + + " \"Group\": \"k8s.plugin.opnfv.org\",\n" + + " \"Version\": \"v1alpha1\",\n" + + " \"Kind\": \"Network\"\n" + + " },\n" + + " \"Name\": \"k8s-region-2-onap-nf-20210120t221126760z-management-network\"\n" + + " },\n" + + " {\n" + + " \"GVK\": {\n" + + " \"Group\": \"k8s.plugin.opnfv.org\",\n" + + " \"Version\": \"v1alpha1\",\n" + + " \"Kind\": \"Network\"\n" + + " },\n" + + " \"Name\": \"k8s-region-2-onap-nf-20210120t221126760z-protected-network\"\n" + + " },\n" + + " {\n" + + " \"GVK\": {\n" + + " \"Group\": \"k8s.plugin.opnfv.org\",\n" + + " \"Version\": \"v1alpha1\",\n" + + " \"Kind\": \"Network\"\n" + + " },\n" + + " \"Name\": \"k8s-region-2-onap-nf-20210120t221126760z-unprotected-network\"\n" + + " },\n" + + " {\n" + + " \"GVK\": {\n" + + " \"Group\": \"k8s.cni.cncf.io\",\n" + + " \"Version\": \"v1\",\n" + + " \"Kind\": \"NetworkAttachmentDefinition\"\n" + + " },\n" + + " \"Name\": \"k8s-region-2-onap-nf-20210120t221126760z-ovn-nat\"\n" + + " }\n" + + " ],\n" + + " \"workload_status\": \"CREATE_COMPLETE\",\n" + + " \"workload_status_reason\": \"test\"\n" + + "}"; + + ObjectMapper objectMapper = new ObjectMapper(); + JSONObject workload = new JSONObject(); + + workload.put("stack",true); + + JsonNode jsonNode = objectMapper.readTree(workload.toJSONString()); + MulticloudCreateResponse 
multiResponse = objectMapper.readValue(input, MulticloudCreateResponse.class); + multiResponse.setWorkloadStatusReason(null); + + LOGGER.info("workload reason: {}",multiResponse.getWorkloadStatusReason()); + multiResponse.setWorkloadId("sad_sammet"); + multiResponse.setTemplateType("heat"); + multiResponse.setWorkloadStatus("CREATE_COMPLETE"); + + return ResponseEntity.status(201).body(multiResponse); + } + + @GetMapping(value = "/{cloud-owner}/{cloud-region-id}/infra_workload/{workload-id}", produces = { + MediaType.APPLICATION_JSON }) + public ResponseEntity getInstanceName( + @PathVariable("cloud-owner") String cloudOwner, @PathVariable("cloud-region-id") String cloudRegionId, + @PathVariable("workload-id") String workloadId, + @RequestParam(value = "depth", required = false, defaultValue = "0") Integer depth, + @RequestParam(name = "format", required = false) final String name, final HttpServletRequest request) throws IOException { + + LOGGER.info("Calling getInstanceName"); + LOGGER.info("found CloudOwner {} in cache", cloudOwner); + LOGGER.info("found cloudRegionId {} in cache", cloudRegionId); + LOGGER.info("found name {} in cache", name); + JSONObject json = new JSONObject(); + + json.put("template_type", "heat"); + json.put("workload_id", "sad_sammet"); + json.put("workload_status", "CREATE_COMPLETE"); + JSONObject workload = new JSONObject(); + workload.put("stacks", true); + json.put("workload_status_reason", null); + + return ResponseEntity.ok(json); + } + + @PostMapping(value = "/{cloud-owner}/{cloud-region-id}/infra_workload/{workload-id}", + consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}, + produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) + public ResponseEntity postCreateInstanceName( + @RequestBody final MulticloudRequest inputRequest, @PathVariable("cloud-owner") final String cloudOwner, + @PathVariable("workload-id") String workloadId, + @PathVariable("cloud-region-id") final String cloudRegionId, + @RequestHeader(value = X_HTTP_METHOD_OVERRIDE, required = false) final String xHttpHeaderOverride, + final HttpServletRequest request) throws IOException { + + LOGGER.info("Calling postCreateInstanceName"); + + return ResponseEntity.status(405).build(); + } +} diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/utils/Constants.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/utils/Constants.java new file mode 100644 index 00000000..5f54f5da --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/java/org/onap/so/multicloudsimulator/utils/Constants.java @@ -0,0 +1,47 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2019 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ +package org.onap.so.multicloudsimulator.utils; + +/** + * @author Waqas Ikram (waqas.ikram@est.tech) + * + */ +public class Constants { + + public static final String BASE_URL = "/api/multicloud/v1"; + + public static final String OPERATIONS_URL = BASE_URL + "/operations"; + + public static final String SERVICE_TOPOLOGY_OPERATION_CACHE = "service-topology-operation-cache"; + + public static final String HEALTHY = "healthy"; + + public static final String YES = "Y"; + + public static final String SERVICE_TOPOLOGY_OPERATION = "service-topology-operation"; + + public static final String RESTCONF_CONFIG_END_POINT = "restconf/config/GENERIC-RESOURCE-API:services/service/"; + + public static final String VNF_DATA_VNF_TOPOLOGY = "/vnf-data/vnf-topology/"; + + public static final String SERVICE_DATA_VNFS_VNF = "/service-data/vnfs/vnf/"; + + private Constants() {} +} diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/resources/application.properties b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/resources/application.properties new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/resources/application.properties @@ -0,0 +1 @@ + diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/resources/application.yaml b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/resources/application.yaml new file mode 100644 index 00000000..7299ff92 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/main/resources/application.yaml @@ -0,0 +1,18 @@ +server: + port: 9996 + tomcat: + max-threads: 4 +ssl-enable: false +spring: + security: + users: + - username: mso + #password: Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U + password: $2a$04$f8SB6cW/VI26QvYM6z.GXu7hlEmwnFtePenD8zF18mS3Atu3QNqr2 + role: VID + - username: admin + #password: Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U + password: $2a$04$f8SB6cW/VI26QvYM6z.GXu7hlEmwnFtePenD8zF18mS3Atu3QNqr2 + role: VID + main: + allow-bean-definition-overriding: true \ No newline at end of file diff --git a/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/test/java/org/onap/so/multicloudsimulator/MultiCloudSimulatorApplicationTests.java b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/test/java/org/onap/so/multicloudsimulator/MultiCloudSimulatorApplicationTests.java new file mode 100644 index 00000000..ccce88c9 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/multicloud-simulator/src/test/java/org/onap/so/multicloudsimulator/MultiCloudSimulatorApplicationTests.java @@ -0,0 +1,13 @@ +package org.onap.so.multicloudsimulator; + +import org.junit.Test; +import org.springframework.boot.test.context.SpringBootTest; + +@SpringBootTest +class MultiCloudSimulatorApplicationTests { + + @Test + void contextLoads() { + } + +} diff --git a/plans/so/integration-etsi-testing/so-simulators/package/docker/pom.xml b/plans/so/integration-etsi-testing/so-simulators/package/docker/pom.xml index 9334a095..9173439f 100644 --- a/plans/so/integration-etsi-testing/so-simulators/package/docker/pom.xml +++ b/plans/so/integration-etsi-testing/so-simulators/package/docker/pom.xml @@ -99,6 +99,29 @@ + + simulators/multicloud-simulator + + try 
+ docker-files + Dockerfile.so-simulator-base-image + + ${project.version} + + + + + + + org.onap.so.simulators:multicloud-simulator + + app.jar + + + + + + simulators/vnfm-simulator @@ -170,6 +193,11 @@ sdnc-simulator ${project.version} + + ${project.parent.groupId} + multicloud-simulator + ${project.version} + org.onap.so.simulators.vnfm vnfm-service diff --git a/plans/so/integration-etsi-testing/so-simulators/pom.xml b/plans/so/integration-etsi-testing/so-simulators/pom.xml index fb08bbb4..352c18b1 100644 --- a/plans/so/integration-etsi-testing/so-simulators/pom.xml +++ b/plans/so/integration-etsi-testing/so-simulators/pom.xml @@ -18,6 +18,7 @@ common + multicloud-simulator sdc-simulator aai-simulator sdnc-simulator diff --git a/plans/so/integration-etsi-testing/so-simulators/sdc-simulator/src/main/resources/csar/SERVICES/9bb8c882-44a1-4b67-a12c-5a998e18d6ba.csar b/plans/so/integration-etsi-testing/so-simulators/sdc-simulator/src/main/resources/csar/SERVICES/9bb8c882-44a1-4b67-a12c-5a998e18d6ba.csar index 6504cb1a..1610b7fc 100644 Binary files a/plans/so/integration-etsi-testing/so-simulators/sdc-simulator/src/main/resources/csar/SERVICES/9bb8c882-44a1-4b67-a12c-5a998e18d6ba.csar and b/plans/so/integration-etsi-testing/so-simulators/sdc-simulator/src/main/resources/csar/SERVICES/9bb8c882-44a1-4b67-a12c-5a998e18d6ba.csar differ diff --git a/plans/so/integration-etsi-testing/so-simulators/sdc-simulator/src/test/java/org/onap/so/sdcsimulator/controller/CatalogControllerTest.java b/plans/so/integration-etsi-testing/so-simulators/sdc-simulator/src/test/java/org/onap/so/sdcsimulator/controller/CatalogControllerTest.java index e3d040da..e5a6e7f7 100644 --- a/plans/so/integration-etsi-testing/so-simulators/sdc-simulator/src/test/java/org/onap/so/sdcsimulator/controller/CatalogControllerTest.java +++ b/plans/so/integration-etsi-testing/so-simulators/sdc-simulator/src/test/java/org/onap/so/sdcsimulator/controller/CatalogControllerTest.java @@ -102,7 +102,7 @@ public class CatalogControllerTest { assertEquals(HttpStatus.OK, response.getStatusCode()); assertTrue(response.hasBody()); - assertEquals(147743, response.getBody().length); + assertEquals(147255, response.getBody().length); } diff --git a/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/controller/OperationsController.java b/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/controller/OperationsController.java index 2f24ef69..6077bddd 100644 --- a/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/controller/OperationsController.java +++ b/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/controller/OperationsController.java @@ -23,14 +23,24 @@ import static org.onap.sdnc.northbound.client.model.GenericResourceApiRequestAct import static org.onap.sdnc.northbound.client.model.GenericResourceApiRequestActionEnumeration.DELETEVNFINSTANCE; import static org.onap.sdnc.northbound.client.model.GenericResourceApiSvcActionEnumeration.DELETE; import static org.onap.so.sdncsimulator.utils.Constants.OPERATIONS_URL; +import static org.onap.so.sdncsimulator.utils.Constants.BASE_URL; +import static org.onap.so.sdncsimulator.utils.Constants.RESTCONF_CONFIG_END_POINT; + +import java.util.ArrayList; +import java.util.List; + import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.MediaType; + import 
org.onap.sdnc.northbound.client.model.GenericResourceApiRequestActionEnumeration; import org.onap.sdnc.northbound.client.model.GenericResourceApiRequestinformationRequestInformation; import org.onap.sdnc.northbound.client.model.GenericResourceApiSdncrequestheaderSdncRequestHeader; import org.onap.sdnc.northbound.client.model.GenericResourceApiServiceOperationInformation; import org.onap.sdnc.northbound.client.model.GenericResourceApiSvcActionEnumeration; import org.onap.sdnc.northbound.client.model.GenericResourceApiVnfOperationInformation; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVfModuleOperationInformation; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVnfTopology; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVfModuleTopology; import org.onap.so.sdncsimulator.models.InputRequest; import org.onap.so.sdncsimulator.models.Output; import org.onap.so.sdncsimulator.models.OutputRequest; @@ -41,6 +51,8 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; @@ -50,7 +62,7 @@ import org.springframework.web.bind.annotation.RequestMapping; * */ @Controller -@RequestMapping(path = OPERATIONS_URL) +@RequestMapping(path = BASE_URL) public class OperationsController { private static final String HTTP_STATUS_OK = HttpStatus.OK.value() + ""; @@ -63,7 +75,7 @@ public class OperationsController { this.cacheServiceProvider = cacheServiceProvider; } - @PostMapping(value = "/GENERIC-RESOURCE-API:service-topology-operation/", + @PostMapping(value = "/operations/GENERIC-RESOURCE-API:service-topology-operation/", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}, produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) public ResponseEntity postServiceOperationInformation( @@ -90,7 +102,7 @@ public class OperationsController { } - @PostMapping(value = "/GENERIC-RESOURCE-API:vnf-topology-operation/", + @PostMapping(value = "/operations/GENERIC-RESOURCE-API:vnf-topology-operation/", consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}, produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) public ResponseEntity postVnfOperationInformation( @@ -147,4 +159,60 @@ public class OperationsController { return cacheServiceProvider.putVnfOperationInformation(apiVnfOperationInformation); } + @PostMapping(value = "/operations/GENERIC-RESOURCE-API:vf-module-topology-operation/", + consumes = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}, + produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) + public ResponseEntity postVfModuleOperationInformation( + @RequestBody final InputRequest inputRequest, + final HttpServletRequest request) { + LOGGER.info("Request Received for VfModule : {} ...", inputRequest); + + final GenericResourceApiVfModuleOperationInformation apiVfModuleOperationInformation = inputRequest.getInput(); + if (apiVfModuleOperationInformation == null) { + LOGGER.error("Invalid input request: {}", inputRequest); + return ResponseEntity.badRequest().build(); + } + + final Output output = getOutput(apiVfModuleOperationInformation);
final OutputRequest outputRequest = new OutputRequest(output); + + if (output.getResponseCode().equals(HTTP_STATUS_OK)) { + LOGGER.info("Successfully executed vf-module request, sending response: {}", outputRequest); + return ResponseEntity.ok(outputRequest); + } + + LOGGER.error("Unable to execute input request: {}, will send OutputRequest: {}", inputRequest, outputRequest); + return ResponseEntity.badRequest().body(outputRequest); + + } + + private Output getOutput(final GenericResourceApiVfModuleOperationInformation apiVfModuleOperationInformation) { + + return cacheServiceProvider.putVfModuleOperationInformation(apiVfModuleOperationInformation); + } + + + @GetMapping(value = "/config/GENERIC-RESOURCE-API:services/service/{service-id}/service-data/vnfs/vnf/{vnf-id}/vnf-data/vnf-topology/") + public ResponseEntity getVnf(@PathVariable("service-id") String serviceId, + @PathVariable("vnf-id") String vnfId) { + + LOGGER.info("Get vnf-topology with serviceId {} and vnfId {}", serviceId, vnfId); + GenericResourceApiVnfTopology genericResourceApiVnfTopology = new GenericResourceApiVnfTopology(); + + genericResourceApiVnfTopology = cacheServiceProvider.getGenericResourceApiVnfTopology(); + return ResponseEntity.ok(genericResourceApiVnfTopology); + } + + @GetMapping(value = "/config/GENERIC-RESOURCE-API:services/service/{service-id}/service-data/vnfs/vnf/{vnf-id}/vnf-data/vf-modules/vf-module/{vf-module-id}/vf-module-data/vf-module-topology/", produces = { + MediaType.APPLICATION_JSON }) + public ResponseEntity getVfModule(@PathVariable("service-id") String serviceId, + @PathVariable("vnf-id") String vnfId, @PathVariable("vf-module-id") String vfModuleId) { + LOGGER.info("Get vfModule-topology with serviceId {}, vnfId {} and vfModuleId {}", serviceId, vnfId, vfModuleId); + + GenericResourceApiVfModuleTopology genericResourceApiVfModuleTopology = new GenericResourceApiVfModuleTopology(); + + genericResourceApiVfModuleTopology = cacheServiceProvider.getGenericResourceApiVfModuleTopology(); + return ResponseEntity.ok(genericResourceApiVfModuleTopology); + + } } diff --git a/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/models/Output.java b/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/models/Output.java index d64b0b0c..7cc0bdaf 100644 --- a/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/models/Output.java +++ b/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/models/Output.java @@ -48,7 +48,10 @@ public class Output implements Serializable { private GenericResourceApiInstanceReference serviceResponseInformation = null; @JsonProperty("vnf-response-information") - private GenericResourceApiInstanceReference vnfResponseInformation = null; + private GenericResourceApiInstanceReference vnfResponseInformation = null; + + @JsonProperty("vf-module-response-information") + private GenericResourceApiInstanceReference vfModuleResponseInformation = null; /** * @return the responseMessage @@ -164,6 +167,11 @@ public class Output implements Serializable { this.vnfResponseInformation = vnfResponseInformation; return this; + } + public Output vfModuleResponseInformation(final GenericResourceApiInstanceReference vfModuleResponseInformation) { + this.vfModuleResponseInformation = vfModuleResponseInformation; + return this; + + } @JsonIgnore @@ -177,6 +185,7 @@ public class Output implements Serializable {
sb.append(" responseCode: ").append(responseCode).append("\n"); sb.append(" serviceResponseInformation: ").append(serviceResponseInformation).append("\n"); sb.append(" vnfResponseInformation: ").append(vnfResponseInformation).append("\n"); + sb.append(" vfModuleResponseInformation: ").append(vfModuleResponseInformation).append("\n"); sb.append("}"); return sb.toString(); } diff --git a/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/providers/ServiceOperationsCacheServiceProvider.java b/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/providers/ServiceOperationsCacheServiceProvider.java index c3a80ec3..d7dbec8e 100644 --- a/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/providers/ServiceOperationsCacheServiceProvider.java +++ b/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/providers/ServiceOperationsCacheServiceProvider.java @@ -19,10 +19,15 @@ */ package org.onap.so.sdncsimulator.providers; +import java.util.List; import java.util.Optional; + import org.onap.sdnc.northbound.client.model.GenericResourceApiServiceOperationInformation; import org.onap.sdnc.northbound.client.model.GenericResourceApiServicemodelinfrastructureService; import org.onap.sdnc.northbound.client.model.GenericResourceApiVnfOperationInformation; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVfModuleOperationInformation; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVfModuleTopology; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVnfTopology; import org.onap.so.sdncsimulator.models.Output; /** @@ -44,6 +49,11 @@ public interface ServiceOperationsCacheServiceProvider { Output deleteVnfOperationInformation(final GenericResourceApiVnfOperationInformation apiVnfOperationInformation); - void clearAll(); + Output putVfModuleOperationInformation(final GenericResourceApiVfModuleOperationInformation apiVfModuleOperationInformation); + + public GenericResourceApiVfModuleTopology getGenericResourceApiVfModuleTopology(); -} + public GenericResourceApiVnfTopology getGenericResourceApiVnfTopology(); + + void clearAll(); +} \ No newline at end of file diff --git a/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/providers/ServiceOperationsCacheServiceProviderimpl.java b/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/providers/ServiceOperationsCacheServiceProviderimpl.java index 88db4c13..90255de2 100644 --- a/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/providers/ServiceOperationsCacheServiceProviderimpl.java +++ b/plans/so/integration-etsi-testing/so-simulators/sdnc-simulator/src/main/java/org/onap/so/sdncsimulator/providers/ServiceOperationsCacheServiceProviderimpl.java @@ -36,6 +36,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Optional; import javax.validation.Valid; + import org.onap.sdnc.northbound.client.model.GenericResourceApiInstanceReference; import org.onap.sdnc.northbound.client.model.GenericResourceApiLastActionEnumeration; import org.onap.sdnc.northbound.client.model.GenericResourceApiLastRpcActionEnumeration; @@ -60,6 +61,17 @@ import org.onap.sdnc.northbound.client.model.GenericResourceApiVnfinformationVnf import 
org.onap.sdnc.northbound.client.model.GenericResourceApiVnfrequestinputVnfRequestInput; import org.onap.sdnc.northbound.client.model.GenericResourceApiVnftopologyVnfTopology; import org.onap.sdnc.northbound.client.model.GenericResourceApiVnftopologyidentifierstructureVnfTopologyIdentifierStructure; + +import org.onap.sdnc.northbound.client.model.GenericResourceApiServicedataServicedataVnfsVnfVnfdataVfmodulesVfmoduleVfModuleData; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVfModuleTopology; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVnfTopology; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVfModuleOperationInformation; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVfmoduleinformationVfModuleInformation; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVfmoduletopologyVfModuleTopology; +import org.onap.sdnc.northbound.client.model.GenericResourceApiVfmoduletopologyidentifierVfModuleTopologyIdentifier; +import org.onap.sdnc.northbound.client.model.GenericResourceApiParam; +import org.onap.sdnc.northbound.client.model.GenericResourceApiParamParam; + import org.onap.so.sdncsimulator.models.Output; import org.onap.so.simulator.cache.provider.AbstractCacheServiceProvider; import org.slf4j.Logger; @@ -82,6 +94,9 @@ public class ServiceOperationsCacheServiceProviderimpl extends AbstractCacheServ private static final String HTTP_STATUS_OK = Integer.toString(HttpStatus.OK.value()); private static final String EMPTY_STRING = ""; private static final Logger LOGGER = LoggerFactory.getLogger(ServiceOperationsCacheServiceProviderimpl.class); + private static List vfModuleList; + GenericResourceApiVfModuleTopology genericResourceApiVfModuleTopology = new GenericResourceApiVfModuleTopology(); + GenericResourceApiVnfTopology genericResourceApiVnfTopology = new GenericResourceApiVnfTopology(); @Autowired public ServiceOperationsCacheServiceProviderimpl(final CacheManager cacheManager) { @@ -198,7 +213,7 @@ public class ServiceOperationsCacheServiceProviderimpl extends AbstractCacheServ if (ifVnfNotExists(vnfId, svcAction, vnfsList)) { vnfsList.add(getGenericResourceApiServicedataVnf(serviceInstanceId, vnfId, input)); - + getVnfsList(vnfsList); final GenericResourceApiServicestatusServiceStatus serviceStatus = service.getServiceStatus(); return new Output().ackFinalIndicator(serviceStatus.getFinalIndicator()) @@ -507,4 +522,128 @@ public class ServiceOperationsCacheServiceProviderimpl extends AbstractCacheServ } -} + @Override + public Output putVfModuleOperationInformation( + GenericResourceApiVfModuleOperationInformation input) { + + final GenericResourceApiServiceinformationServiceInformation serviceInformation = input.getServiceInformation(); + final GenericResourceApiVnfinformationVnfInformation vnfInformation = input.getVnfInformation(); + final GenericResourceApiVfmoduleinformationVfModuleInformation vfModuleInformation = input.getVfModuleInformation(); + // Call getVfModule to build the vf-module list used to serve the vf-module-information GET request + vfModuleList = getVfModule(input); + + final GenericResourceApiSdncrequestheaderSdncRequestHeader requestHeader = input.getSdncRequestHeader(); + final String svcRequestId = getSvcRequestId(requestHeader); + + if (serviceInformation != null && isValid(serviceInformation.getServiceInstanceId()) && vnfInformation != null + && isValid(vnfInformation.getVnfId()) && vfModuleInformation != null && isValid(vfModuleInformation.getVfModuleId())) { + + + final String
serviceInstanceId = serviceInformation.getServiceInstanceId(); + final String vnfId = vnfInformation.getVnfId(); + final String vfModuleId = vfModuleInformation.getVfModuleId(); + + final Optional optional = + getGenericResourceApiServicemodelinfrastructureService(serviceInstanceId); + if (optional.isPresent()) { + final GenericResourceApiServicemodelinfrastructureService service = optional.get(); + final GenericResourceApiServicedataServiceData serviceData = service.getServiceData(); + if (serviceData != null) { + + final GenericResourceApiServicestatusServiceStatus serviceStatus = service.getServiceStatus(); + + return new Output().ackFinalIndicator(serviceStatus.getFinalIndicator()) + .responseCode(serviceStatus.getResponseCode()) + .responseMessage(serviceStatus.getResponseMessage()).svcRequestId(svcRequestId) + .serviceResponseInformation(new GenericResourceApiInstanceReference() + .instanceId(serviceInstanceId).objectPath(getObjectPath(serviceInstanceId))) + .vnfResponseInformation(new GenericResourceApiInstanceReference().instanceId(vnfId) + .objectPath(getObjectPath(serviceInstanceId, vnfId))) + .vfModuleResponseInformation(new GenericResourceApiInstanceReference().instanceId(vfModuleId) + .objectPath(getObjectPath(vnfId, vfModuleId))); + } + } + LOGGER.error( + "Unable to find existing GenericResourceApiServiceModelInfrastructure in cache using service instance id: {}", + serviceInstanceId); + } + LOGGER.error( + "Unable to add GenericResourceApiServiceOperationInformation in cache due to invalid input: {}... ", + input); + return new Output().ackFinalIndicator(YES).responseCode(HTTP_STATUS_BAD_REQUEST) + .responseMessage("Unable to add vfModule").svcRequestId(svcRequestId); + } + + private List getVfModule(GenericResourceApiVfModuleOperationInformation input) { + + final GenericResourceApiVfmoduletopologyVfModuleTopology apiVfModuletopologyVfModuleTopology = + new GenericResourceApiVfmoduletopologyVfModuleTopology(); + + GenericResourceApiServicedataServicedataVnfsVnfVnfdataVfmodulesVfmoduleVfModuleData vfModuleData = + new GenericResourceApiServicedataServicedataVnfsVnfVnfdataVfmodulesVfmoduleVfModuleData(); + vfModuleData.setVfModuleInformation(input.getVfModuleInformation()); + vfModuleData.setVfModuleRequestInput(input.getVfModuleRequestInput()); + + GenericResourceApiVfmoduletopologyVfModuleTopology vfModuleTopology = new GenericResourceApiVfmoduletopologyVfModuleTopology(); + + vfModuleTopology.setSdncGeneratedCloudResources(true); + GenericResourceApiParam vfModuleParametersData = new GenericResourceApiParam(); + List params = new ArrayList(); + GenericResourceApiParamParam param = new GenericResourceApiParamParam(); + param.setName("k8s-rb-profile-name"); + param.setValue("k8s-rb-profile-value"); + + params.add(param); + vfModuleParametersData.setParam(params); + vfModuleTopology.setVfModuleParameters(vfModuleParametersData); + + vfModuleTopology.setOnapModelInformation(vfModuleData.getVfModuleInformation().getOnapModelInformation()); + vfModuleTopology.setVfModuleParameters(vfModuleData.getVfModuleRequestInput().getVfModuleInputParameters()); + vfModuleTopology.setAicClli(vfModuleData.getVfModuleRequestInput().getAicClli()); + vfModuleTopology.setAicCloudRegion(vfModuleData.getVfModuleRequestInput().getAicCloudRegion()); + vfModuleTopology.setCloudOwner(vfModuleData.getVfModuleRequestInput().getCloudOwner()); + + apiVfModuletopologyVfModuleTopology.vfModuleTopologyIdentifier(getVfModuleTopologyIdentifierStructure(input)); + + 
vfModuleTopology.setVfModuleTopologyIdentifier(apiVfModuletopologyVfModuleTopology.getVfModuleTopologyIdentifier()); + vfModuleTopology.setTenant(vfModuleData.getVfModuleRequestInput().getTenant()); + + genericResourceApiVfModuleTopology.setVfModuleTopology(vfModuleTopology); + + return null; + + } + + public GenericResourceApiVfModuleTopology getGenericResourceApiVfModuleTopology() { + return genericResourceApiVfModuleTopology; + } + + private GenericResourceApiVfmoduletopologyidentifierVfModuleTopologyIdentifier getVfModuleTopologyIdentifierStructure( + @Valid final GenericResourceApiVfModuleOperationInformation input) { + + final GenericResourceApiVfmoduleinformationVfModuleInformation vfModuleInformation = input.getVfModuleInformation(); + return new GenericResourceApiVfmoduletopologyidentifierVfModuleTopologyIdentifier() + .vfModuleId(vfModuleInformation.getVfModuleId()).vfModuleType(vfModuleInformation.getVfModuleType()).vfModuleName(input.getVfModuleRequestInput().getVfModuleName()); + } + + public GenericResourceApiVnfTopology getGenericResourceApiVnfTopology() { + return genericResourceApiVnfTopology; + } + + public void getVnfsList(List vnfsList) { + + GenericResourceApiVnftopologyVnfTopology vnfTopology = new GenericResourceApiVnftopologyVnfTopology(); + LOGGER.info(String.valueOf(vnfsList)); + vnfTopology.setOnapModelInformation(vnfsList.get(0).getVnfData().getVnfInformation().getOnapModelInformation()); + vnfTopology.setAicClli(String.valueOf(vnfsList.get(0).getVnfData().getVnfRequestInput().getAicClli())); + vnfTopology.setAicCloudRegion(String.valueOf(vnfsList.get(0).getVnfData().getVnfRequestInput().getAicCloudRegion())); + vnfTopology.setCloudOwner(String.valueOf(vnfsList.get(0).getVnfData().getVnfRequestInput().getCloudOwner())); + vnfTopology.setTenant(String.valueOf(vnfsList.get(0).getVnfData().getVnfRequestInput().getTenant())); + vnfTopology.setVnfResourceAssignments(vnfsList.get(0).getVnfData().getVnfTopology().getVnfResourceAssignments()); + vnfTopology.setVnfTopologyIdentifierStructure(vnfsList.get(0).getVnfData().getVnfTopology().getVnfTopologyIdentifierStructure()); + vnfTopology.setVnfParametersData(vnfsList.get(0).getVnfData().getVnfTopology().getVnfParametersData()); + vnfTopology.setSdncGeneratedCloudResources(vnfsList.get(0).getVnfData().getVnfTopology().getSdncGeneratedCloudResources()); + + genericResourceApiVnfTopology.setVnfTopology(vnfTopology); + } +} \ No newline at end of file diff --git a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/pom.xml b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/pom.xml index 00fd81f1..bd507ca3 100644 --- a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/pom.xml +++ b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/pom.xml @@ -136,6 +136,11 @@ httpclient 4.5.8 + + org.onap.so.simulators + common + ${project.version} + diff --git a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/config/ApplicationConfig.java b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/config/ApplicationConfig.java index b4657922..2e11714d 100644 --- a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/config/ApplicationConfig.java +++ 
b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/config/ApplicationConfig.java @@ -1,7 +1,10 @@ package org.onap.so.svnfm.simulator.config; -import java.util.Arrays; +import java.util.ArrayList; +import java.util.List; import org.onap.so.svnfm.simulator.constants.Constant; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.ApplicationArguments; @@ -16,6 +19,7 @@ import org.springframework.stereotype.Component; @Component public class ApplicationConfig implements ApplicationRunner { + private static final Logger LOGGER = LoggerFactory.getLogger(ApplicationConfig.class); private static final String PORT = "local.server.port"; @@ -45,9 +49,18 @@ public class ApplicationConfig implements ApplicationRunner { @Bean public CacheManager cacheManager() { - final Cache inlineResponse201 = new ConcurrentMapCache(Constant.IN_LINE_RESPONSE_201_CACHE); + final Cache inlineResponse201 = getCache(Constant.IN_LINE_RESPONSE_201_CACHE); + final Cache vnfPkgOnboardingNotificationCache = getCache(Constant.VNF_PKG_ONBOARDING_NOTIFICATION_CACHE); + final List caches = new ArrayList<>(); + caches.add(inlineResponse201); + caches.add(vnfPkgOnboardingNotificationCache); final SimpleCacheManager manager = new SimpleCacheManager(); - manager.setCaches(Arrays.asList(inlineResponse201)); + manager.setCaches(caches); return manager; } + + private Cache getCache(final String name) { + LOGGER.info("Creating cache with name: {}", name); + return new ConcurrentMapCache(name); + } } diff --git a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/constants/Constant.java b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/constants/Constant.java index ceb5be5a..c35be6d1 100644 --- a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/constants/Constant.java +++ b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/constants/Constant.java @@ -37,9 +37,11 @@ public class Constant { public static final String VNF_CONFIG_PROPERTIES = "{\"isAutoScaleEnabled\": \"true\",\"isAutoHealingEnabled\": \"true\"}"; public static final String IN_LINE_RESPONSE_201_CACHE = "inlineResponse201"; + public static final String VNF_PKG_ONBOARDING_NOTIFICATION_CACHE = "vnfPackageOnboardingNotificationCache"; public static final String PACKAGE_MANAGEMENT_BASE_URL = "/vnfpkgm/v1"; public static final String SUBSCRIPTION_ENDPOINT = "/subscribe"; public static final String NOTIFICATION_ENDPOINT = "/notification"; - public static final String VNFM_ADAPTER_ENDPOINT = "https://so-vnfm-adapter.onap:9092/so/vnfm-adapter/v1/"; + public static final String NOTIFICATION_CACHE_TEST_ENDPOINT = "/notification-cache-test/{vnfPkgId}"; + public static final String VNFM_ADAPTER_ENDPOINT = "/so/vnfm-adapter/v1/"; public static final String VNFM_ADAPTER_SUBSCRIPTION_ENDPOINT = VNFM_ADAPTER_ENDPOINT + "vnfpkgm/v1/subscriptions"; } diff --git a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/controller/SubscriptionNotificationController.java 
b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/controller/SubscriptionNotificationController.java index 2c6b8f24..1db20faf 100644 --- a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/controller/SubscriptionNotificationController.java +++ b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/controller/SubscriptionNotificationController.java @@ -21,23 +21,39 @@ package org.onap.so.svnfm.simulator.controller; import javax.ws.rs.core.MediaType; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.gson.TypeAdapter; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonWriter; import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.model.InlineResponse201; import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.model.PkgmSubscriptionRequest; import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.model.SubscriptionsAuthentication; import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.model.SubscriptionsAuthentication.AuthTypeEnum; import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.model.SubscriptionsAuthenticationParamsBasic; +import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.notification.model.VnfPackageChangeNotification; +import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.notification.model.VnfPackageOnboardingNotification; import org.onap.so.svnfm.simulator.constants.Constant; import org.onap.so.svnfm.simulator.services.SubscriptionManager; +import org.onap.so.svnfm.simulator.services.providers.VnfPkgOnboardingNotificationCacheServiceProviderImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; +import java.io.IOException; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.Optional; /** * @author Eoin Hanan (eoin.hanan@est.tech) @@ -47,10 +63,18 @@ import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping(path = Constant.PACKAGE_MANAGEMENT_BASE_URL, produces = MediaType.APPLICATION_JSON, consumes = MediaType.APPLICATION_JSON) public class SubscriptionNotificationController { + + private static final Logger logger = LoggerFactory.getLogger(SubscriptionNotificationController.class); + private final Gson gson; @Autowired private SubscriptionManager subscriptionManager; + @Autowired + private VnfPkgOnboardingNotificationCacheServiceProviderImpl vnfPkgOnboardingNotificationCacheServiceProvider; - private static final Logger logger = LoggerFactory.getLogger(SubscriptionNotificationController.class); + @Autowired + public SubscriptionNotificationController() { + 
this.gson = new GsonBuilder().registerTypeAdapter(LocalDateTime.class, new LocalDateTimeTypeAdapter()).create(); + } @Value("${spring.security.usercredentials[0].username}") private String username; @@ -99,9 +123,94 @@ public class SubscriptionNotificationController { * @return */ @PostMapping(value = Constant.NOTIFICATION_ENDPOINT, produces = {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) - public ResponseEntity postVnfPackageNotification(@RequestBody Object notification){ + public ResponseEntity postVnfPackageNotification(@RequestBody final Object notification){ logger.info("Vnf Notification received:\n{}", notification); + final String notificationString = gson.toJson(notification); + addNotificationObjectToCache(notificationString); return ResponseEntity.noContent().build(); } + /** + * Testing endpoint for checking that notifications have been received + * + * @param vnfPkgId + * @return + */ + @GetMapping(value = Constant.NOTIFICATION_CACHE_TEST_ENDPOINT) + public ResponseEntity getVnfPackageNotification(@PathVariable("vnfPkgId") final String vnfPkgId) { + logger.info("Getting notification with vnfPkgId: {}", vnfPkgId); + final Optional optionalVnfPackageOnboardingNotification = + vnfPkgOnboardingNotificationCacheServiceProvider.getVnfPkgOnboardingNotification(vnfPkgId); + if(optionalVnfPackageOnboardingNotification.isPresent()) { + VnfPackageOnboardingNotification vnfPackageOnboardingNotification = + optionalVnfPackageOnboardingNotification.get(); + logger.info("Return notification with vnfPkgId: {} and body {}", vnfPkgId, vnfPackageOnboardingNotification); + return ResponseEntity.ok().body(vnfPackageOnboardingNotification); + } + final String errorMessage = "No notification found with vnfPkgId: " + vnfPkgId; + logger.error(errorMessage); + return ResponseEntity.status(HttpStatus.NOT_FOUND).body(errorMessage); + } + + private void addNotificationObjectToCache(final String notification) { + logger.info("addNotificationObjectToCache(): {}", notification); + final String notificationType = getNotificationType(notification); + if (VnfPackageOnboardingNotification.NotificationTypeEnum.VNFPACKAGEONBOARDINGNOTIFICATION.getValue() + .equals(notificationType)) { + final VnfPackageOnboardingNotification pkgOnboardingNotification = + gson.fromJson(notification, VnfPackageOnboardingNotification.class); + logger.info("Onboarding notification received:\n{}", pkgOnboardingNotification); + final String vnfPkgId = pkgOnboardingNotification.getVnfPkgId(); + vnfPkgOnboardingNotificationCacheServiceProvider.addVnfPkgOnboardingNotification(vnfPkgId, pkgOnboardingNotification); + } else if (VnfPackageChangeNotification.NotificationTypeEnum.VNFPACKAGECHANGENOTIFICATION.getValue() + .equals(notificationType)) { + final VnfPackageChangeNotification pkgChangeNotification = + gson.fromJson(notification, VnfPackageChangeNotification.class); + logger.info("Change notification received:\n{}", pkgChangeNotification); + } else { + final String errorMessage = "An error occurred. 
Notification type not supported for: " + notificationType; + logger.error(errorMessage); + throw new RuntimeException(errorMessage); + } + } + + private String getNotificationType(final String notification) { + try { + logger.info("getNotificationType() notification: {}", notification); + final JsonParser parser = new JsonParser(); + final JsonObject element = (JsonObject) parser.parse(notification); + return element.get("notificationType").getAsString(); + } catch (final Exception e) { + logger.error("An error occurred processing notificiation: {}", e.getMessage()); + } + throw new RuntimeException( + "Unable to parse notification type in object \n" + notification); + } + + public static class LocalDateTimeTypeAdapter extends TypeAdapter { + + private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"); + + @Override + public void write(final JsonWriter out, final LocalDateTime localDateTime) throws IOException { + if (localDateTime == null) { + out.nullValue(); + } else { + out.value(FORMATTER.format(localDateTime)); + } + } + + @Override + public LocalDateTime read(final JsonReader in) throws IOException { + switch (in.peek()) { + case NULL: + in.nextNull(); + return null; + default: + final String dateTime = in.nextString(); + return LocalDateTime.parse(dateTime, FORMATTER); + } + } + } + } diff --git a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/services/SubscriptionManager.java b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/services/SubscriptionManager.java index 2050ab0d..ddda7b64 100644 --- a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/services/SubscriptionManager.java +++ b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/services/SubscriptionManager.java @@ -37,6 +37,9 @@ public class SubscriptionManager { @Value("${vnfm-adapter.auth.password}") private String password; + @Value("${vnfm-adapter.base.endpoint:http://so-etsi-sol003-adapter.onap:9092}") + private String baseEndpoint; + @Autowired public SubscriptionManager( @Qualifier(SSL_BASED_CONFIGURABLE_REST_TEMPLATE) final RestTemplate restTemplate) { @@ -52,7 +55,7 @@ public class SubscriptionManager { public InlineResponse201 createSubscription(final PkgmSubscriptionRequest pkgmSubscriptionRequest) { final byte[] encodedAuth = getBasicAuth(username, password); final String authHeader = "Basic " + new String(encodedAuth); - final String uri = Constant.VNFM_ADAPTER_SUBSCRIPTION_ENDPOINT; + final String uri = baseEndpoint + Constant.VNFM_ADAPTER_SUBSCRIPTION_ENDPOINT; final HttpHeaders headers = new HttpHeaders(); headers.add(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON); headers.add(HttpHeaders.AUTHORIZATION, authHeader); diff --git a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/services/providers/VnfPkgOnboardingNotificationCacheServiceProvider.java b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/services/providers/VnfPkgOnboardingNotificationCacheServiceProvider.java new file mode 100644 index 00000000..b62fb862 --- /dev/null +++ 
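The controller above caches incoming VNF package onboarding notifications and exposes them again through the new notification-cache-test endpoint. For a quick manual check the flow can be exercised with plain curl; this is only a sketch, and the simulator address and the minimal payload below are assumptions (the CSIT suites drive this through Robot keywords instead):

VNFM_SIM=localhost:9093    # placeholder host:port for the running vnfm-simulator

# Post an onboarding notification to the existing notification endpoint
curl -sk -X POST "http://${VNFM_SIM}/vnfpkgm/v1/notification" \
     -H "Content-Type: application/json" \
     -d '{"notificationType":"VnfPackageOnboardingNotification","vnfPkgId":"myVnfPkgId","timeStamp":"2020-01-01 01:01:01"}'

# Read the cached notification back; a 404 means nothing was cached for that vnfPkgId
curl -sk "http://${VNFM_SIM}/vnfpkgm/v1/notification-cache-test/myVnfPkgId"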
b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/services/providers/VnfPkgOnboardingNotificationCacheServiceProvider.java @@ -0,0 +1,43 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2021 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ +package org.onap.so.svnfm.simulator.services.providers; + +import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.notification.model.VnfPackageOnboardingNotification; +import java.util.Optional; + +/** + * @author Andrew Lamb (andrew.a.lamb@est.tech) + */ +public interface VnfPkgOnboardingNotificationCacheServiceProvider { + + /** + * Add a VnfPkgOnboardingNotification to the cache + * @param vnfPkgId + * @param vnfPackageOnboardingNotification + */ + void addVnfPkgOnboardingNotification(final String vnfPkgId, final VnfPackageOnboardingNotification vnfPackageOnboardingNotification); + + /** + * Get a VnfPkgOnboardingNotification from the cache + * @param vnfPkgId + * @return + */ + Optional getVnfPkgOnboardingNotification(final String vnfPkgId); +} diff --git a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/services/providers/VnfPkgOnboardingNotificationCacheServiceProviderImpl.java b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/services/providers/VnfPkgOnboardingNotificationCacheServiceProviderImpl.java new file mode 100644 index 00000000..3f522138 --- /dev/null +++ b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/java/org/onap/so/svnfm/simulator/services/providers/VnfPkgOnboardingNotificationCacheServiceProviderImpl.java @@ -0,0 +1,63 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (C) 2021 Nordix Foundation. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * SPDX-License-Identifier: Apache-2.0 + * ============LICENSE_END========================================================= + */ +package org.onap.so.svnfm.simulator.services.providers; + +import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.notification.model.VnfPackageOnboardingNotification; +import org.onap.so.simulator.cache.provider.AbstractCacheServiceProvider; +import org.onap.so.svnfm.simulator.constants.Constant; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.Cache; +import org.springframework.cache.CacheManager; +import org.springframework.stereotype.Service; +import java.util.Optional; + +/** + * @author Andrew Lamb (andrew.a.lamb@est.tech) + */ +@Service +public class VnfPkgOnboardingNotificationCacheServiceProviderImpl extends AbstractCacheServiceProvider + implements VnfPkgOnboardingNotificationCacheServiceProvider { + + private static final Logger LOGGER = LoggerFactory.getLogger(VnfPkgOnboardingNotificationCacheServiceProviderImpl.class); + + @Autowired + public VnfPkgOnboardingNotificationCacheServiceProviderImpl(final CacheManager cacheManager) { + super(cacheManager); + } + + @Override public void addVnfPkgOnboardingNotification(final String vnfPkgId, + final VnfPackageOnboardingNotification vnfPackageOnboardingNotification) { + LOGGER.debug("Adding {} to cache with vnfPkgId: {}", vnfPackageOnboardingNotification, vnfPkgId); + getCache(Constant.VNF_PKG_ONBOARDING_NOTIFICATION_CACHE).put(vnfPkgId, vnfPackageOnboardingNotification); + } + + @Override public Optional getVnfPkgOnboardingNotification(final String vnfPkgId) { + LOGGER.debug("Getting vnfPkgOnboardingNotification from cache using vnfPkgId: {}", vnfPkgId); + final Cache cache = getCache(Constant.VNF_PKG_ONBOARDING_NOTIFICATION_CACHE); + final VnfPackageOnboardingNotification vnfPackageOnboardingNotification = cache.get(vnfPkgId, VnfPackageOnboardingNotification.class); + if (vnfPackageOnboardingNotification != null) { + return Optional.of(vnfPackageOnboardingNotification); + } + LOGGER.error("Unable to find vnfPkgOnboardingNotification in cache using vnfPkgId: {}", vnfPkgId); + return Optional.empty(); + } +} diff --git a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/resources/application.yaml b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/resources/application.yaml index db2a6d4c..5d655b82 100644 --- a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/resources/application.yaml +++ b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/main/resources/application.yaml @@ -47,6 +47,8 @@ vnfm-adapter: auth: name: vnfm password: password1$ + base: + endpoint: http://so-etsi-sol003-adapter.onap:9092 vnfds: vnfdlist: - vnfdid: 1 @@ -88,4 +90,4 @@ vnfds: - vnfcid: VNFC8 resourceTemplateId: vnfd4_vnfc6 vduId: vnfd4_vduForVnfc6 - type: COMPUTE \ No newline at end of file + type: COMPUTE diff --git a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/java/org/onap/so/svnfm/simulator/controllers/TestSubscriptionNotificationController.java b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/java/org/onap/so/svnfm/simulator/controllers/TestSubscriptionNotificationController.java index 743e2c04..c35bbaf7 100644 --- 
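With the SOL003 adapter address now read from the vnfm-adapter.base.endpoint property (default http://so-etsi-sol003-adapter.onap:9092), the subscription target can be redirected without rebuilding the simulator. A minimal sketch of a local override, assuming the service is launched as a plain Spring Boot jar (the jar name is a placeholder):

# Point the vnfm-simulator at a locally running SOL003 adapter
java -jar vnfm-service.jar --vnfm-adapter.base.endpoint=http://localhost:9092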
a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/java/org/onap/so/svnfm/simulator/controllers/TestSubscriptionNotificationController.java +++ b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/java/org/onap/so/svnfm/simulator/controllers/TestSubscriptionNotificationController.java @@ -31,17 +31,20 @@ import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.model. import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.model.SubscriptionsFilter1; import org.onap.so.adapters.vnfmadapter.extclients.vnfm.packagemanagement.notification.model.VnfPackageOnboardingNotification; import org.onap.so.svnfm.simulator.config.SvnfmApplication; +import org.onap.so.svnfm.simulator.controller.SubscriptionNotificationController; import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.web.client.TestRestTemplate; +import org.springframework.boot.web.client.RestTemplateBuilder; import org.springframework.boot.web.server.LocalServerPort; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; +import org.springframework.http.converter.json.GsonHttpMessageConverter; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.web.client.MockRestServiceServer; @@ -77,6 +80,7 @@ import static org.springframework.test.web.client.response.MockRestResponseCreat public class TestSubscriptionNotificationController { private static final Logger LOGGER = getLogger(TestSubscriptionNotificationController.class); + private static final String SOL003_SUBSCRIPTION_URL = "http://so-etsi-sol003-adapter.onap:9092" + VNFM_ADAPTER_SUBSCRIPTION_ENDPOINT; @LocalServerPort private int port; @@ -85,7 +89,6 @@ public class TestSubscriptionNotificationController { private RestTemplate restTemplate; private MockRestServiceServer mockRestServiceServer; - @Autowired private TestRestTemplate testRestTemplate; private Gson gson; @@ -94,8 +97,11 @@ public class TestSubscriptionNotificationController { @Before public void setup() { mockRestServiceServer = MockRestServiceServer.bindTo(restTemplate).build(); - gson = new GsonBuilder().create(); + gson = new GsonBuilder().registerTypeAdapter(LocalDateTime.class, + new SubscriptionNotificationController.LocalDateTimeTypeAdapter()).create(); vnfmSimulatorCallbackUrl = getBaseUrl(port) + PACKAGE_MANAGEMENT_BASE_URL + NOTIFICATION_ENDPOINT; + testRestTemplate = new TestRestTemplate( + new RestTemplateBuilder().additionalMessageConverters(new GsonHttpMessageConverter(gson))); } @After @@ -118,7 +124,7 @@ public class TestSubscriptionNotificationController { new InlineResponse201().id("subscriptionId").filter(new SubscriptionsFilter1()) .callbackUri("callbackUri"); - mockRestServiceServer.expect(requestTo(VNFM_ADAPTER_SUBSCRIPTION_ENDPOINT)).andExpect(method(HttpMethod.POST)) + mockRestServiceServer.expect(requestTo(SOL003_SUBSCRIPTION_URL)).andExpect(method(HttpMethod.POST)) .andExpect(content().json(gson.toJson(pkgmSubscriptionRequest))) .andRespond(withSuccess(gson.toJson(inlineResponse), APPLICATION_JSON)); @@ -137,7 +143,7 @@ public class 
TestSubscriptionNotificationController { new InlineResponse201().id("subscriptionId").filter(new SubscriptionsFilter1()) .callbackUri("callbackUri"); - mockRestServiceServer.expect(requestTo(VNFM_ADAPTER_SUBSCRIPTION_ENDPOINT)).andExpect(method(HttpMethod.POST)) + mockRestServiceServer.expect(requestTo(SOL003_SUBSCRIPTION_URL)).andExpect(method(HttpMethod.POST)) .andExpect(content().json(gson.toJson(pkgmSubscriptionRequest))) .andRespond(withSuccess(gson.toJson(inlineResponse), APPLICATION_JSON)); @@ -152,8 +158,6 @@ public class TestSubscriptionNotificationController { final VnfPackageOnboardingNotification vnfPackageOnboardingNotification = gson.fromJson(getNotification(VNFPACKAGEONBOARDINGNOTIFICATION), VnfPackageOnboardingNotification.class); - final LocalDateTime timestamp = LocalDateTime.of(2020, 1, 1, 1, 1, 1, 1); - vnfPackageOnboardingNotification.setTimeStamp(timestamp); final ResponseEntity responseEntity = postNotification(vnfPackageOnboardingNotification); assertEquals(HttpStatus.NO_CONTENT, responseEntity.getStatusCode()); } diff --git a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/resources/application.yaml b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/resources/application.yaml index 3a2268cb..5e5aba8b 100644 --- a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/resources/application.yaml +++ b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/resources/application.yaml @@ -40,6 +40,8 @@ vnfm-adapter: auth: name: vnfm password: password1$ + base: + endpoint: http://so-etsi-sol003-adapter.onap:9092 vnfds: vnfdlist: - vnfdid: 1 diff --git a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/resources/test-data/vnf-package-onboarding-notification.json b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/resources/test-data/vnf-package-onboarding-notification.json index 40b565be..bcc36592 100644 --- a/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/resources/test-data/vnf-package-onboarding-notification.json +++ b/plans/so/integration-etsi-testing/so-simulators/vnfm-simulator/vnfm-service/src/test/resources/test-data/vnf-package-onboarding-notification.json @@ -1,6 +1,7 @@ { "id": "string", "notificationType": "VnfPackageOnboardingNotification", + "timeStamp":"2020-01-01 01:01:01", "subscriptionId": "string", "vnfPkgId": "string", "vnfdId": "string", diff --git a/plans/so/integration-etsi-testing/testplan.txt b/plans/so/integration-etsi-testing/testplan.txt index 17e28078..963abb93 100644 --- a/plans/so/integration-etsi-testing/testplan.txt +++ b/plans/so/integration-etsi-testing/testplan.txt @@ -1,5 +1,8 @@ # Test suites are relative paths under [integration/csit.git]/tests/. # Place the suites in run order. 
+so/etsi/etsi_vnf_subscription_tests.robot so/etsi/etsi_package_onboarding_tests.robot so/etsi/etsi_vnf_lcm_tests.robot so/etsi/etsi_vnf_package_management_tests.robot +so/etsi/etsi_vnf_notification_tests.robot +so/etsi/etsi_nfvo_ns_lcm_tests.robot diff --git a/plans/so/macroflow/cloud_owner_sql/cloud_owner.sql b/plans/so/macroflow/cloud_owner_sql/cloud_owner.sql new file mode 100644 index 00000000..e5147993 --- /dev/null +++ b/plans/so/macroflow/cloud_owner_sql/cloud_owner.sql @@ -0,0 +1,2 @@ +Use catalogdb; +insert into cloud_sites(ID, REGION_ID, IDENTITY_SERVICE_ID, CLOUD_VERSION, CLLI, ORCHESTRATOR) values("EtsiCloudRegion", "EtsiCloudRegion", "DEFAULT_KEYSTONE", "2.5", "clli2", "multicloud"); \ No newline at end of file diff --git a/plans/so/macroflow/setup.sh b/plans/so/macroflow/setup.sh new file mode 100644 index 00000000..8255562f --- /dev/null +++ b/plans/so/macroflow/setup.sh @@ -0,0 +1,194 @@ +#!/bin/bash + +MAVEN_VERSION_DIR="apache-maven-3.3.9" +MAVEN_TAR_FILE="$MAVEN_VERSION_DIR-bin.tar.gz" +MAVEN_TAR_LOCATION="https://archive.apache.org/dist/maven/maven-3/3.3.9/binaries/$MAVEN_TAR_FILE" + +# SCRIPT_HOME="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +# Integration-Etsi Path +SCRIPT_HOME=$WORKSPACE/plans/so/integration-etsi-testing +SCRIPT_NAME=$(basename $0) +CONFIG_DIR=$SCRIPT_HOME/config +ENV_FILE=$CONFIG_DIR/env +TEMP_DIR_PATH=$SCRIPT_HOME/temp +TEST_LAB_DIR_PATH=$TEMP_DIR_PATH/test_lab +DOCKER_COMPOSE_FILE_PATH=$SCRIPT_HOME/docker-compose.yml +DOCKER_COMPOSE_LOCAL_OVERRIDE_FILE=$SCRIPT_HOME/docker-compose.local.yml +TEAR_DOWN_SCRIPT=$SCRIPT_HOME/teardown.sh +CAMUNDA_SQL_SCRIPT_NAME=mariadb_engine_7.10.0.sql +CAMUNDA_SQL_SCRIPT_DIR=$CONFIG_DIR/camunda-sql +TEST_LAB_SQL_SCRIPTS_DIR=$TEST_LAB_DIR_PATH/volumes/mariadb/docker-entrypoint-initdb.d/db-sql-scripts + +MAVEN_DIR=$TEMP_DIR_PATH/maven +INSTALLED_MAVEN_DIR=$MAVEN_DIR/$MAVEN_VERSION_DIR +MVN=$INSTALLED_MAVEN_DIR/bin/mvn +MVN_VERSION="$MVN -v" +MVN_SETTINGS_XML="$SCRIPT_HOME/settings.xml" +MVN_CLEAN_INSTALL="$MVN clean install" +SIMULATOR_MAVEN_PROJECT_POM="$SCRIPT_HOME/so-simulators/pom.xml" +WAIT_FOR_WORKAROUND_SCRIPT=$CONFIG_DIR/"wait-for-workaround-job.sh" +WAIT_FOR_POPULATE_AAI_SCRIPT=$CONFIG_DIR/"wait-for-aai-config-job.sh" +WAIT_FOR_CONTAINER_SCRIPT=$CONFIG_DIR/"wait-for-container.sh" + +# Macroflow Path +MACRO_HOME="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +SQL_PATH=$MACRO_HOME/cloud_owner_sql + + + +echo "Running $SCRIPT_HOME/$SCRIPT_NAME ..." + +export $(egrep -v '^#' $ENV_FILE | xargs) + +MANDATORY_VARIABLES_NAMES=( "NEXUS_DOCKER_REPO_MSO" "DOCKER_ENVIRONMENT" "TAG" "TIME_OUT_DEFAULT_VALUE_SEC" "PROJECT_NAME" "DEFAULT_NETWORK_NAME", "ETSI_CATALOG_IMAGE_VERSION", "SOL_003_ADAPTER_IMAGE_VERSION", "ETSI_NFVO_NS_LCM_IMAGE_VERSION", "MARIADB_VERSION") + +for var in "${MANDATORY_VARIABLES_NAMES[@]}" + do + if [ -z "${!var}" ]; then + echo "Missing mandatory attribute $var in $ENV_FILE" + exit 1 + fi +done + +if [[ ! "$TEMP_DIR_PATH" || ! -d "$TEMP_DIR_PATH" ]]; then + echo "Creating temporary directory $TEMP_DIR_PATH" + mkdir $TEMP_DIR_PATH + + if [ $? -ne 0 ]; then + echo "Could not create $TEMP_DIR_PATH" + exit 1 + fi + +fi +echo "Will use ${TEMP_DIR_PATH} directory" + +if [[ ! "$MAVEN_DIR" || ! -d "$MAVEN_DIR" ]]; then + echo "Creating temporary maven directory $MAVEN_DIR" + mkdir $MAVEN_DIR + + if [ $? -ne 0 ]; then + echo "Could not create $MAVEN_DIR" + exit 1 + fi +fi +echo "Will use ${MAVEN_DIR} directory for maven install" + +if [[ ! "$INSTALLED_MAVEN_DIR" || ! 
-d "$INSTALLED_MAVEN_DIR" ]]; then + echo "Installing maven ..." + cd $MAVEN_DIR + + CURL=`which curl` + if [[ ! "$CURL" ]]; then + echo "curl command is not installed" + echo "Unable to execute test plan" + exit 1 + fi + curl -O $MAVEN_TAR_LOCATION + + TAR=`which tar` + if [[ ! "$TAR" ]]; then + echo "tar command is not installed" + echo "Unable to execute test plan" + exit 1 + fi + + tar -xzvf $MAVEN_TAR_FILE + + echo "Finished installing maven ..." +fi + +echo "Maven installed under directory $INSTALLED_MAVEN_DIR" + +$MVN_VERSION + +if [ $? -ne 0 ]; then + echo "Unable to run mvn -v command" + exit 1 +fi + +cd $SCRIPT_HOME + +echo "Will build simulator project using $MVN_CLEAN_INSTALL -f $SIMULATOR_MAVEN_PROJECT_POM --settings $MVN_SETTINGS_XML" +$MVN_CLEAN_INSTALL -f $SIMULATOR_MAVEN_PROJECT_POM --settings $MVN_SETTINGS_XML + +if [ $? -ne 0 ]; then + echo "Maven build failed" + exit 1 +fi + +echo "Will clone docker-config project ... " + + +if [[ -d "$TEST_LAB_DIR_PATH" ]]; then + echo "$TEST_LAB_DIR_PATH already exists" + echo "Removing $TEST_LAB_DIR_PATH directory ..." + rm -rf $TEST_LAB_DIR_PATH +fi + +git clone http://gerrit.onap.org/r/so/docker-config.git $TEST_LAB_DIR_PATH + +echo "Replacing $CAMUNDA_SQL_SCRIPT_NAME ..." +rm -rf $TEST_LAB_SQL_SCRIPTS_DIR/$CAMUNDA_SQL_SCRIPT_NAME +cp $CAMUNDA_SQL_SCRIPT_DIR/$CAMUNDA_SQL_SCRIPT_NAME $TEST_LAB_SQL_SCRIPTS_DIR + +export TEST_LAB_DIR=$TEST_LAB_DIR_PATH +export CONFIG_DIR_PATH=$CONFIG_DIR + +if [ "$DOCKER_ENVIRONMENT" == "remote" ]; then + echo "Starting docker containers with remote images ..." + docker-compose -f $DOCKER_COMPOSE_FILE_PATH -p $PROJECT_NAME up -d +elif [ "$DOCKER_ENVIRONMENT" == "local" ]; then + echo "Starting docker containers with local images ..." + docker-compose -f $DOCKER_COMPOSE_FILE_PATH -f $DOCKER_COMPOSE_LOCAL_OVERRIDE_FILE -p $PROJECT_NAME up -d +else + echo "DOCKER_ENVIRONMENT not set correctly in $ENV_FILE. Allowed values: local | remote" + exit 1 +fi + +echo "Sleeping for 3m" +sleep 3m + +docker ps -a +export SQL_DIR_PATH=$SQL_PATH +docker cp $SQL_DIR_PATH/cloud_owner.sql $(docker container ls | grep mariadb | awk '{ print $1 }'):/ +docker exec $(docker container ls | grep mariadb | awk '{ print $1 }') sh -c 'mysql -u root -ppassword < /cloud_owner.sql' + +echo "Will execute $WAIT_FOR_WORKAROUND_SCRIPT script" +$WAIT_FOR_WORKAROUND_SCRIPT + +if [ $? -ne 0 ]; then + echo "ERROR: $WAIT_FOR_WORKAROUND_SCRIPT failed" + echo "Will stop running docker containers . . ." + $TEAR_DOWN_SCRIPT + exit 1 +fi + +echo "Will execute $WAIT_FOR_POPULATE_AAI_SCRIPT script" +$WAIT_FOR_POPULATE_AAI_SCRIPT + +if [ $? -ne 0 ]; then + echo "ERROR: $WAIT_FOR_POPULATE_AAI_SCRIPT failed" + echo "Will stop running docker containers . . ." + $TEAR_DOWN_SCRIPT + exit 1 +fi + +PODS_NAMES=( "api-handler-infra" "bpmn-infra") + +for pod in "${PODS_NAMES[@]}" + do + echo "Will execute $WAIT_FOR_CONTAINER_SCRIPT to wait for $pod container to start up" + $WAIT_FOR_CONTAINER_SCRIPT -c "$pod" -t "300" -n "$DEFAULT_NETWORK_NAME" + + if [ $? -ne 0 ]; then + echo "ERROR: $WAIT_FOR_CONTAINER_SCRIPT for pod: $pod failed" + echo "Will stop running docker containers . . ." 
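Once setup.sh has copied cloud_owner.sql into the MariaDB container and executed it, the inserted cloud site can be verified by hand. A small sketch reusing the same container lookup and root password as the script itself:

MARIADB_ID=$(docker container ls | grep mariadb | awk '{ print $1 }')
docker exec "$MARIADB_ID" sh -c \
    'mysql -u root -ppassword catalogdb -e "SELECT ID, REGION_ID, ORCHESTRATOR FROM cloud_sites"'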
+ $TEAR_DOWN_SCRIPT + exit 1 + fi +done + +REPO_IP='127.0.0.1' +ROBOT_VARIABLES="-v REPO_IP:${REPO_IP}" + +echo "Finished executing $SCRIPT_HOME/$SCRIPT_NAME" diff --git a/plans/so/macroflow/teardown.sh b/plans/so/macroflow/teardown.sh new file mode 100644 index 00000000..82966887 --- /dev/null +++ b/plans/so/macroflow/teardown.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +# SCRIPT_HOME="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +SCRIPT_HOME=$WORKSPACE/plans/so/integration-etsi-testing +SCRIPT_NAME=$(basename $0) +CONFIG_DIR=$SCRIPT_HOME/config +ENV_FILE=$CONFIG_DIR/env +TEMP_DIR_PATH=$SCRIPT_HOME/temp +TEST_LAB_DIR_PATH=$TEMP_DIR_PATH/test_lab +DOCKER_COMPOSE_FILE_PATH=$SCRIPT_HOME/docker-compose.yml +DOCKER_COMPOSE_LOCAL_OVERRIDE_FILE=$SCRIPT_HOME/docker-compose.local.yml + + +echo "Running $SCRIPT_HOME/$SCRIPT_NAME ..." +export $(egrep -v '^#' $ENV_FILE | xargs) +export TEST_LAB_DIR=$TEST_LAB_DIR_PATH +export CONFIG_DIR_PATH=$CONFIG_DIR + +echo "Sleeping 2m for completing the macroflow task" +sleep 2m + +if [ "$DOCKER_ENVIRONMENT" == "remote" ]; then + echo "Tearing down docker containers from remote images ..." + docker-compose -f $DOCKER_COMPOSE_FILE_PATH -p $PROJECT_NAME down +elif [ "$DOCKER_ENVIRONMENT" == "local" ]; then + echo "Tearing down docker containers from local images ..." + docker-compose -f $DOCKER_COMPOSE_FILE_PATH -f $DOCKER_COMPOSE_LOCAL_OVERRIDE_FILE -p $PROJECT_NAME down +else + echo "Couldn't find valid property for DOCKER_ENVIRONMENT in $ENV_FILE." + echo "Attempting normal teardown ..." + docker-compose -f $DOCKER_COMPOSE_FILE_PATH -p $PROJECT_NAME down +fi + +echo "Finished executing $SCRIPT_HOME/$SCRIPT_NAME" diff --git a/plans/so/macroflow/testplan.txt b/plans/so/macroflow/testplan.txt new file mode 100644 index 00000000..00a8d8b7 --- /dev/null +++ b/plans/so/macroflow/testplan.txt @@ -0,0 +1,3 @@ +# Test suites are relative paths under [integration/csit.git]/tests/. +# Place the suites in run order. 
+so/sanity-check/macroflow.robot diff --git a/plans/usecases-5G-bulkpm/5G-bulkpm/assets/dfc/datafile_endpoints.json b/plans/usecases-5G-bulkpm/5G-bulkpm/assets/dfc/datafile_endpoints.json index 8aaca058..c845b2db 100644 --- a/plans/usecases-5G-bulkpm/5G-bulkpm/assets/dfc/datafile_endpoints.json +++ b/plans/usecases-5G-bulkpm/5G-bulkpm/assets/dfc/datafile_endpoints.json @@ -1,10 +1,10 @@ { "config": { "//description": "This file is only used for testing purposes", - "dmaap.ftpesConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks", - "dmaap.ftpesConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass", - "dmaap.ftpesConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks", - "dmaap.ftpesConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass", + "dmaap.certificateConfig.keyCert": "/opt/app/datafile/etc/cert/cert.jks", + "dmaap.certificateConfig.keyPasswordPath": "/opt/app/datafile/etc/cert/jks.pass", + "dmaap.certificateConfig.trustedCa": "/opt/app/datafile/etc/cert/trust.jks", + "dmaap.certificateConfig.trustedCaPasswordPath": "/opt/app/datafile/etc/cert/trust.pass", "dmaap.security.trustStorePath": "/opt/app/datafile/etc/cert/trust.jks", "dmaap.security.trustStorePasswordPath": "/opt/app/datafile/etc/cert/trust.pass", "dmaap.security.keyStorePath": "/opt/app/datafile/etc/cert/cert.jks", diff --git a/scripts/dcaegen2-collectors-datafile/dfc-management/dfc-start.sh b/scripts/dcaegen2-collectors-datafile/dfc-management/dfc-start.sh index 38b78f2d..b7a595c1 100755 --- a/scripts/dcaegen2-collectors-datafile/dfc-management/dfc-start.sh +++ b/scripts/dcaegen2-collectors-datafile/dfc-management/dfc-start.sh @@ -26,6 +26,16 @@ DOCKER_SIM_NWNAME="dfcnet" echo "Creating docker network $DOCKER_SIM_NWNAME, if needed" docker network ls | grep $DOCKER_SIM_NWNAME >/dev/null || docker network create $DOCKER_SIM_NWNAME +if [ $HTTP_TYPE = "HTTPS" ] + then + docker run \ + --name oom-certservice-post-processor \ + --env-file $SIMGROUP_ROOT/../certservice/merger/merge-certs.env \ + --mount type=bind,src=$SIMGROUP_ROOT/tls,dst=/opt/app/datafile/etc/cert \ + --mount type=bind,src=$SIMGROUP_ROOT/../certservice/generated-certs/dfc-p12,dst=/opt/app/datafile/etc/ \ + nexus3.onap.org:10001/onap/org.onap.oom.platform.cert-service.oom-certservice-post-processor:latest +fi + docker-compose up -d DFC_APP="$(docker ps -q --filter='name=dfc_app0')" diff --git a/scripts/dmaap-datarouter/remove_cert_from_ca.py b/scripts/dmaap-datarouter/remove_cert_from_ca.py index 192e274f..4ed9b777 100644 --- a/scripts/dmaap-datarouter/remove_cert_from_ca.py +++ b/scripts/dmaap-datarouter/remove_cert_from_ca.py @@ -28,7 +28,7 @@ dr_cert_exists = False with open(cafile, 'r+b', buffering=0) as outfile: for line in outfile.readlines()[-35:-34]: - if "# Serial: 0x9EAEEDC0A7CEB59D" in line: + if '# Serial: 0x9EAEEDC0A7CEB59D'.encode() in line: dr_cert_exists = True if dr_cert_exists: outfile.seek(0, os.SEEK_END) diff --git a/scripts/policy/config/xacml-pdp/defaultConfig.json b/scripts/policy/config/xacml-pdp/defaultConfig.json index 5a6573a3..f489919a 100644 --- a/scripts/policy/config/xacml-pdp/defaultConfig.json +++ b/scripts/policy/config/xacml-pdp/defaultConfig.json @@ -1,6 +1,7 @@ { "name": "XacmlPdpParameters", "pdpGroup": "defaultGroup", + "pdpType": "xacml", "restServerParameters": { "host": "0.0.0.0", "port": 6969, diff --git a/scripts/sdnc/netconf-pnp-simulator/docker-compose.yml b/scripts/sdnc/netconf-pnp-simulator/docker-compose.yml deleted file mode 100755 index d8e723ba..00000000 --- 
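The dfc-start.sh change above only launches the oom-certservice-post-processor when HTTP_TYPE is HTTPS, so the generated certificates are merged into the DFC cert directory before docker-compose brings DFC up. A rough sketch of triggering that path manually; the simulator-group path is an assumption and is normally exported by the Robot suites:

export SIMGROUP_ROOT=/path/to/simulator-group   # placeholder; the suites pass this to the script
export HTTP_TYPE=HTTPS
./dfc-start.sh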
a/scripts/sdnc/netconf-pnp-simulator/docker-compose.yml +++ /dev/null @@ -1,12 +0,0 @@ -version: '3' - -services: - netconf-pnp-simulator: - image: nexus3.onap.org:10001/onap/integration/simulators/netconf-pnp-simulator:2.8.6 - container_name: netconf-simulator - restart: always - ports: - - "830:830" - - "6513:6513" - volumes: - - ${NETCONF_CONFIG_PATH}:/config/modules/mynetconf diff --git a/scripts/sdnc/netconf-pnp-simulator/netconf-config/data.json b/scripts/sdnc/netconf-pnp-simulator/netconf-config/data.json deleted file mode 100644 index 63872eef..00000000 --- a/scripts/sdnc/netconf-pnp-simulator/netconf-config/data.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "mynetconf:netconflist": { - "netconf": [ - { - "netconf-id": 3, - "netconf-param": 3 - } - ] - } -} diff --git a/scripts/sdnc/netconf-pnp-simulator/netconf-config/model.yang b/scripts/sdnc/netconf-pnp-simulator/netconf-config/model.yang deleted file mode 100644 index 6c8c36ab..00000000 --- a/scripts/sdnc/netconf-pnp-simulator/netconf-config/model.yang +++ /dev/null @@ -1,29 +0,0 @@ -module mynetconf { - yang-version 1.1; - namespace "urn:mynetconf:test"; - - prefix nft; - - organization - "mynetconf"; - contact - "my netconf address"; - description - "yang model for mynetconf"; - revision "2019-03-01" { - description - "initial version"; - } - - container netconflist { - list netconf { - key netconf-id; - leaf netconf-id { - type uint16; - } - leaf netconf-param { - type uint32; - } - } - } -} diff --git a/scripts/sdnc/netconf-pnp-simulator/netconf-config/subscriber.py b/scripts/sdnc/netconf-pnp-simulator/netconf-config/subscriber.py deleted file mode 100755 index 61272967..00000000 --- a/scripts/sdnc/netconf-pnp-simulator/netconf-config/subscriber.py +++ /dev/null @@ -1,136 +0,0 @@ -#!/usr/bin/env python3 - -__author__ = "Mislav Novakovic " -__copyright__ = "Copyright 2018, Deutsche Telekom AG" -__license__ = "Apache 2.0" - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This sample application demonstrates use of Python programming language bindings for sysrepo library. -# Original c application was rewritten in Python to show similarities and differences -# between the two. -# -# Most notable difference is in the very different nature of languages, c is weakly statically typed language -# while Python is strongly dynamically typed. Python code is much easier to read and logic easier to comprehend -# for smaller scripts. Memory safety is not an issue but lower performance can be expected. -# -# The original c implementation is also available in the source, so one can refer to it to evaluate trade-offs. - -import sysrepo as sr -import sys - - -# Helper function for printing changes given operation, old and new value. 
-def print_change(op, old_val, new_val): - if op == sr.SR_OP_CREATED: - print(f"CREATED: {new_val.to_string()}") - elif op == sr.SR_OP_DELETED: - print(f"DELETED: {old_val.to_string()}") - elif op == sr.SR_OP_MODIFIED: - print(f"MODIFIED: {old_val.to_string()} to {new_val.to_string()}") - elif op == sr.SR_OP_MOVED: - print(f"MOVED: {new_val.xpath()} after {old_val.xpath()}") - - -# Helper function for printing events. -def ev_to_str(ev): - if ev == sr.SR_EV_VERIFY: - return "verify" - elif ev == sr.SR_EV_APPLY: - return "apply" - elif ev == sr.SR_EV_ABORT: - return "abort" - else: - return "unknown" - - -# Function to print current configuration state. -# It does so by loading all the items of a session and printing them out. -def print_current_config(session, module_name): - select_xpath = f"/{module_name}:*//*" - - values = session.get_items(select_xpath) - - if values is not None: - print("========== BEGIN CONFIG ==========") - for i in range(values.val_cnt()): - print(values.val(i).to_string(), end='') - print("=========== END CONFIG ===========") - - -# Function to be called for subscribed client of given session whenever configuration changes. -def module_change_cb(sess, module_name, event, private_ctx): - try: - print("========== Notification " + ev_to_str(event) + " =============================================") - if event == sr.SR_EV_APPLY: - print_current_config(sess, module_name) - - print("========== CHANGES: =============================================") - - change_path = f"/{module_name}:*" - - it = sess.get_changes_iter(change_path) - - while True: - change = sess.get_change_next(it) - if change is None: - break - print_change(change.oper(), change.old_val(), change.new_val()) - - print("========== END OF CHANGES =======================================") - except Exception as e: - print(e) - - return sr.SR_ERR_OK - - -def main(): - # Notable difference between c implementation is using exception mechanism for open handling unexpected events. - # Here it is useful because `Connection`, `Session` and `Subscribe` could throw an exception. 
- try: - module_name = "ietf-interfaces" - if len(sys.argv) > 1: - module_name = sys.argv[1] - else: - print("\nYou can pass the module name to be subscribed as the first argument") - - print(f"Application will watch for changes in {module_name}") - - # connect to sysrepo - conn = sr.Connection(module_name) - - # start session - sess = sr.Session(conn) - - # subscribe for changes in running config */ - subscribe = sr.Subscribe(sess) - - subscribe.module_change_subscribe(module_name, module_change_cb) - - try: - print_current_config(sess, module_name) - except Exception as e: - print(e) - - print("========== STARTUP CONFIG APPLIED AS RUNNING ==========") - - sr.global_loop() - - print("Application exit requested, exiting.") - - except Exception as e: - print(e) - - -if __name__ == '__main__': - main() diff --git a/scripts/sdnc/sdnc/certs/keys0.zip b/scripts/sdnc/sdnc/certs/keys0.zip index 48b4d90a..6f7f756b 100644 Binary files a/scripts/sdnc/sdnc/certs/keys0.zip and b/scripts/sdnc/sdnc/certs/keys0.zip differ diff --git a/scripts/sdnc/sdnc/docker-compose.yml b/scripts/sdnc/sdnc/docker-compose.yml index c47fab50..61bf8b6e 100755 --- a/scripts/sdnc/sdnc/docker-compose.yml +++ b/scripts/sdnc/sdnc/docker-compose.yml @@ -9,7 +9,10 @@ services: volumes: - /etc/localtime:/etc/localtime:ro environment: - - MYSQL_ROOT_PASSWORD=password + - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD} + - MYSQL_USER=${MYSQL_USER} + - MYSQL_PASSWORD=${MYSQL_PASSWORD} + - MYSQL_DATABASE=${MYSQL_DATABASE} hostname: mariadb.so.testlab.onap.org logging: @@ -29,12 +32,38 @@ services: - "8282:8181" hostname: sdnc + links: + - mariadb:dbhost + - mariadb:sdnctldb01 + - mariadb:sdnctldb02 environment: - - MYSQL_ROOT_PASSWORD=password + - MYSQL_ROOT_PASSWORD=${MYSQL_ROOT_PASSWORD} + - MYSQL_USER=${MYSQL_USER} + - MYSQL_PASSWORD=${MYSQL_PASSWORD} + - MYSQL_DATABASE=${MYSQL_DATABASE} - SDNC_CONFIG_DIR=/opt/onap/sdnc/data/properties - - MYSQL_PASSWD=password - - ODL_ADMIN_USERNAME=admin - - ODL_ADMIN_PASSWORD=Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U + - SDNC_BIN=/opt/onap/sdnc/bin + - ODL_CERT_DIR=/tmp + - ODL_ADMIN_USERNAME=${ODL_USER} + - ODL_ADMIN_PASSWORD=${ODL_PASSWORD} + - ODL_USER=${ODL_USER} + - ODL_PASSWORD=${ODL_PASSWORD} + - ODL_CERT_DIR=/opt/opendaylight/current/certs + - SDNC_DB_INIT=true + - HONEYCOMB_USER=${HONEYCOMB_USER} + - HONEYCOMB_PASSWORD=${HONEYCOMB_PASSWORD} + - TRUSTSTORE_PASSWORD=${TRUSTSTORE_PASSWORD} + - KEYSTORE_PASSWORD=${KEYSTORE_PASSWORD} + - SO_USER=${SO_USER} + - SO_PASSWORD=${SO_PASSWORD} + - NENG_USER=${NENG_USER} + - NENG_PASSWORD=${NENG_PASSWORD} + - CDS_USER=${CDS_USER} + - CDS_PASSWORD=${CDS_PASSWORD} + - ANSIBLE_USER=${ANSIBLE_USER} + - ANSIBLE_PASSWORD=${ANSIBLE_PASSWORD} + - SQL_CRYPTKEY=${SQL_CRYPTKEY} + - A1_TRUSTSTORE_PASSWORD=a1adapter depends_on: - mariadb dns: @@ -44,7 +73,3 @@ services: options: max-size: "30m" max-file: "5" - extra_hosts: - - sdnctldb02:${LOCAL_IP} - - sdnctldb01:${LOCAL_IP} - - dbhost:${LOCAL_IP} \ No newline at end of file diff --git a/tests/ccsdk-oran/polmansuite/__init__.robot b/tests/ccsdk-oran/polmansuite/__init__.robot deleted file mode 100644 index bf52713b..00000000 --- a/tests/ccsdk-oran/polmansuite/__init__.robot +++ /dev/null @@ -1,2 +0,0 @@ -*** Settings *** -Documentation Non-RT RIC Policy Management - polmansuite diff --git a/tests/ccsdk-oran/polmansuite/test.robot b/tests/ccsdk-oran/polmansuite/test.robot deleted file mode 100644 index 59d73eb8..00000000 --- a/tests/ccsdk-oran/polmansuite/test.robot +++ /dev/null @@ -1,17 +0,0 @@ -*** Settings *** 
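The sdnc docker-compose.yml above no longer carries hard-coded credentials; every value is resolved from the environment. A sketch of the kind of variables the caller now has to export before bringing the stack up (all values below are placeholders, not the ones used by the CSIT scripts):

export MYSQL_ROOT_PASSWORD=example-root-pw
export MYSQL_USER=sdnctl
export MYSQL_PASSWORD=example-db-pw
export MYSQL_DATABASE=sdnctldb01
export ODL_USER=admin
export ODL_PASSWORD=example-odl-pw
# ...plus the remaining *_USER/*_PASSWORD, TRUSTSTORE/KEYSTORE and SQL_CRYPTKEY variables referenced in the compose file
docker-compose up -d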
-Library OperatingSystem -Library Process - -*** Test Cases *** - -Functional Test Case 1 - [Documentation] Deploy PMS without SDNC - Start Process ${ARCHIVES}/test/pms_a1sim.sh - ${cli_cmd_output}= Wait For Process timeout=3600 - Should Be Equal as Integers ${cli_cmd_output.rc} 0 - -Functional Test Case 2 - [Documentation] Deploy PMS with SDNC - Start Process ${ARCHIVES}/test/pms_a1sim_sdnc.sh - ${cli_cmd_output}= Wait For Process timeout=3600 - Should Be Equal as Integers ${cli_cmd_output.rc} 0 diff --git a/tests/dcaegen2-collectors-datafile/testsuites/Functional-Single-File-Ftp-suite/FuncSingleFileFtp.robot b/tests/dcaegen2-collectors-datafile/testsuites/Functional-Single-File-Ftp-suite/FuncSingleFileFtp.robot index 7d8afbbd..bb70b502 100755 --- a/tests/dcaegen2-collectors-datafile/testsuites/Functional-Single-File-Ftp-suite/FuncSingleFileFtp.robot +++ b/tests/dcaegen2-collectors-datafile/testsuites/Functional-Single-File-Ftp-suite/FuncSingleFileFtp.robot @@ -106,4 +106,8 @@ Set Default Environment Variables Set Environment Variable DR_REDIR_SIM drsim_redir Set Environment Variable SFTP_SIMS sftp-server0:22 Set Environment Variable FTPES_SIMS ftpes-server-vsftpd0:21 - Set Environment Variable HTTP_SIMS http-server0:80 + Set Environment Variable HTTP_SIMS http-https-server0:80 + Set Environment Variable HTTPS_SIMS http-https-server0:443 + Set Environment Variable HTTPS_SIMS_NO_AUTH http-https-server0:8080 + Set Environment Variable HTTP_JWT_SIMS http-https-server0:32000 + Set Environment Variable HTTPS_JWT_SIMS http-https-server0:32100 diff --git a/tests/dcaegen2-collectors-datafile/testsuites/Functional-Single-File-Http-suite/FuncSingleFileHttp.robot b/tests/dcaegen2-collectors-datafile/testsuites/Functional-Single-File-Http-suite/FuncSingleFileHttp.robot index e37a9106..f96c2f96 100755 --- a/tests/dcaegen2-collectors-datafile/testsuites/Functional-Single-File-Http-suite/FuncSingleFileHttp.robot +++ b/tests/dcaegen2-collectors-datafile/testsuites/Functional-Single-File-Http-suite/FuncSingleFileHttp.robot @@ -6,7 +6,7 @@ Library Process Resource ../../resources/common-keywords.robot *** Variables *** -${CONSUL_UPL_APP} /usr/bin/curl -v http://127.0.0.1:8500/v1/kv/dfc_app0?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary @${SIMGROUP_ROOT}/consul/c12_feed2_PM_MEAS.json +${CONSUL_UPL_APP} /usr/bin/curl -v http://127.0.0.1:8500/v1/kv/dfc_app0?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary @${SIMGROUP_ROOT}/consul/c12_feed2_PM_HTTPS.json ${CONSUL_GET_APP} /usr/bin/curl -v http://127.0.0.1:8500/v1/kv/dfc_app0?raw ${CBS_GET_MERGED_CONFIG} /usr/bin/curl -v http://127.0.0.1:10000/service_component_all/dfc_app0 @@ -32,6 +32,25 @@ Verify single event with single 50MB HTTP file. From event poll to published fil [Documentation] Verify single event with single HTTP 50MB file from event poll to published file. Verify Single Event From Event Poll To Published File 50 --tc302 HTTP +######## Single file, HTTP +Verify single event with single 1MB HTTPS (basic authentication) file. From event poll to published file + [TAGS] DFC_FUNCTIONAL_30 + [Documentation] Verify single event with single HTTPS (basic authentication) 1MB file from event poll to published file. 
+ ${cli_cmd_output}= Run Process ${DFC_ROOT}/../dfc-containers-clean.sh stderr=STDOUT + Verify Single Event From Event Poll To Published File 1 --tc400 HTTPS + + +Verify single event with single 5MB HTTPS (basic authentication) file. From event poll to published file + [TAGS] DFC_FUNCTIONAL_31 + [Documentation] Verify single event with single HTTPS (basic authentication) 5MB file from event poll to published file. + Verify Single Event From Event Poll To Published File 5 --tc401 HTTPS + + +Verify single event with single 50MB HTTPS (basic authentication) file. From event poll to published file + [TAGS] DFC_FUNCTIONAL_32 + [Documentation] Verify single event with single HTTPS (basic authentication) 50MB file from event poll to published file. + Verify Single Event From Event Poll To Published File 50 --tc402 HTTPS + *** Keywords *** Verify Single Event From Event Poll To Published File @@ -90,4 +109,8 @@ Set Default Environment Variables Set Environment Variable DR_REDIR_SIM drsim_redir Set Environment Variable SFTP_SIMS sftp-server0:22 Set Environment Variable FTPES_SIMS ftpes-server-vsftpd0:21 - Set Environment Variable HTTP_SIMS http-server0:80 + Set Environment Variable HTTP_SIMS http-https-server0:80 + Set Environment Variable HTTPS_SIMS http-https-server0:443 + Set Environment Variable HTTPS_SIMS_NO_AUTH http-https-server0:8080 + Set Environment Variable HTTP_JWT_SIMS http-https-server0:32000 + Set Environment Variable HTTPS_JWT_SIMS http-https-server0:32100 diff --git a/tests/dcaegen2-collectors-datafile/testsuites/HTTP-Various-Connection-Types-suite/HttpVariousConnectionTypes.robot b/tests/dcaegen2-collectors-datafile/testsuites/HTTP-Various-Connection-Types-suite/HttpVariousConnectionTypes.robot new file mode 100755 index 00000000..9e69e536 --- /dev/null +++ b/tests/dcaegen2-collectors-datafile/testsuites/HTTP-Various-Connection-Types-suite/HttpVariousConnectionTypes.robot @@ -0,0 +1,110 @@ +*** Settings *** +Library OperatingSystem +Library RequestsLibrary +Library Process + +Resource ../../resources/common-keywords.robot + +*** Variables *** +${CONSUL_UPL_APP} /usr/bin/curl -v http://127.0.0.1:8500/v1/kv/dfc_app0?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary @${SIMGROUP_ROOT}/consul/c12_feed2_PM_HTTPS.json +${CONSUL_GET_APP} /usr/bin/curl -v http://127.0.0.1:8500/v1/kv/dfc_app0?raw +${CBS_GET_MERGED_CONFIG} /usr/bin/curl -v http://127.0.0.1:10000/service_component_all/dfc_app0 + +*** Test Cases *** + +######## Single file, HTTPS with various connections +Verify single event with single 1MB file with HTTPS connection (basic authentication). From event poll to published file + [TAGS] DFC_FUNCTIONAL_40 + [Documentation] Verify single event with single HTTPS (basic authentication) 1MB file from event poll to published file. + ${cli_cmd_output}= Run Process ${DFC_ROOT}/../dfc-containers-clean.sh stderr=STDOUT + Verify Single Event From Event Poll To Published File 1 --tc400 HTTPS + + +Verify single event with single 1MB file HTTPS connection (client certificate authentication). From event poll to published file + [TAGS] DFC_FUNCTIONAL_41 + [Documentation] Verify single event with single 1MB file HTTPS connection (client certificate authentication). From event poll to published file + Verify Single Event From Event Poll To Published File 1 --tc403 HTTPS + + +Verify single event with single 1MB file HTTPS (no authentication). 
From event poll to published file + [TAGS] DFC_FUNCTIONAL_42 + [Documentation] Verify single event with single 1MB file HTTPS (no authentication). From event poll to published file + Verify Single Event From Event Poll To Published File 1 --tc404 HTTPS + + +Verify single event with single 1MB file with HTTP JWT. From event poll to published file + [TAGS] DFC_FUNCTIONAL_43 + [Documentation] Verify single event with single 1MB file with HTTP JWT. From event poll to published file + ${cli_cmd_output}= Run Process ${DFC_ROOT}/../dfc-containers-clean.sh stderr=STDOUT + Verify Single Event From Event Poll To Published File 1 --tc303 HTTP + + +Verify single event with single 1MB file with HTTPS JWT. From event poll to published file + [TAGS] DFC_FUNCTIONAL_44 + [Documentation] Verify single event with single 1MB file with HTTPS JWT. From event poll to published file + ${cli_cmd_output}= Run Process ${DFC_ROOT}/../dfc-containers-clean.sh stderr=STDOUT + Verify Single Event From Event Poll To Published File 1 --tc405 HTTPS + + +*** Keywords *** +Verify Single Event From Event Poll To Published File + [Documentation] Keyword to verify single event with file with given parameters. + [Arguments] ${file_size_in_mb} ${mr_tc} ${http_type} + Set Environment Variable MR_TC ${mr_tc} + Set Environment Variable FILE_SIZE ${file_size_in_mb}MB + Set Environment Variable HTTP_TYPE ${http_type} + Set Default Environment Variables + + ${cli_cmd_output}= Run Process ./simulators-start.sh cwd=${SIMGROUP_ROOT} + Log To Console Simulator-start: + Log To Console ${cli_cmd_output.stdout} ${cli_cmd_output.stderr} + MR Sim Emitted Files Equal 0 #Verify 0 file emitted from MR sim + DR Sim Published Files Equal 0 #Verify 0 file published to DR sim + + ${cli_cmd_output}= Run Process ${CONSUL_UPL_APP} shell=yes + Log To Console Consul APP write: + Log To Console ${cli_cmd_output.stdout} ${cli_cmd_output.stderr} + + ${cli_cmd_output}= Run Process ${CONSUL_GET_APP} shell=yes + Log To Console Consul APP read: + Log To Console ${cli_cmd_output.stdout} ${cli_cmd_output.stderr} + + ${cli_cmd_output}= Run Process ${CBS_GET_MERGED_CONFIG} shell=yes + Log To Console CBS merged configuration: + Log To Console ${cli_cmd_output.stdout} ${cli_cmd_output.stderr} + + Sleep 10 + + Start DFC + + Wait Until Keyword Succeeds 1 minute 10 sec MR Sim Emitted Files Equal 1 #Verify 1 file emitted from MR sim + Wait Until Keyword Succeeds 1 minute 10 sec DR Sim Query Not Published Equal 1 #Verify 1 query response for not published files + Wait Until Keyword Succeeds 1 minute 10 sec DR Sim Published Files Equal 1 #Verify 1 file published to DR sim + DR Redir Sim Downloaded Volume Equal ${file_size_in_mb} 000 000 #Verify correct number of bytes published file data in DR redir sim + + [Teardown] Test Teardown + +Set Default Environment Variables + [Documentation] Set default environment variables for simulators setup + Set Environment Variable DR_TC --tc normal + Set Environment Variable DR_REDIR_TC --tc normal + Set Environment Variable MR_GROUPS OpenDcae-c12:PM_MEAS_FILES + Set Environment Variable MR_FILE_PREFIX_MAPPING PM_MEAS_FILES:A + Set Environment Variable DR_REDIR_FEEDS 2:A + Set Environment Variable FTP_FILE_PREFIXES A + Set Environment Variable FTP_TYPE SFTP + Set Environment Variable HTTP_FILE_PREFIXES A + Set Environment Variable NUM_FTPFILES 1 + Set Environment Variable NUM_HTTPFILES 1 + Set Environment Variable NUM_PNFS 1 + Set Environment Variable NUM_FTP_SERVERS 1 + Set Environment Variable NUM_HTTP_SERVERS 1 + Set Environment Variable 
DR_FEEDS 2:A + Set Environment Variable DR_REDIR_SIM drsim_redir + Set Environment Variable SFTP_SIMS sftp-server0:22 + Set Environment Variable FTPES_SIMS ftpes-server-vsftpd0:21 + Set Environment Variable HTTP_SIMS http-https-server0:80 + Set Environment Variable HTTPS_SIMS http-https-server0:443 + Set Environment Variable HTTPS_SIMS_NO_AUTH http-https-server0:8080 + Set Environment Variable HTTP_JWT_SIMS http-https-server0:32000 + Set Environment Variable HTTPS_JWT_SIMS http-https-server0:32100 diff --git a/tests/dcaegen2-collectors-datafile/testsuites/HTTP-Various-Connection-Types-suite/__init__.robot b/tests/dcaegen2-collectors-datafile/testsuites/HTTP-Various-Connection-Types-suite/__init__.robot new file mode 100755 index 00000000..b4fa2714 --- /dev/null +++ b/tests/dcaegen2-collectors-datafile/testsuites/HTTP-Various-Connection-Types-suite/__init__.robot @@ -0,0 +1,2 @@ +*** Settings *** +Documentation DFC HTTP/HTTPS various connection types test suite. Single event with single file. diff --git a/tests/dcaegen2-collectors-datafile/testsuites/Strict-Host-Checking-suite/StrictHostChecking.robot b/tests/dcaegen2-collectors-datafile/testsuites/Strict-Host-Checking-suite/StrictHostChecking.robot index 55946201..61f689d1 100755 --- a/tests/dcaegen2-collectors-datafile/testsuites/Strict-Host-Checking-suite/StrictHostChecking.robot +++ b/tests/dcaegen2-collectors-datafile/testsuites/Strict-Host-Checking-suite/StrictHostChecking.robot @@ -90,7 +90,11 @@ Setup Strict Host Key Checking Test Set Environment Variable DR_REDIR_SIM drsim_redir Set Environment Variable SFTP_SIMS sftp-server0:22 Set Environment Variable FTPES_SIMS ftpes-server-vsftpd0:21 - Set Environment Variable HTTP_SIMS http-server0:80 + Set Environment Variable HTTP_SIMS http-https-server0:80 + Set Environment Variable HTTPS_SIMS http-https-server0:443 + Set Environment Variable HTTPS_SIMS_NO_AUTH http-https-server0:8080 + Set Environment Variable HTTP_JWT_SIMS http-https-server0:32000 + Set Environment Variable HTTPS_JWT_SIMS http-https-server0:32100 ${cli_cmd_output}= Run Process ./simulators-start.sh cwd=${SIMGROUP_ROOT} Log To Console Simulator-start: @@ -112,6 +116,6 @@ Setup Strict Host Key Checking Test Sleep 10 - ${cli_cmd_output}= Run Process ${DFC_ROOT}/dfc-start.sh cwd=${DFC_ROOT} env:KNOWN_HOSTS=${known_hosts_file} + ${cli_cmd_output}= Run Process ${DFC_ROOT}/dfc-start.sh cwd=${DFC_ROOT} env:KNOWN_HOSTS=${known_hosts_file} env:SIMGROUP_ROOT=${SIMGROUP_ROOT} Log To Console Dfc-start: Log To Console ${cli_cmd_output.stdout} ${cli_cmd_output.stderr} diff --git a/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot b/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot index 40923aab..9898a968 100644 --- a/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot +++ b/tests/dcaegen2-collectors-hv-ves/testcases/message-routing.robot @@ -2,6 +2,7 @@ # csit-dcaegen2-collectors-hv-ves # ================================================================================ # Copyright (C) 2018-2019 NOKIA +# Modification copyright (C) 2021 Samsung Electronics Co., Ltd. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -37,7 +38,7 @@ Message Routing Suite Setup *** Test Cases *** Correct Messages Routing [Documentation] VES-HV Collector should route all valid messages to topics specified in configuration - ... 
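The new HTTP-Various-Connection-Types suite tags its cases DFC_FUNCTIONAL_40 through DFC_FUNCTIONAL_44, so a single connection type can be run in isolation. A hedged sketch of a local invocation from the csit checkout; any additional variables the suite expects (DFC_ROOT, SIMGROUP_ROOT, etc.) depend on the local environment:

cd tests/dcaegen2-collectors-datafile/testsuites/HTTP-Various-Connection-Types-suite
robot --include DFC_FUNCTIONAL_41 HttpVariousConnectionTypes.robot   # client-certificate HTTPS case only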
and do not change message payload generated in XNF simulator + ... without changing message payload generated in xNF simulator Send Messages From xNF Simulators ${XNF_SIMULATOR} ${XNF_FIXED_PAYLOAD_REQUEST} @@ -47,7 +48,7 @@ Correct Messages Routing Too big payload message handling - [Documentation] VES-HV Collector should interrupt the stream when encountered message with too big payload + [Documentation] VES-HV Collector should interrupt the stream when a message with too big payload is encountered Send Messages From xNF Simulators ${XNF_SIMULATOR} ${XNF_TOO_BIG_PAYLOAD_REQUEST} diff --git a/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot b/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot index fee8bfd8..bab8a907 100644 --- a/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot +++ b/tests/dcaegen2-collectors-hv-ves/testcases/resources/common-keywords.robot @@ -2,6 +2,7 @@ # csit-dcaegen2-collectors-hv-ves # ================================================================================ # Copyright (C) 2018-2019 NOKIA +# Modification copyright (C) 2021 Samsung Electronics Co., Ltd. # ================================================================================ # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -53,11 +54,11 @@ Get xNF Simulators Using Valid Certificates Send Messages From xNF Simulators [Arguments] ${XNF_HOSTS_LIST} ${MESSAGE_FILEPATH} - :FOR ${HOST} IN @{XNF_HOSTS_LIST} - \ ${XNF_SIM_API_ACCESS}= Get xNF Sim Api Access Url ${HTTP_METHOD_URL} ${HOST} - \ ${XNF_SIM_API_URL}= Catenate SEPARATOR= ${XNF_SIM_API_ACCESS} ${XNF_SIM_API_PATH} - \ Send messages ${XNF_SIM_API_URL} ${MESSAGE_FILEPATH} - + FOR ${HOST} IN @{XNF_HOSTS_LIST} + ${XNF_SIM_API_ACCESS}= Get xNF Sim Api Access Url ${HTTP_METHOD_URL} ${HOST} + ${XNF_SIM_API_URL}= Catenate SEPARATOR= ${XNF_SIM_API_ACCESS} ${XNF_SIM_API_PATH} + Send messages ${XNF_SIM_API_URL} ${MESSAGE_FILEPATH} + END VES-HV Collector Test Shutdown Reset DCAE App Simulator ${DEFAULT_PERF3GPP_TOPIC} diff --git a/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/__init__.robot b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/__init__.robot new file mode 100644 index 00000000..9eda6de0 --- /dev/null +++ b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/__init__.robot @@ -0,0 +1,2 @@ +*** Settings *** +Documentation Files Processing Config PM Mapper Testcases diff --git a/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/.gitattributes b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/.gitattributes new file mode 100644 index 00000000..57cdc503 --- /dev/null +++ b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/.gitattributes @@ -0,0 +1 @@ +*.gz binary diff --git a/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/ABigFile.xml b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/ABigFile.xml new file mode 100644 index 00000000..8a3bcf4d --- /dev/null +++ b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/ABigFile.xml @@ -0,0 +1,163 @@ + + + + + + + + + + + + + + attTCHSeizures + succTCHSeizures + attImmediateAssignProcs + succImmediateAssignProcs + + 234 + 345 + 567 + 789 + + + 890 + 901 + 123 + 234 + + + 456 + 567 + 678 + 789 + true + + + + + + + attTCHSeizures1 + succTCHSeizures2 + attImmediateAssignProcs3 + succImmediateAssignProcs4 + + 4 + 86,87,2,6,77,96,75,33,24 + 40 
+ 90 + false + + + + + + + attTCHSeizures5 + succTCHSeizures6 + attImmediateAssignProcs7 + succImmediateAssignProcs8 + + 238 + 344 + 563 + 787 + + + 898 + 905 + 127 + 238 + + + 454 + 569 + 672 + 785 + true + + + + + + + + + + attTCHSeizures + succTCHSeizures + attImmediateAssignProcs + succImmediateAssignProcs + + 234 + 345 + 567 + 789 + + + 890 + 901 + 123 + 234 + + + 456 + 567 + 678 + 789 + true + + + + + + + attTCHSeizures1 + succTCHSeizures2 + attImmediateAssignProcs3 + succImmediateAssignProcs4 + + 4 + 86,87,2,6,77,96,75,33,24 + 40 + 90 + false + + + + + + + attTCHSeizures5 + succTCHSeizures6 + attImmediateAssignProcs7 + succImmediateAssignProcs8 + + 238 + 344 + 563 + 787 + + + 898 + 905 + 127 + 238 + + + 454 + 569 + 672 + 785 + true + + + + + + + diff --git a/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_10_1.env b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_10_1.env new file mode 100644 index 00000000..050d2f2a --- /dev/null +++ b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_10_1.env @@ -0,0 +1,3 @@ +PROCESSING_LIMIT_RATE=10 +THREADS_MULTIPLIER=1 +PROCESSING_THREADS_COUNT=1 diff --git a/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_1_1.env b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_1_1.env new file mode 100644 index 00000000..c138912b --- /dev/null +++ b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_1_1.env @@ -0,0 +1,3 @@ +PROCESSING_LIMIT_RATE=1 +THREADS_MULTIPLIER=1 +PROCESSING_THREADS_COUNT=1 diff --git a/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_1_10.env b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_1_10.env new file mode 100644 index 00000000..b4c290e8 --- /dev/null +++ b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_1_10.env @@ -0,0 +1,3 @@ +PROCESSING_LIMIT_RATE=1 +THREADS_MULTIPLIER=1 +PROCESSING_THREADS_COUNT=10 diff --git a/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/valid_metadata.json b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/valid_metadata.json new file mode 100644 index 00000000..da809d7b --- /dev/null +++ b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/valid_metadata.json @@ -0,0 +1,12 @@ +{ + "productName": "gnb", + "vendorName": "Ericsson", + "lastEpochMicrosec": "1538478000000", + "sourceName": "oteNB5309", + "startEpochMicrosec": "1538478900000", + "timeZoneOffset": "UTC+05.00", + "location": "ftpes://192.168.0.101:22/ftp/rop/A20161224.1045-1100.bin.gz", + "compression": "gzip", + "fileFormatType": "org.3GPP.32.435#measCollec", + "fileFormatVersion": "V9" + } \ No newline at end of file diff --git a/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/files-processing-config-pmmapper.robot b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/files-processing-config-pmmapper.robot new file mode 100644 index 00000000..09a7de4f --- /dev/null +++ b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/files-processing-config-pmmapper.robot @@ -0,0 +1,148 @@ +*** Settings *** +Documentation Testing PM Mapper functionality +Library Collections +Library OperatingSystem +Library RequestsLibrary +Library Process +Library String +Library libraries/DockerContainerManager.py +Library libraries/LogReader.py + +*** Variables *** + +${NR_VALID_METADATA_PATH} %{WORKSPACE}/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/valid_metadata.json 
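+# How this suite works: each test publishes ABigFile.xml ten times (with valid_metadata.json as the
+# X-DMAAP-DR-META header) to the Data Router node at ${PUBLISH_NODE_URL}, after restarting the
+# pmmapper container with one of the config_*.env files to vary PROCESSING_LIMIT_RATE and
+# PROCESSING_THREADS_COUNT, and then counts "|429|" entries in the datarouter-node delivery logs
+# to determine how many deliveries PM Mapper rejected.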
+${CLI_EXEC_CLI_PM_LOG_CLEAR} docker exec pmmapper /bin/sh -c "echo -n "" > /var/log/ONAP/dcaegen2/services/pm-mapper/pm-mapper_output.log" +${PUBLISH_NODE_URL} https://${DR_NODE_IP}:8443/publish/1 +${CLI_EXEC_LOGS_LIST} docker exec datarouter-node /bin/sh -c "ls /opt/app/datartr/logs" +${DOCKER_CLIENT_IMAGE} nexus3.onap.org:10001/onap/org.onap.dcaegen2.services.pm-mapper:latest +${CLIENT_CONTAINER_NAME} pmmapper +${FILE_PATH} %{WORKSPACE}/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/ABigFile.xml +${CONFIG_ENVS_1_1} %{WORKSPACE}/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_1_1.env +${CONFIG_ENVS_4_1} %{WORKSPACE}/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_4_1.env +${CONFIG_ENVS_10_1} %{WORKSPACE}/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_10_1.env +${CONFIG_ENVS_1_10} %{WORKSPACE}/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/assets/config_1_10.env + +*** Test Cases *** + +Verify that PM Mapper rejects 6-9 messages when limitRate is 1 and threads count is 1 + [Tags] FILES_PROCESSING_CONFIG_PM_MAPPER_1 + [Documentation] Verify that PM Mapper rejects 6-9/10 messages. Configuration: limitRate=1, threadsCount=1 + [Timeout] 15 minute + + RestartPmmapper ${CONFIG_ENVS_1_1} + + ${testname}= Set Variable Afirst- + + SendFilesToDatarouter ${testname} + ${alllogs}= GetLogsOutput + ${filtered_logs}= GetFilteredLogs ${alllogs} ${testname} + ${dropped_nr}= GetDroppedNumber ${filtered_logs} + + Sleep 40s + ${isCorrectDroppedCount}= Evaluate ${5} < ${dropped_nr} < ${10} + SavePmMapperLogsAndDroppedCount config_1_1 ${dropped_nr} + Should Be True ${isCorrectDroppedCount} Pm-mapper drop: ${dropped_nr} messages. Expected drop count: 6-9 + ClearLogs + +Verify that PM Mapper rejects 0 messages when limitRate is 10 and threads count is 1 + [Tags] FILES_PROCESSING_CONFIG_PM_MAPPER_2 + [Documentation] Verify that PM Mapper rejects 0/10 messages. Configuration: limitRate=10, threadsCount=1 + [Timeout] 25 minute + + RestartPmmapper ${CONFIG_ENVS_10_1} + + ${testname}= Set Variable Athird- + + SendFilesToDatarouter ${testname} + ${alllogs}= GetLogsOutput + ${filtered_logs}= GetFilteredLogs ${alllogs} ${testname} + ${dropped_nr}= GetDroppedNumber ${filtered_logs} + + Sleep 15s + SavePmMapperLogsAndDroppedCount config_10_1 ${dropped_nr} + Should Be Equal As Numbers ${dropped_nr} 0 Pm-mapper drop: ${dropped_nr} messages. Expected drop count: 0 + ClearLogs + +Verify that PM Mapper rejects 0 messages when limitRate is 1 and threads count is 10 + [Tags] FILES_PROCESSING_CONFIG_PM_MAPPER_3 + [Documentation] Verify that PM Mapper rejects 0/10 messages. Configuration: limitRate=1, threadsCount=10 + [Timeout] 25 minute + + RestartPmmapper ${CONFIG_ENVS_1_10} + + ${testname}= Set Variable Afourth- + + SendFilesToDatarouter ${testname} + ${alllogs}= GetLogsOutput + ${filtered_logs}= GetFilteredLogs ${alllogs} ${testname} + ${dropped_nr}= GetDroppedNumber ${filtered_logs} + + Sleep 15s + SavePmMapperLogsAndDroppedCount config_1_10 ${dropped_nr} + Should Be Equal As Numbers ${dropped_nr} 0 Pm-mapper drop: ${dropped_nr} messages. 
Expected drop count: 0 + ClearLogs + +*** Keywords *** + +SendFilesToDatarouter + [Arguments] ${testnr} + FOR ${i} IN RANGE 10 + SendToDatarouter ${FILE_PATH} ${NR_VALID_METADATA_PATH} X-ONAP-RequestID=${i} ${testnr} ${i} + END + Sleep 20s + +SendToDatarouter + [Arguments] ${filepath} ${metadatapath} ${request_id} ${testnr} ${i} + ${pmdata}= Get File ${filepath} + ${metatdata} Get File ${metadatapath} + ${newFilename} Catenate SEPARATOR= ${testnr} ${i} .xml + ${resp}= PutCall ${PUBLISH_NODE_URL}/${newFilename} ${request_id} ${pmdata} ${metatdata.replace("\n","")} pmmapper + VerifyResponse ${resp.status_code} 204 + +PutCall + [Arguments] ${url} ${request_id} ${data} ${meta} ${user} + ${headers}= Create Dictionary X-ONAP-RequestID=${request_id} X-DMAAP-DR-META=${meta} Content-Type=application/octet-stream X-DMAAP-DR-ON-BEHALF-OF=${user} Authorization=Basic cG1tYXBwZXI6cG1tYXBwZXI= + ${resp}= Evaluate requests.put('${url}', data="""${data}""", headers=${headers}, verify=False, allow_redirects=False) requests + [Return] ${resp} + +VerifyResponse + [Arguments] ${actual_response_value} ${expected_response_value} + Should Be Equal As Strings ${actual_response_value} ${expected_response_value} + +ClearLogs + Run Process ${CLI_EXEC_CLI_PM_LOG_CLEAR} shell=yes + +GetLogsOutput + ${filesString}= Run Process ${CLI_EXEC_LOGS_LIST} shell=yes + ${filesList}= Get Log Files List ${filesString.stdout} + ${output}= Set Variable ${EMPTY} + FOR ${file} IN @{filesList} + ${file_path}= Catenate SEPARATOR= "cat /opt/app/datartr/logs/ ${file} " + ${exec}= Catenate docker exec datarouter-node /bin/sh -c ${file_path} + ${single_file}= Run Process ${exec} shell=yes + ${output}= Catenate SEPARATOR=\n ${output} ${single_file.stdout} + END + [Return] ${output} + +GetFilteredLogs + [Arguments] ${all_logs} ${testname} + ${filtered_logs}= Filter Unique ${all_logs} ${testname} + [Return] ${filtered_logs} + +GetDroppedNumber + [Arguments] ${logs_output} + ${number}= Get Number Of Dropped Messages ${logs_output} + [Return] ${number} + +RestartPmmapper + [Arguments] ${envs} + Remove Container ${CLIENT_CONTAINER_NAME} + Sleep 5s + Run Pmmapper Container ${DOCKER_CLIENT_IMAGE} ${CLIENT_CONTAINER_NAME} ${envs} ${DR_NODE_IP} ${NODE_IP} + Sleep 15s + +SavePmMapperLogsAndDroppedCount + [Arguments] ${test_name} ${dropped_count} + Run Process echo "Dropped: ${dropped_count}" > %{WORKSPACE}/archives/${test_name}_dropped_count.log shell=yes + Run Process docker logs ${CLIENT_CONTAINER_NAME} > %{WORKSPACE}/archives/${test_name}_pm_mapper_container_logs.log shell=yes + diff --git a/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/libraries/DockerContainerManager.py b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/libraries/DockerContainerManager.py new file mode 100644 index 00000000..3e3ae58e --- /dev/null +++ b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/libraries/DockerContainerManager.py @@ -0,0 +1,29 @@ +import docker +from EnvsReader import EnvsReader +from docker.types import Mount + +class DockerContainerManager: + + def run_pmmapper_container(self, client_image, container_name, path_to_env, dr_node_ip, mr_ip): + client = docker.from_env() + environment = EnvsReader().read_env_list_from_file(path_to_env) + environment.append("CONFIG_BINDING_SERVICE_SERVICE_HOST=172.18.0.5") + environment.append("CONFIG_BINDING_SERVICE_SERVICE_PORT=10000") + environment.append("HOSTNAME=pmmapper") + client.containers.run( + image=client_image, + name=container_name, + environment=environment, + ports={'8081': 8081}, + 
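+            # Wiring notes: port 8081 is PM Mapper's HTTP port, published to the test host; the network
+            # name below appears to be the docker-compose generated "<project>_pmmapper-network" for this
+            # suite; extra_hosts maps the dmaap-dr-node and message-router hostnames to the IPs passed in;
+            # and the /var/tmp bind mount is expected to provide PM Mapper's certificates.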
network='filesprocessingconfigpmmapper_pmmapper-network', + extra_hosts={'dmaap-dr-node': dr_node_ip, 'message-router': mr_ip}, + user='root', + mounts=[Mount(target='/opt/app/pm-mapper/etc/certs/', source='/var/tmp/', type='bind')], + detach=True + ) + + def remove_container(self, container_name): + client = docker.from_env() + container = client.containers.get(container_name) + container.stop() + container.remove() diff --git a/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/libraries/EnvsReader.py b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/libraries/EnvsReader.py new file mode 100644 index 00000000..cc60eed6 --- /dev/null +++ b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/libraries/EnvsReader.py @@ -0,0 +1,11 @@ + +class EnvsReader: + + def read_env_list_from_file(self, path): + f = open(path, "r") + r_list = [] + for line in f: + line = line.strip() + if line[0] != "#": + r_list.append(line) + return r_list diff --git a/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/libraries/LogReader.py b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/libraries/LogReader.py new file mode 100644 index 00000000..01718e35 --- /dev/null +++ b/tests/dcaegen2-pmmapper/files-processing-config-pmmapper/libraries/LogReader.py @@ -0,0 +1,22 @@ +import re + +class LogReader: + + def filter_unique(self, merged_logs_output, testname): + logs = merged_logs_output.splitlines() + del_logs = list(filter(lambda line: "|DEL|" in line, logs)) + nrs_set = set() + ret_logs = set() + for log in del_logs: + filename = re.findall(testname + "\d", log) + if len(filename) > 0 and filename[0] not in nrs_set: + ret_logs.add(log) + nrs_set.add(filename[0]) + return ret_logs + + def get_number_of_dropped_messages(self, logs_output): + return len(list(filter(lambda line: "|429|" in line, logs_output))) + + def get_log_files_list(self, fileNames): + files = fileNames.split() + return files diff --git a/tests/dcaegen2/testcases/assets/json_events/ves7_pnf_registration_event.json b/tests/dcaegen2/testcases/assets/json_events/ves7_pnf_registration_event.json index dcd3aa5d..1726e064 100644 --- a/tests/dcaegen2/testcases/assets/json_events/ves7_pnf_registration_event.json +++ b/tests/dcaegen2/testcases/assets/json_events/ves7_pnf_registration_event.json @@ -28,7 +28,7 @@ "unitFamily": "BBU", "vendorName": "Nokia", "oamV4IpAddress": "10.11.12.13", - "oamV6IpAddress": "1::::9", + "oamV6IpAddress": "2001:db2:31:1041:204a::1337", "softwareVersion": "val7" } } diff --git a/tests/dmaap-buscontroller/ssl_suite/test1.robot b/tests/dmaap-buscontroller/ssl_suite/test1.robot index 999c6baf..b5723377 100644 --- a/tests/dmaap-buscontroller/ssl_suite/test1.robot +++ b/tests/dmaap-buscontroller/ssl_suite/test1.robot @@ -20,28 +20,28 @@ Dir Test Url Test [Documentation] Check if www.onap.org can be reached - Create Session openo http://www.onap.org + Create Session openo http://www.onap.org disable_warnings=True CheckUrl openo / 200 HTTPS Heartbeat Test [Documentation] Check ${DBC_URI}/info SSL endpoint - Create Session heartbeat https://${DMAAPBC_IP}:8443 + Create Session heartbeat https://${DMAAPBC_IP}:8443 disable_warnings=True CheckUrl heartbeat ${DBC_URI}/info 204 HTTPS Dmaap Init Test [Documentation] Check ${DBC_URI}/dmaap SSL endpoint - Create Session heartbeat https://${DMAAPBC_IP}:8443 - CheckStatus heartbeat ${DBC_URI}/dmaap "VALID" + Create Session heartbeat https://${DMAAPBC_IP}:8443 disable_warnings=True + CheckStatus heartbeat ${DBC_URI}/dmaap "VALID" HTTPS Dmaap dcaeLocations Test 
[Documentation] Check ${DBC_URI}/dcaeLocations SSL endpoint - Create Session heartbeat https://${DMAAPBC_IP}:8443 - CheckStatus0 heartbeat ${DBC_URI}/dcaeLocations "VALID" + Create Session heartbeat https://${DMAAPBC_IP}:8443 disable_warnings=True + CheckStatus0 heartbeat ${DBC_URI}/dcaeLocations "VALID" HTTPS Dmaap mr_clusters Test [Documentation] Check ${DBC_URI}/mr_clusters SSL endpoint - Create Session heartbeat https://${DMAAPBC_IP}:8443 - CheckStatus0 heartbeat ${DBC_URI}/mr_clusters "VALID" + Create Session heartbeat https://${DMAAPBC_IP}:8443 disable_warnings=True + CheckStatus0 heartbeat ${DBC_URI}/mr_clusters "VALID" *** Keywords *** @@ -51,12 +51,12 @@ CheckDir CheckUrl [Arguments] ${session} ${path} ${expect} - ${resp}= Get Request ${session} ${path} + ${resp}= RequestsLibrary.Get On Session ${session} ${path} Should Be Equal As Integers ${resp.status_code} ${expect} CheckStatus [Arguments] ${session} ${path} ${expect} - ${resp}= Get Request ${session} ${path} + ${resp}= RequestsLibrary.Get On Session ${session} ${path} log ${resp.content} ${val}= Get Json value ${resp.content} /status log ${val} @@ -64,7 +64,7 @@ CheckStatus CheckStatus0 [Arguments] ${session} ${path} ${expect} - ${resp}= Get Request ${session} ${path} + ${resp}= RequestsLibrary.Get On Session ${session} ${path} log ${resp.json()} log ${resp.content} # silliness to strip off the brackets returned for a List to get a Dict diff --git a/tests/policy/apex-pdp/apex-pdp-test.robot b/tests/policy/apex-pdp/apex-pdp-test.robot index 9fdbc12e..54838a39 100644 --- a/tests/policy/apex-pdp/apex-pdp-test.robot +++ b/tests/policy/apex-pdp/apex-pdp-test.robot @@ -48,7 +48,7 @@ DeployPolicy ${resp}= Post Request policy /policy/pap/v1/pdps/deployments/batch data=${postjson} headers=${headers} Log Received response from policy5 ${resp.text} ${postjsonobject} To Json ${postjson} - Should Be Equal As Strings ${resp.status_code} 200 + Should Be Equal As Strings ${resp.status_code} 202 RunEventOnApexEngine Create Session apexSession http://${APEX_IP}:23324 max_retries=1 diff --git a/tests/policy/drools-applications/drools-applications-test.robot b/tests/policy/drools-applications/drools-applications-test.robot index 0b73ccf8..2ab05810 100644 --- a/tests/policy/drools-applications/drools-applications-test.robot +++ b/tests/policy/drools-applications/drools-applications-test.robot @@ -122,7 +122,7 @@ DeployXacmlPolicies ${headers}= Create Dictionary Accept=application/json Content-Type=application/json ${resp}= Post Request policy /policy/pap/v1/pdps/deployments/batch data=${postjson} headers=${headers} Log Received response from pap ${resp.text} - Should Be Equal As Strings ${resp.status_code} 200 + Should Be Equal As Strings ${resp.status_code} 202 ${result}= Run Process ${SCR2}/wait_topic.sh POLICY-PDP-PAP ... responseTo xacml ACTIVE restart Log Received status ${result.stdout} @@ -140,7 +140,7 @@ DeployDroolsPolicies ${headers}= Create Dictionary Accept=application/json Content-Type=application/json ${resp}= Post Request policy /policy/pap/v1/pdps/deployments/batch data=${postjson} headers=${headers} Log Received response from pap ${resp.text} - Should Be Equal As Strings ${resp.status_code} 200 + Should Be Equal As Strings ${resp.status_code} 202 ${result}= Run Process ${SCR2}/wait_topic.sh POLICY-PDP-PAP ... 
responseTo drools ACTIVE Log Received status ${result.stdout} diff --git a/tests/policy/pap/pap-test.robot b/tests/policy/pap/pap-test.robot index 3e8bc211..d0837fa9 100644 --- a/tests/policy/pap/pap-test.robot +++ b/tests/policy/pap/pap-test.robot @@ -81,7 +81,7 @@ DeployPdpGroups ${headers}= Create Dictionary Accept=application/json Content-Type=application/json ${resp}= Post Request policy /policy/pap/v1/pdps/deployments/batch data=${postjson} headers=${headers} Log Received response from policy ${resp.text} - Should Be Equal As Strings ${resp.status_code} 200 + Should Be Equal As Strings ${resp.status_code} 202 UndeployPolicy [Documentation] Runs Policy PAP Undeploy a Policy from PDP Groups @@ -91,7 +91,7 @@ UndeployPolicy ${headers}= Create Dictionary Accept=application/json Content-Type=application/json ${resp}= Delete Request policy /policy/pap/v1/pdps/policies/onap.restart.tca headers=${headers} Log Received response from policy ${resp.text} - Should Be Equal As Strings ${resp.status_code} 200 + Should Be Equal As Strings ${resp.status_code} 202 QueryPdpGroupsAfterUndeploy [Documentation] Runs Policy PAP Query PDP Groups after Undeploy diff --git a/tests/policy/xacml-pdp/xacml-pdp-test.robot b/tests/policy/xacml-pdp/xacml-pdp-test.robot index 150c97e6..03b0ca7a 100644 --- a/tests/policy/xacml-pdp/xacml-pdp-test.robot +++ b/tests/policy/xacml-pdp/xacml-pdp-test.robot @@ -83,7 +83,7 @@ DeployPolicies ${resp}= Post Request policy /policy/pap/v1/pdps/policies data=${postjson} headers=${headers} Log Received response from policy5 ${resp.text} ${postjsonobject} To Json ${postjson} - Should Be Equal As Strings ${resp.status_code} 200 + Should Be Equal As Strings ${resp.status_code} 202 ${result}= Run Process ${SCR_DMAAP}/wait_topic.sh POLICY-PDP-PAP ... 
responseTo xacml ACTIVE onap.restart.tca @@ -202,7 +202,7 @@ UndeployMonitorPolicy ${headers}= Create Dictionary Accept=application/json Content-Type=application/json ${resp}= Delete Request policy /policy/pap/v1/pdps/policies/onap.restart.tca headers=${headers} Log Received response from policy ${resp.text} - Should Be Equal As Strings ${resp.status_code} 200 + Should Be Equal As Strings ${resp.status_code} 202 GetStatisticsAfterUndeploy [Documentation] Runs Policy Xacml PDP Statistics after policy is undeployed diff --git a/tests/sdnc/healthcheck/test1.robot b/tests/sdnc/healthcheck/test1.robot index 3648a8e4..85783a79 100644 --- a/tests/sdnc/healthcheck/test1.robot +++ b/tests/sdnc/healthcheck/test1.robot @@ -6,7 +6,7 @@ Library json Library String *** Variables *** -${SDN_APIDOCS_URI} /apidoc/apis +${SDN_APIDOCS_URI} /apidoc/openapi3/18/apis/single ${SDN_HEALTHCHECK_OPERATION_PATH} /operations/SLI-API:healthcheck *** Test Cases *** diff --git a/tests/sdnc/sdnc_netconf_tls_post_deploy/resources/sdnc-keywords.robot b/tests/sdnc/sdnc_netconf_tls_post_deploy/resources/sdnc-keywords.robot index 52cc5d2f..3ea61649 100644 --- a/tests/sdnc/sdnc_netconf_tls_post_deploy/resources/sdnc-keywords.robot +++ b/tests/sdnc/sdnc_netconf_tls_post_deploy/resources/sdnc-keywords.robot @@ -55,8 +55,6 @@ Send Get Request And Validate TLS Connection Response &{headers1}= Create Dictionary Authorization=Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ== Content-Type=application/json Accept=application/json ${resp1}= Get Request sdnc_restconf ${PNFSIM_MOUNT_PATH} headers=${headers1} Should Be Equal As Strings ${resp1.status_code} ${resp_code} - Should Contain ${resp1.content} netconf-id - Should Contain ${resp1.content} netconf-param Send Delete Request And Validate PNF Mount Deleted [Documentation] Send request to passed url and validate received response diff --git a/tests/sdnc/sdnc_netconf_tls_post_deploy/resources/sdnc-properties.robot b/tests/sdnc/sdnc_netconf_tls_post_deploy/resources/sdnc-properties.robot index 131a52f9..2f2d6f5a 100644 --- a/tests/sdnc/sdnc_netconf_tls_post_deploy/resources/sdnc-properties.robot +++ b/tests/sdnc/sdnc_netconf_tls_post_deploy/resources/sdnc-properties.robot @@ -28,10 +28,10 @@ ${SDNC_NETWORK_TOPOLOGY} /config/network-topology:network-topolo ${MOUNT_PATH} %{WORKSPACE}/tests/sdnc/sdnc_netconf_tls_post_deploy/cert-data ${SDNC_CSR_FILE} %{WORKSPACE}/tests/sdnc/sdnc_netconf_tls_post_deploy/csr/sdnc_csr.env ${SDNC_MOUNT_PATH} /config/network-topology:network-topology/topology/topology-netconf/node/PNFDemo -${PNFSIM_MOUNT_PATH} /config/network-topology:network-topology/topology/topology-netconf/node/PNFDemo/yang-ext:mount/mynetconf:netconflist +${PNFSIM_MOUNT_PATH} /config/network-topology:network-topology/topology/topology-netconf/node/PNFDemo/yang-ext:mount/turing-machine:turing-machine # Netconf-Pnp-Simulator ${NETCONF_PNP_SIM_CONTAINER_NAME} %{NETCONF_PNP_SIM_CONTAINER_NAME} ${NETCONF_PNP_SIM_CSR_FILE} %{WORKSPACE}/tests/sdnc/sdnc_netconf_tls_post_deploy/csr/netconf_pnp_simulator_csr.env ${CONF_SCRIPT} %{WORKSPACE}/tests/sdnc/sdnc_netconf_tls_post_deploy/libraries/config.sh -${CONF_TLS_SCRIPT} %{WORKSPACE}/tests/sdnc/sdnc_netconf_tls_post_deploy/libraries/config_tls.sh \ No newline at end of file +${CONF_TLS_SCRIPT} %{WORKSPACE}/tests/sdnc/sdnc_netconf_tls_post_deploy/libraries/config_tls.sh diff --git a/tests/sdnc/sdnc_netconf_tls_post_deploy/sdnc_post_deploy_cert_check.robot b/tests/sdnc/sdnc_netconf_tls_post_deploy/sdnc_post_deploy_cert_check.robot 
index c2b35e12..4d935974 100644 --- a/tests/sdnc/sdnc_netconf_tls_post_deploy/sdnc_post_deploy_cert_check.robot +++ b/tests/sdnc/sdnc_netconf_tls_post_deploy/sdnc_post_deploy_cert_check.robot @@ -9,52 +9,19 @@ Suite Setup Create sessions *** Test Cases *** -Health Check AAF CertService - [Tags] AAF-CERT-SERVICE - [Documentation] Service is Up and Running - Run health check - -Reload AAF CertService Configuration - [Tags] AAF-CERT-SERVICE - [Documentation] Configuration is Reloaded - Send Get Request And Validate Response /reload 200 - -Check AAF CertService Container Is Ready - [Tags] AAF-CERT-SERVICE - [Documentation] Send Request to /ready Endpoint and Expect 200 - Send Get Request And Validate Response /ready 200 - Check SDNC Keystore For Netopeer2 Certificates [Tags] SDNC-NETOPEER2-CERT-DEPLOYMENT [Documentation] Checking Keystore after SDNC istallation Send Get Request And Validate Response Sdnc ${SDNC_KEYSTORE_CONFIG_PATH} 200 + Check SDNC And PNF TLS Connection Over Netopeer2 Certificates [Tags] SDNC-PNF-TLS-CONNECTION-CHECK - [Documentation] Checking PNF Mount after SDNC Installation + [Documentation] Checking PNF Mount after SDNC Installation Send Get Request And Validate TLS Connection Response ${SDNC_MOUNT_PATH} 200 Check PNF Delete And Remove Netopeer2 Certificates From Keystore [Tags] SDNC-PNF-MOUNT-DELETE-CLEAR-KEYSTORE [Documentation] Checking PNF Mount Delete from SDNC - Send Delete Request And Validate PNF Mount Deleted ${SDNC_MOUNT_PATH} 200 - -Check AAF-CertService Successfully Creates Certificates for SDNC - [Tags] AAF-CERT-SERVICE-SDNC - [Documentation] Run with SDNC CSR and Expected Exit Code 0 - Run Cert Service Client And Validate JKS File Creation And Client Exit Code ${SDNC_CSR_FILE} ${SDNC_CONTAINER_NAME} 0 - -Check SDNC-ODL Certificates Installation In Keystore And Truststore - [Tags] SDNC-ODL-CERTIFICATE-KEYSTORE-VALIDATE - [Documentation] Validate Certificates Got Installed in SDNC-ODL Keystore - Send Get Request And Validate Response Sdnc ${SDNC_KEYSTORE_CONFIG_PATH} 200 - -Check AAF-CertService Successfully Creates Certificates for Netconf-Pnp-Simulator - [Tags] AAF-CERT-SERVICE-NETCONF_PNP_SIMULATOR - [Documentation] Run with NETCONF-PNP-SIMULATOR CSR and Expect Exit Code 0 - Run Cert Service Client And Validate JKS File Creation And Client Exit Code ${NETCONF_PNP_SIM_CSR_FILE} ${NETCONF_PNP_SIM_CONTAINER_NAME} 0 + Send Delete Request And Validate PNF Mount Deleted ${SDNC_MOUNT_PATH} 200 -Check SDNC-ODL Netconf-Pnp-Simulatore TLS Connection Establishment - [Tags] SDNC-ODL-NETCONF-PNP_SIMULATION-TLS-CONNECTION - [Documentation] Validate SDNC-ODL and Netconf-Pnp-Simulation TLS Connection Establishment - Send Get Request And Validate TLS Connection Response ${SDNC_MOUNT_PATH} 200 \ No newline at end of file diff --git a/tests/so/etsi/data/createNetworkServiceRequest.json b/tests/so/etsi/data/createNetworkServiceRequest.json new file mode 100644 index 00000000..6980068e --- /dev/null +++ b/tests/so/etsi/data/createNetworkServiceRequest.json @@ -0,0 +1 @@ +{"nsdId": "9bb8c882-44a1-4b67-a12c-5a998e18d6ba", "nsName": "demo", "nsDescription": "demo"} diff --git a/tests/so/etsi/data/instantiateNetworkServiceRequest.json b/tests/so/etsi/data/instantiateNetworkServiceRequest.json new file mode 100644 index 00000000..44a5d436 --- /dev/null +++ b/tests/so/etsi/data/instantiateNetworkServiceRequest.json @@ -0,0 +1,14 @@ +{ + "nsFlavourId": "default", + "locationConstraints": [{ + "vnfProfileId": "b1bb0ce7-2222-4fa7-95ed-4840d70a1177" + }], + "additionalParamsForVnf": [{ 
+ "vnfProfileId": "b1bb0ce7-2222-4fa7-95ed-4840d70a1177", + "vnfInstanceName": "etsiNsVnfCsit1", + "vnfInstanceDescription": "test", + "additionalParams": { + "vim_id": "CloudOwner_EtsiCloudRegion_693c7729b2364a26a3ca602e6f66187d" + } + }] +} diff --git a/tests/so/etsi/data/networkServicePackageOnboardRequest.json b/tests/so/etsi/data/networkServicePackageOnboardRequest.json new file mode 100644 index 00000000..f6ffeb2e --- /dev/null +++ b/tests/so/etsi/data/networkServicePackageOnboardRequest.json @@ -0,0 +1 @@ +{"csarId": "9bb8c882-44a1-4b67-a12c-5a998e18d6ba"} diff --git a/tests/so/etsi/data/responses/expectedVnfPackage.json b/tests/so/etsi/data/responses/expectedVnfPackage.json index c841956c..c97865ee 100644 --- a/tests/so/etsi/data/responses/expectedVnfPackage.json +++ b/tests/so/etsi/data/responses/expectedVnfPackage.json @@ -10,13 +10,13 @@ "operationalState": "ENABLED", "_links": { "self": { - "href": "http://so-vnfm-adapter:9092/so/vnfm-adapter/v1/vnfpkgm/v1/vnf_packages/73522444-e8e9-49c1-be29-d355800aa349" + "href": "http://so-etsi-sol003-adapter.onap:9092/so/vnfm-adapter/v1/vnfpkgm/v1/vnf_packages/73522444-e8e9-49c1-be29-d355800aa349" }, "vnfd": { - "href": "http://so-vnfm-adapter:9092/so/vnfm-adapter/v1/vnfpkgm/v1/vnf_packages/73522444-e8e9-49c1-be29-d355800aa349/vnfd" + "href": "http://so-etsi-sol003-adapter.onap:9092/so/vnfm-adapter/v1/vnfpkgm/v1/vnf_packages/73522444-e8e9-49c1-be29-d355800aa349/vnfd" }, "packageContent": { - "href": "http://so-vnfm-adapter:9092/so/vnfm-adapter/v1/vnfpkgm/v1/vnf_packages/73522444-e8e9-49c1-be29-d355800aa349/package_content" + "href": "http://so-etsi-sol003-adapter.onap:9092/so/vnfm-adapter/v1/vnfpkgm/v1/vnf_packages/73522444-e8e9-49c1-be29-d355800aa349/package_content" } } } diff --git a/tests/so/etsi/data/subscriptionRequest.json b/tests/so/etsi/data/subscriptionRequest.json new file mode 100644 index 00000000..c54bf3c2 --- /dev/null +++ b/tests/so/etsi/data/subscriptionRequest.json @@ -0,0 +1,23 @@ +{ + "filter": { + "notificationTypes": [ + "VnfPackageOnboardingNotification", + "VnfPackageChangeNotification" + ], + "vnfdId": [ + "b1bb0ce7-2222-4fa7-95ed-4840d70a1177" + ], + "operationalState": ["ENABLED", "DISABLED"] + }, + "callbackUri": "http://so-vnfm-simulator:9093/vnfpkgm/v1/notification", + "authentication": { + "authType": [ + "OAUTH2_CLIENT_CREDENTIALS" + ], + "paramsOauth2ClientCredentials": { + "clientId": "vnfm", + "clientPassword": "password1$", + "tokenEndpoint": "http://so-vnfm-simulator:9093/oauth/token?grant_type=client_credentials" + } + } +} diff --git a/tests/so/etsi/etsi_nfvo_ns_lcm_tests.robot b/tests/so/etsi/etsi_nfvo_ns_lcm_tests.robot new file mode 100644 index 00000000..23995d93 --- /dev/null +++ b/tests/so/etsi/etsi_nfvo_ns_lcm_tests.robot @@ -0,0 +1,76 @@ +*** Settings *** +Library Collections +Library RequestsLibrary +Library OperatingSystem +Library json +Library ArchiveLibrary + +*** Variables *** +${NFVO_NS_LCM_BASE_URL}= /so/so-etsi-nfvo-ns-lcm/v1/api/nslcm/v1 +${BASIC_AUTH}= Basic c28tZXRzaS1uZnZvLW5zLWxjbTpwYXNzd29yZDEk + +Documentation Test cases for ETSI NFVO NS Lifecycle Management Operations +... Create and Delete tests are synchronous +... Instantiate and Terminate tests are asynchronous, test status checked through request to NS_LCM_OP_OCCs endpoint +... Note, relies on: +... 
-Network Service package being onboarded in etsi_package_onboarding_tests + +*** Test Cases *** + +Invoke Create Network Service + Create Session etsi_nfvo_ns_lcm_session http://${REPO_IP}:9095 + ${data}= Get Binary File ${CURDIR}${/}data${/}createNetworkServiceRequest.json + &{headers}= Create Dictionary Authorization=${BASIC_AUTH} Content-Type=application/json Accept=application/json HTTP_GLOBALCUSTOMERID=DemoCustomer + ${create_network_service_request}= POST On Session etsi_nfvo_ns_lcm_session ${NFVO_NS_LCM_BASE_URL}/ns_instances data=${data} headers=${headers} + log to console ${create_network_service_request.content} + ${create_network_service_json_response}= Evaluate json.loads(r"""${create_network_service_request.content}""", strict=False) json + ${request_Id}= Set Variable ${create_network_service_json_response}[id] + SET GLOBAL VARIABLE ${request_Id} + + Should Be Equal As Strings '${create_network_service_request.status_code}' '201' + +Invoke Instantiate Network Service + Run Keyword If "${request_Id}"!="${EMPTY}" Log to Console Network Service ID :${request_Id} + ... ELSE Fail \nInvalid Network Service ID :${request_Id} received + Create Session etsi_nfvo_ns_lcm_session http://${REPO_IP}:9095 + ${data}= Get Binary File ${CURDIR}${/}data${/}instantiateNetworkServiceRequest.json + &{headers}= Create Dictionary Authorization=${BASIC_AUTH} Content-Type=application/json Accept=application/json + ${instantiate_network_service_request}= POST On Session etsi_nfvo_ns_lcm_session ${NFVO_NS_LCM_BASE_URL}/ns_instances/${request_Id}/instantiate data=${data} headers=${headers} + Run Keyword If '${instantiate_network_service_request.status_code}' == '202' log to console \nexecuted with expected result + ... ELSE Fail \nInstantiate Network Service Request Received Response: ${instantiate_network_service_request.status_code} + log to console \n${instantiate_network_service_request.content} + + Wait Until Keyword Succeeds 3 min 5 secs Get NS LCM OP OCCs + +Invoke Terminate Network Service + Run Keyword If "${actual_request_state}"=="COMPLETED" Log to Console NS LCM OP OCCs State: ${actual_request_state} + ... ELSE Fail \nTerminate Network Service Failed to Start. Instantiate Network Service Request State: ${actual_request_state} + Create Session etsi_nfvo_ns_lcm_session http://${REPO_IP}:9095 + &{headers}= Create Dictionary Authorization=${BASIC_AUTH} Content-Type=application/json Accept=application/json + ${terminate_network_service_request}= POST On Session etsi_nfvo_ns_lcm_session ${NFVO_NS_LCM_BASE_URL}/ns_instances/${request_Id}/terminate headers=${headers} + Run Keyword If '${terminate_network_service_request.status_code}' == '202' log to console \nexecuted with expected result + ... ELSE Fail \nTerminate Network Service Request Received Response: ${terminate_network_service_request.status_code} + log to console \n${terminate_network_service_request.content} + + Wait Until Keyword Succeeds 3 min 5 secs Get NS LCM OP OCCs + +Invoke Delete Network Service + Run Keyword If "${actual_request_state}"=="COMPLETED" Log to Console NS LCM OP OCCs State: ${actual_request_state} + ... ELSE Fail \nDelete Network Service Failed to Start. 
Invalid Previous Request State Received: ${actual_request_state} + Create Session etsi_nfvo_ns_lcm_session http://${REPO_IP}:9095 + &{headers}= Create Dictionary Authorization=${BASIC_AUTH} Content-Type=application/json Accept=application/json + ${delete_network_service_request}= DELETE On Session etsi_nfvo_ns_lcm_session ${NFVO_NS_LCM_BASE_URL}/ns_instances/${request_Id} headers=${headers} + log to console \n${delete_network_service_request.content} + + Should Be Equal As Strings '${delete_network_service_request.status_code}' '204' + +*** Keywords *** + +Get NS LCM OP OCCs + ${ns_lcm_status_request}= GET On Session etsi_nfvo_ns_lcm_session ${NFVO_NS_LCM_BASE_URL}/ns_lcm_op_occs/${request_Id} + log to console \n${ns_lcm_status_request.content} + ${ns_lcm_request_json_response}= Evaluate json.loads(r"""${ns_lcm_status_request.content}""", strict=False) json + ${actual_request_state}= SET VARIABLE ${ns_lcm_request_json_response}[operationState] + SET GLOBAL VARIABLE ${actual_request_state} + Should Be Equal As Strings ${ns_lcm_status_request.status_code} 200 + Should Be Equal As Strings ${actual_request_state} COMPLETED diff --git a/tests/so/etsi/etsi_package_onboarding_tests.robot b/tests/so/etsi/etsi_package_onboarding_tests.robot index 96c9896d..5b8a9013 100644 --- a/tests/so/etsi/etsi_package_onboarding_tests.robot +++ b/tests/so/etsi/etsi_package_onboarding_tests.robot @@ -3,6 +3,13 @@ Library Collections Library RequestsLibrary Library OperatingSystem Library json +Documentation Test cases for onboarding a VNF and Network Service package and distributing the VNF Package +... Note: onboarded VNF package is used in later VNF tests including: +... -etsi_vnf_lcm_tests.robot +... -etsi_vnf_package_management_tests.robot +... -etsi_vnf_notification_tests.robot +... Note: onboarded Network Service package is used in later test: +... -etsi_nfvo_ns_lcm_tests.robot *** Variables *** ${SLEEP_INTERVAL_SEC}= 5 @@ -46,6 +53,17 @@ OnBoard VNF Package In Etsi Catalog Run Keyword If '${actual_job_status}' == 'finished' log to console \nexecuted with expected result Should Be Equal As Strings '${actual_job_status}' 'finished' +Onboard Network Service Package In Etsi Catalog + Create Session etsi_catalog_session http://${REPO_IP}:8806 + ${data}= Get Binary File ${CURDIR}${/}data${/}networkServicePackageOnboardRequest.json + &{headers}= Create Dictionary Content-Type=application/json Accept=application/json + ${resp}= POST On Session etsi_catalog_session /api/catalog/v1/nspackages data=${data} headers=${headers} + log to console ${resp.content} + + Run Keyword If '${resp.status_code}' == '202' log to console \nexecuted with expected result + Should Be Equal As Strings '${resp.status_code}' '202' + + Distribute Service Template Create Session sdc_controller_session http://${REPO_IP}:8085 ${data}= Get Binary File ${CURDIR}${/}data${/}distributeServiceTemplate.json diff --git a/tests/so/etsi/etsi_vnf_lcm_tests.robot b/tests/so/etsi/etsi_vnf_lcm_tests.robot index fcfb515a..e7b291cb 100644 --- a/tests/so/etsi/etsi_vnf_lcm_tests.robot +++ b/tests/so/etsi/etsi_vnf_lcm_tests.robot @@ -3,6 +3,9 @@ Library Collections Library RequestsLibrary Library OperatingSystem Library json +Documentation Test cases for VNF lifecycle management operations +... Note, relies on: +... 
-package being onboarded in etsi_package_onboarding_tests *** Variables *** ${SLEEP_INTERVAL_SEC}= 5 diff --git a/tests/so/etsi/etsi_vnf_notification_tests.robot b/tests/so/etsi/etsi_vnf_notification_tests.robot new file mode 100644 index 00000000..0c3d5bc9 --- /dev/null +++ b/tests/so/etsi/etsi_vnf_notification_tests.robot @@ -0,0 +1,49 @@ +*** Settings *** +Library Collections +Library RequestsLibrary +Library OperatingSystem +Library json +Documentation Test cases for VNF package subscription notifications and subscription cleanup +... Test include checking that VNF package notification was received and deleting the subscription +... Note, relies on: +... -subscription being created in etsi_vnf_subscription_tests and +... -package being onboarded in etsi_package_onboarding_tests + +*** Variables *** +${SLEEP_INTERVAL_SEC}= 5 +${MAXIMUM_ATTEMPTS_BEFORE_TIMEOUT}= 48 # Represents the maximum number of attempts that will be made before a timeout. It sleeps for SLEEP_INTERVAL_SEC seconds before retry. +${PACKAGE_MANAGEMENT_BASE_URL}= /so/vnfm-adapter/v1/vnfpkgm/v1 +${BASIC_AUTH}= Basic dm5mbTpwYXNzd29yZDEk +${VNF_PACKAGE_ID}= 73522444-e8e9-49c1-be29-d355800aa349 + +*** Test Cases *** +VNF Package Onboarding Notification Received By Subscriber + &{headers}= Create Dictionary Authorization=Bearer ${ACCESS_TOKEN} Content-Type=application/json Accept=application/json + Log To Console \nChecking If VNF Package Notification was received for vnfPkgId: ${VNF_PACKAGE_ID} + ${response}= Get On Session vnfm_simulator_session /vnfpkgm/v1/notification-cache-test/${VNF_PACKAGE_ID} headers=${headers} + Log To Console \nResponse:${response} + Run Keyword If '${response.status_code}' == '200' Log To Console \nexecuted with expected result + Should Be Equal As Strings '${response.status_code}' '200' + Log To Console \nResponse Content:\n${response.content} + ${json_response}= Evaluate json.loads(r"""${response.content}""", strict=False) json + Dictionary Should Contain Key ${json_response} id + Dictionary Should Contain Key ${json_response} notificationType + Should be Equal As Strings VnfPackageOnboardingNotification ${json_response}[notificationType] + Dictionary Should Contain Key ${json_response} subscriptionId + Dictionary Should Contain Key ${json_response} timeStamp + Dictionary Should Contain Key ${json_response} vnfPkgId + Should Be Equal As Strings ${VNF_PACKAGE_ID} ${json_response}[vnfPkgId] + Dictionary Should Contain Key ${json_response} vnfdId + Dictionary Should Contain Key ${json_response} _links + Log To Console \nexecuted with expected result + +Delete Subscription By SubscriptionId + Create Session so_vnfm_adapter_session http://${REPO_IP}:9092 + &{headers}= Create Dictionary Authorization=${BASIC_AUTH} Content-Type=application/json Accept=application/json + Log To Console \nDeleting Subscription with subscriptionId: ${SUBSCRIPTION_ID} from so-vnfm-adapter + ${response}= Delete On Session so_vnfm_adapter_session ${PACKAGE_MANAGEMENT_BASE_URL}/subscriptions/${SUBSCRIPTION_ID} headers=${headers} + Log To Console \nResponse:${response} + Run Keyword If '${response.status_code}' == '204' Log To Console \nexecuted with expected result + Should Be Equal As Strings '${response.status_code}' '204' + + diff --git a/tests/so/etsi/etsi_vnf_package_management_tests.robot b/tests/so/etsi/etsi_vnf_package_management_tests.robot index b7cd337f..000148f8 100644 --- a/tests/so/etsi/etsi_vnf_package_management_tests.robot +++ b/tests/so/etsi/etsi_vnf_package_management_tests.robot @@ -4,6 +4,9 @@ Library 
RequestsLibrary Library OperatingSystem Library json Library ArchiveLibrary +Documentation Test cases for VNF package management operations including get packages, package, content and artifacts +... Note, relies on: +... -package being onboarded in etsi_package_onboarding_tests *** Variables *** ${VNF_PACKAGE_ID}= 73522444-e8e9-49c1-be29-d355800aa349 diff --git a/tests/so/etsi/etsi_vnf_subscription_tests.robot b/tests/so/etsi/etsi_vnf_subscription_tests.robot new file mode 100644 index 00000000..34913f0a --- /dev/null +++ b/tests/so/etsi/etsi_vnf_subscription_tests.robot @@ -0,0 +1,92 @@ +*** Settings *** +Library Collections +Library RequestsLibrary +Library OperatingSystem +Library json +Documentation Test cases that tests subscription functionality for VNF package onboarding/change notifications +... Tests include subscribing for notifications, and querying that the subscriptions have been created +... Note: creates ACCESS_TOKEN and SUBSCRIPTION_ID variables used in later etsi_vnf_notification_tests + +*** Variables *** +${SLEEP_INTERVAL_SEC}= 5 +${MAXIMUM_ATTEMPTS_BEFORE_TIMEOUT}= 48 # Represents the maximum number of attempts that will be made before a timeout. It sleeps for SLEEP_INTERVAL_SEC seconds before retry. +${PACKAGE_MANAGEMENT_BASE_URL}= /so/vnfm-adapter/v1/vnfpkgm/v1 +${BASIC_AUTH}= Basic dm5mbTpwYXNzd29yZDEk +${ACCESS_TOKEN}= "" +${SUBSCRIPTION_ID}= "" + +*** Test Cases *** +Subscribe for Notifications + Create Session vnfm_simulator_session http://${REPO_IP}:9093 + &{headers1}= Create Dictionary Authorization=${BASIC_AUTH} Content-Type=application/json Accept=application/json + Log To Console \nGetting Access Token + ${response}= Post On Session vnfm_simulator_session url=/oauth/token?grant_type=client_credentials headers=${headers1} + Log To Console \nResponse:${response} + Run Keyword If '${response.status_code}' == '200' Log To Console \nexecuted with expected result + Should Be Equal As Strings '${response.status_code}' '200' + Log To Console \nResponse Content:\n${response.content} + ${json_response} Evaluate json.loads(r"""${response.content}""", strict=False) json + Set Global Variable ${ACCESS_TOKEN} ${json_response}[access_token] + ${data}= Get Binary File ${CURDIR}${/}data${/}subscriptionRequest.json + &{headers2}= Create Dictionary Authorization=Bearer ${ACCESS_TOKEN} Content-Type=application/json Accept=application/json + Log To Console \nSubscribing For VNF Package Notifications + ${response2}= Post On Session vnfm_simulator_session /vnfpkgm/v1/subscribe data=${data} headers=${headers2} + Log To Console \nResponse:\n${response2} + Log To Console \nResponse Content:\n${response2.content} + Run Keyword If '${response2.status_code}' == '200' Log To Console \nexecuted with expected result + Should Be Equal As Strings '${response2.status_code}' '200' + ${json_response2}= Evaluate json.loads(r"""${response2.content}""", strict=False) json + Dictionary Should Contain Key ${json_response2} id + Set Global Variable ${SUBSCRIPTION_ID} ${json_response2}[id] + Log To Console \nid: ${SUBSCRIPTION_ID} + Dictionary Should Contain Key ${json_response2} filter + ${filter}= Set Variable ${json_response2}[filter] + Dictionary Should Contain Key ${filter} notificationTypes + Dictionary Should Contain Key ${filter} vnfdId + Dictionary Should Contain Key ${filter} operationalState + Dictionary Should Contain Key ${json_response2} callbackUri + Dictionary Should Contain Key ${json_response2} _links + Log To Console \nexecuted with expected result + +Get Subscriptions + Create 
Session so_vnfm_adapter_session http://${REPO_IP}:9092 + &{headers}= Create Dictionary Authorization=${BASIC_AUTH} Content-Type=application/json Accept=application/json + Log To Console \nGetting Subscriptions from so-vnfm-adapter + ${response}= Get On Session so_vnfm_adapter_session ${PACKAGE_MANAGEMENT_BASE_URL}/subscriptions headers=${headers} + Log To Console \nResponse:${response} + Run Keyword If '${response.status_code}' == '200' Log To Console \nexecuted with expected result + Should Be Equal As Strings '${response.status_code}' '200' + Log To Console \nResponse Content:\n${response.content} + ${json_response} Evaluate json.loads(r"""${response.content}""", strict=False) json + ${subscription}= Set Variable ${json_response}[0] + Dictionary Should Contain Key ${subscription} id + ${sub_id}= Set Variable ${subscription}[id] + Should Be Equal As Strings '${sub_id}' '${SUBSCRIPTION_ID}' + Dictionary Should Contain Key ${subscription} filter + ${filter}= Set Variable ${subscription}[filter] + Dictionary Should Contain Key ${filter} notificationTypes + Dictionary Should Contain Key ${filter} vnfdId + Dictionary Should Contain Key ${filter} operationalState + Dictionary Should Contain Key ${subscription} callbackUri + Log To Console \nexecuted with expected result + +Get Subscription By Subscription Id + Create Session so_vnfm_adapter_session http://${REPO_IP}:9092 + &{headers}= Create Dictionary Authorization=${BASIC_AUTH} Content-Type=application/json Accept=application/json + Log To Console \nGetting Subscription with id ${SUBSCRIPTION_ID} from so-vnfm-adapter + ${response}= Get On Session so_vnfm_adapter_session ${PACKAGE_MANAGEMENT_BASE_URL}/subscriptions/${SUBSCRIPTION_ID} headers=${headers} + Log To Console \nResponse:${response} + Run Keyword If '${response.status_code}' == '200' Log To Console \nexecuted with expected result + Should Be Equal As Strings '${response.status_code}' '200' + Log To Console \nResponse Content:\n${response.content} + ${json_response} Evaluate json.loads(r"""${response.content}""", strict=False) json + Dictionary Should Contain Key ${json_response} id + ${sub_id}= Set Variable ${json_response}[id] + Should Be Equal As Strings '${sub_id}' '${SUBSCRIPTION_ID}' + Dictionary Should Contain Key ${json_response} filter + ${filter}= Set Variable ${json_response}[filter] + Dictionary Should Contain Key ${filter} notificationTypes + Dictionary Should Contain Key ${filter} vnfdId + Dictionary Should Contain Key ${filter} operationalState + Dictionary Should Contain Key ${json_response} callbackUri + Log To Console \nexecuted with expected result diff --git a/tests/so/sanity-check/data/macroflow.json b/tests/so/sanity-check/data/macroflow.json new file mode 100644 index 00000000..87f782a4 --- /dev/null +++ b/tests/so/sanity-check/data/macroflow.json @@ -0,0 +1,161 @@ +{ + "requestDetails": { + "requestInfo": { + "suppressRollback": false, + "productFamilyId": "1234", + "requestorId": "demo", + "instanceName": "CsitEtsiInstance", + "source": "VID" + }, + "modelInfo": { + "modelType": "service", + "modelInvariantId": "b9202a74-4c16-4245-83ad-1cd53c813214", + "modelVersionId": "6f2c7614-571c-4482-aa2c-5eac0308aa16", + "modelName": "vfw_k8s_demo_CNF", + "modelVersion": "1.0" + }, + "cloudConfiguration": { + "tenantId": "693c7729b2364a26a3ca602e6f66187d", + "cloudOwner": "CloudOwner", + "lcpCloudRegionId": "EtsiCloudRegion" + }, + "subscriberInfo": { + "globalSubscriberId": "DemoCustomer" + }, + "requestParameters": { + "subscriptionServiceType": "vCPE", + "userParams": 
[{ + "Homing_Solution": "none" + }, { + "service": { + "instanceParams": [], + "instanceName": "CsitEtsiInstance", + "resources": { + "vnfs": [ + { + "modelInfo": { + "modelName": "VF_vfw_k8s_demo_CNF", + "modelVersionId": "daeb6a5c-3a8a-40b0-a575-8cca71dd0b7c", + "modelInvariantUuid": "517403c8-fab8-4cfe-9bc5-d94f5e34b257", + "modelVersion": "1.0", + "modelCustomizationId": "8b8b67bd-01a4-42f7-b0fc-1d3a0f2765fd", + "modelInstanceName": "VF_vfw_k8s_demo_CNF 0" + }, + "cloudConfiguration": { + "tenantId": "693c7729b2364a26a3ca602e6f66187d", + "cloudOwner": "CloudOwner", + "lcpCloudRegionId": "EtsiCloudRegion" + }, + "platform": { + "platformName": "test" + }, + "lineOfBusiness": { + "lineOfBusinessName": "LOB-Demonstration" + }, + "productFamilyId": "1234", + "instanceName": "VF_frankfurt_vfw_k8s_demo_final", + "instanceParams": [{ + "sdnc_model_name": "vFW_CNF_CDS", + "sdnc_model_version": "1.0.45", + "sdnc_artifact_name": "vnf" + } + ], + "vfModules": [ + { + "modelInfo": { + "modelName": "VfVfwK8sDemoCnf..base_template..module-0", + "modelVersionId": "86224a10-c17e-42c9-9809-f3c31ba1b781", + "modelInvariantUuid": "2bc34946-d57d-4053-9e9f-d60587c9aa12", + "modelVersion": "1", + "modelCustomizationId": "905ffa63-7011-4cbe-943c-237f303b4e9f" + }, + "instanceName": "vf_frankfurt_vfw_k8s_demo_final0..VfFrankfurtVfwK8sDemoFinal..base_template..module-0", + "instanceParams": [{ + "k8s-rb-profile-name": "vfw-cnf-cds-base-profile", + "k8s-rb-profile-namespace": "test-vfw1", + "sdnc_model_name": "vFW_CNF_CDS", + "sdnc_model_version": "1.0.45", + "vf_module_label": "base_template" + } + ] + }, + { + "modelInfo": { + "modelName": "VfVfwK8sDemoCnf..vsn..module-1", + "modelVersionId": "3c50323b-5f8c-4b31-938f-8e996a2e0001", + "modelInvariantUuid": "11e50367-a4c9-4203-a838-518446ec816a", + "modelVersion": "1", + "modelCustomizationId": "1812807a-9f6b-42e4-b78c-814c3fea5a4c" + }, + "instanceName": "vf_frankfurt_vfw_k8s_demo_final0..VfFrankfurtVfwK8sDemoFinal..vsn..module-1", + "instanceParams": [{ + "k8s-rb-profile-name": "vfw-cnf-cds-base-profile", + "k8s-rb-profile-namespace": "vfirewall", + "sdnc_model_name": "vFW_CNF_CDS", + "sdnc_model_version": "7.0.0", + "vf_module_label": "vsn" + } + ] + }, + { + "modelInfo": { + "modelName": "VfVfwK8sDemoCnf..vpkg..module-2", + "modelVersionId": "104e8b4e-d404-4a5b-9beb-f92a217fd6ee", + "modelInvariantUuid": "1782dcdb-2a4c-4665-8d04-be6b989fea8e", + "modelVersion": "1", + "modelCustomizationId": "02fb0319-f37d-4e8b-9009-263fffdb2549" + }, + "instanceName": "vf_frankfurt_vfw_k8s_demo_final0..VfFrankfurtVfwK8sDemoFinal..vpkg..module-2", + "instanceParams": [{ + "k8s-rb-profile-name": "vfw-cnf-cds-base-profile", + "k8s-rb-profile-namespace": "vfirewall", + "sdnc_model_name": "vFW_CNF_CDS", + "sdnc_model_version": "7.0.0", + "vf_module_label": "vpkg" + } + ] + }, + { + "modelInfo": { + "modelName": "VfVfwK8sDemoCnf..vfw..module-3", + "modelVersionId": "a5f678db-2e5f-47be-8fab-1540bc7482d8", + "modelInvariantUuid": "c0869243-7667-4434-b9cc-84791f07a13a", + "modelVersion": "1", + "modelCustomizationId": "f144c7c8-da93-4e06-ba93-d26e69eba509" + }, + "instanceName": "vf_frankfurt_vfw_k8s_demo_final0..VfFrankfurtVfwK8sDemoFinal..vfw..module-3", + "instanceParams": [{ + "k8s-rb-profile-name": "vfw-cnf-cds-base-profile", + "k8s-rb-profile-namespace": "vfirewall", + "sdnc_model_name": "vFW_CNF_CDS", + "sdnc_model_version": "7.0.0", + "vf_module_label": "vfw" + } + ] + } + + ] + } + ] + }, + "modelInfo": { + "modelVersion": "1.0", + "modelVersionId": 
"6f2c7614-571c-4482-aa2c-5eac0308aa16", + "modelInvariantId": "b9202a74-4c16-4245-83ad-1cd53c813214", + "modelName": "vfw_k8s_demo_CNF", + "modelType": "service" + } + } + } + ], + "aLaCarte": false + }, + "project": { + "projectName": "etsiCsitProject" + }, + "owningEntity": { + "owningEntityId": "f2e1071e-3d47-4a65-94d4-e473ec03326a", + "owningEntityName": "OE-Demonstration" + } + } +} diff --git a/tests/so/sanity-check/data/serviceBasicVfCnfnotification.json b/tests/so/sanity-check/data/serviceBasicVfCnfnotification.json new file mode 100644 index 00000000..02485e18 --- /dev/null +++ b/tests/so/sanity-check/data/serviceBasicVfCnfnotification.json @@ -0,0 +1,294 @@ +{ + "distributionID": "2d6c5aa8-b644-4f30-a632-5577801ef959", + "serviceName": "vfw_k8s_demo_CNF", + "serviceVersion": "1.0", + "serviceUUID": "6f2c7614-571c-4482-aa2c-5eac0308aa16", + "serviceDescription": "service", + "serviceInvariantUUID": "b9202a74-4c16-4245-83ad-1cd53c813214", + "resources": [ + { + "resourceInstanceName": "VF_vfw_k8s_demo_CNF 0", + "resourceName": "VF_vfw_k8s_demo_CNF", + "resourceVersion": "1.0", + "resoucreType": "VF", + "resourceUUID": "daeb6a5c-3a8a-40b0-a575-8cca71dd0b7c", + "resourceInvariantUUID": "517403c8-fab8-4cfe-9bc5-d94f5e34b257", + "resourceCustomizationUUID": "8b8b67bd-01a4-42f7-b0fc-1d3a0f2765fd", + "category": "Generic", + "subcategory": "Abstract", + "artifacts": [{ + "artifactName": "vf_vfw_k8s_demo_cnf0_modules.json", + "artifactType": "VF_MODULES_METADATA", + "artifactURL": "/unzipped_sdc_csar/vf_vfw_k8s_demo_cnf0_modules.json", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "86224a10-c17e-42c9-9809-f3c31ba1b781", + "artifactVersion": "1" + }, +{ + "artifactName": "vf_vfw_k8s_demo_cnf1_modules.json", + "artifactType": "VF_MODULES_METADATA", + "artifactURL": "/unzipped_sdc_csar/vf_vfw_k8s_demo_cnf0_modules.json", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "3c50323b-5f8c-4b31-938f-8e996a2e0001", + "artifactVersion": "1" + }, + +{ + "artifactName": "base_template.env", + "artifactType": "HEAT_ENV", + "artifactURL": "/unzipped_sdc_csar/base_template.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "ede4c2de-133f-49e3-9daa-8c56b65b5c6b", + "artifactVersion": "1" + }, +{ + "artifactName": "base_template_cloudtech_k8s_charts.env", + "artifactType": "HEAT_ENV", + "artifactURL": "/unzipped_sdc_csar/base_template_cloudtech_k8s_charts.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "6833f47d-e91b-4887-8b55-f68ef25bccf3", + "artifactVersion": "1" + }, + +{ + "artifactName": "base_template.env", + "artifactType": "HEAT_ENV", + "artifactURL": "/unzipped_sdc_csar/base_template.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "7f1dbc99-849e-4af8-bdcb-6c214f939428", + "artifactVersion": "1" + }, +{ + "artifactName": "base_template_cloudtech_k8s_charts.env", + 
"artifactType": "HEAT_ENV", + "artifactURL": "/unzipped_sdc_csar/base_template_cloudtech_k8s_charts.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "71607418-af27-4712-8e0c-cfe992b95a38", + "artifactVersion": "1" + }, +{ + "artifactName": "vfw.env", + "artifactType": "HEAT_ENV", + "artifactURL": "/unzipped_sdc_csar/vfw.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "406fe9df-aa4c-42c1-8f83-c7ac11169fd4", + "artifactVersion": "1" + }, +{ + "artifactName": "vfw_cloudtech_k8s_charts.env", + "artifactType": "HEAT_ENV", + "artifactURL": "/unzipped_sdc_csar/vfw_cloudtech_k8s_charts.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "33f7aab2-3324-4523-b37a-625b0cf77dfe", + "artifactVersion": "1" + }, +{ + "artifactName": "vpkg_cloudtech_k8s_charts.env", + "artifactType": "HEAT_ENV", + "artifactURL": "/unzipped_sdc_csar/vpkg_cloudtech_k8s_charts.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "8ab17b5a-3fbd-4142-a75f-c55ffd4c5af5", + "artifactVersion": "1" + }, +{ + "artifactName": "vsn.env", + "artifactType": "HEAT_ENV", + "artifactURL": "/unzipped_sdc_csar/vsn.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "e17c1e60-f7da-4767-9a0e-02c8ff9c0f2e", + "artifactVersion": "1" + }, +{ + "artifactName": "vsn_cloudtech_k8s_charts.env", + "artifactType": "HEAT_ENV", + "artifactURL": "/unzipped_sdc_csar/vsn_cloudtech_k8s_charts.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "989dbc01-2fa5-463b-abaa-cf73996ea12b", + "artifactVersion": "1" + }, +{ + "artifactName": "base_template_cloudtech_k8s_charts.tgz", + "artifactType": "CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT", + "artifactURL": "/unzipped_sdc_csar/base_template_cloudtech_k8s_charts.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "406fe9df-aa4c-42c1-8f83-c7ac11169fd4", + "artifactVersion": "1" + }, +{ + "artifactName": "vfw_cloudtech_k8s_charts.tgz", + "artifactType": "CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT", + "artifactURL": "/unzipped_sdc_csar/vfw_cloudtech_k8s_charts.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "6833f47d-e91b-4887-8b55-f68ef25bccf3", + "artifactVersion": "1" + }, +{ + "artifactName": "vpkg_cloudtech_k8s_charts.tgz", + "artifactType": "CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT", + "artifactURL": "/unzipped_sdc_csar/vpkg_cloudtech_k8s_charts.env", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": 
"Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "406fe9df-aa4c-42c1-8f83-c7ac11169fd4", + "artifactVersion": "1" + }, +{ + "artifactName": "vsn_cloudtech_k8s_charts.tgz", + "artifactType": "CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT", + "artifactURL": "/unzipped_sdc_csar/vsn_cloudtech_k8s_charts.tgz", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "8ab17b5a-3fbd-4142-a75f-c55ffd4c5af5", + "artifactVersion": "1" + } + +, +{ + "artifactName": "vsn_cloudtech_k8s_charts.tgz", + "artifactType": "CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT", + "artifactURL": "/unzipped_sdc_csar/vsn_cloudtech_k8s_charts.tgz", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "aaeee885-5821-4e08-8da6-0bb3b74f24e9", + "artifactVersion": "1" + }, +{ + "artifactName": "vsn_cloudtech_k8s_charts.tgz", + "artifactType": "CLOUD_TECHNOLOGY_SPECIFIC_ARTIFACT", + "artifactURL": "/unzipped_sdc_csar/vsn_cloudtech_k8s_charts.tgz", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "b4510b34-e39c-4239-bacc-7dd2f3c2a418", + "artifactVersion": "1" + } + +, +{ + "artifactName": "base_template.yaml", + "artifactType": "HEAT", + "artifactURL": "/unzipped_sdc_csar/base_template.yaml", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "33f7aab2-3324-4523-b37a-625b0cf77dfe", + "artifactVersion": "1" + } + +, +{ + "artifactName": "vfw.yaml", + "artifactType": "HEAT", + "artifactURL": "/unzipped_sdc_csar/vfw.yaml", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "6833f47d-e91b-4887-8b55-f68ef25bccf3", + "artifactVersion": "1" + } + +, +{ + "artifactName": "vpkg.yaml", + "artifactType": "HEAT", + "artifactURL": "/unzipped_sdc_csar/vpkg.yaml", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "42b80d18-69ec-4233-8844-f0df6ebb6e3d", + "artifactVersion": "1" + } +, +{ + "artifactName": "vsn.yaml", + "artifactType": "HEAT", + "artifactURL": "/unzipped_sdc_csar/vsn.yaml", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "406fe9df-aa4c-42c1-8f83-c7ac11169fd4", + "artifactVersion": "1" + } + +, +{ + "artifactName": "vendor-license-model.xml", + "artifactType": "VENDOR_LICENSE", + "artifactURL": "/unzipped_sdc_csar/vendor-license-model.xml", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "6e1dbc99-849e-4af8-bdcb-6c214f939491", + "artifactVersion": "1" + } + +, +{ + "artifactName": "vf-license-model.xml", + "artifactType": "VF_LICENSE", + "artifactURL": 
"/unzipped_sdc_csar/vf-license-model.xml", + "artifactChecksum": "MDJkYjNmNjEzM2Y1ZDgzNzZiZWUxMjZkMzA3YzkwZDI\u003d", + "artifactDescription": "Auto-generated VF Modules information artifact", + "artifactTimeout": 120, + "artifactUUID": "ede4c2de-133f-49e3-9daa-8c56b65b5c6b", + "artifactVersion": "1" + } + +] + } + + + ], + "serviceArtifacts": [ + { + "artifactName": "service-VfwK8sDemoCnf-template.yml", + "artifactType": "TOSCA_TEMPLATE", + "artifactURL": "/unzipped_sdc_csar/service-VfwK8sDemoCnf-template.yml", + "artifactChecksum": "NTUzMDU5YzM3MTk4OGNiNjQ2OGRlMWY2YjU3MjE2YjM\u003d", + "artifactDescription": "TOSCA representation of the asset", + "artifactTimeout": 0, + "artifactUUID": "6f2c7614-571c-4482-aa2c-5eac0308aa16", + "artifactVersion": "1" + }, + { + "artifactName": "service-VfwK8sDemoCnf-csar.csar", + "artifactType": "TOSCA_CSAR", + "artifactURL": "/unzipped_sdc_csar/service-VfwK8sDemoCnf-csar.csar", + "artifactChecksum": "ZTRhOGI0M2UxN2ZhYjQ0ODI5ZDZhZTExZTFkMGU3N2Y\u003d", + "artifactDescription": "TOSCA definition package of the asset", + "artifactTimeout": 0, + "artifactUUID": "6f2c7614-571c-4482-aa2c-5eac0308aa16", + "artifactVersion": "1" + } + ], + "workloadContext": "Production" +} + diff --git a/tests/so/sanity-check/macroflow.robot b/tests/so/sanity-check/macroflow.robot new file mode 100644 index 00000000..92224ea1 --- /dev/null +++ b/tests/so/sanity-check/macroflow.robot @@ -0,0 +1,25 @@ +*** Settings *** +Library Collections +Library RequestsLibrary +Library OperatingSystem +Library json + +*** Test Cases *** +Distribute Service Template + Create Session sdc_controller_session http://${REPO_IP}:8085 + ${data}= Get Binary File ${CURDIR}${/}data${/}serviceBasicVfCnfnotification.json + &{headers}= Create Dictionary Authorization=Basic bXNvX2FkbWluOnBhc3N3b3JkMSQ= resource-location=/distribution-test-zip/unzipped/ Content-Type=application/json Accept=application/json + ${resp}= Post Request sdc_controller_session /test/treatNotification/v1 data=${data} headers=${headers} + Log To Console Received status code: ${resp.status_code} + Run Keyword If '${resp.status_code}' == '200' log to console \nexecuted with expected result + Should Be Equal As Strings '${resp.status_code}' '200' + + +Macroflow + Create Session api_handler_session http://${REPO_IP}:8080 + ${data}= Get Binary File ${CURDIR}${/}data${/}macroflow.json + &{headers}= Create Dictionary Authorization=Basic SW5mcmFQb3J0YWxDbGllbnQ6cGFzc3dvcmQxJA== Content-Type=application/json Accept=application/json + ${service_instantiation_request}= Post Request api_handler_session /onap/so/infra/serviceInstantiation/v7/serviceInstances data=${data} headers=${headers} + Log To Console Received status code: ${service_instantiation_request.status_code} + Run Keyword If '${service_instantiation_request.status_code}' == '202' log to console \nexecuted with expected result + Should Be Equal As Strings '${service_instantiation_request.status_code}' '202'