[TESTS] Add additional tests for testkube-based testing 36/140736/3
author     Andreas Geissler <andreas-geissler@telekom.de>
           Tue, 22 Apr 2025 13:13:18 +0000 (15:13 +0200)
committer  Andreas Geissler <andreas-geissler@telekom.de>
           Wed, 23 Apr 2025 13:37:45 +0000 (15:37 +0200)
- aai_initial_data_setup
- add_delete_cnf_macro
...

Issue-ID: TEST-404

Change-Id: Iaf1c26cd239782156dff2a002947aae687c5049e
Signed-off-by: Andreas Geissler <andreas-geissler@telekom.de>
117 files changed:
.gitignore
pylama.ini
requirements.txt
run_test.py
setup.cfg
setup.py
src/onaptests/configuration/aai_initial_data_setup_settings.py [new file with mode: 0644]
src/onaptests/configuration/add_delete_cnf_macro_settings.py [new file with mode: 0644]
src/onaptests/configuration/basic_cnf_macro_settings.py
src/onaptests/configuration/basic_cps_settings.py
src/onaptests/configuration/basic_kafka_settings.py [new file with mode: 0644]
src/onaptests/configuration/basic_onboard_settings.py
src/onaptests/configuration/basic_policy_settings.py [new file with mode: 0644]
src/onaptests/configuration/basic_prh_settings.py [new file with mode: 0644]
src/onaptests/configuration/basic_vm_macro_settings.py
src/onaptests/configuration/cba_enrichment_settings.py
src/onaptests/configuration/cba_verification_settings.py [new file with mode: 0644]
src/onaptests/configuration/cds_resource_resolution_settings.py
src/onaptests/configuration/check_time_sync_settings.py [new file with mode: 0644]
src/onaptests/configuration/generic_network_settings.py [new file with mode: 0644]
src/onaptests/configuration/instantiate_pnf_without_registration_event_settings.py [new file with mode: 0644]
src/onaptests/configuration/instantiate_service_without_resource_settings.py [new file with mode: 0644]
src/onaptests/configuration/modify_service_pnf_settings.py [new file with mode: 0644]
src/onaptests/configuration/pnf_with_ves_event_settings.py [new file with mode: 0644]
src/onaptests/configuration/settings.py
src/onaptests/configuration/status_settings.py
src/onaptests/configuration/ves_publish_settings.py [new file with mode: 0644]
src/onaptests/scenario/aai_initial_data_setup.py [new file with mode: 0644]
src/onaptests/scenario/add_delete_cnf_macro.py [new file with mode: 0644]
src/onaptests/scenario/add_pnf_in_running_svc.py [new file with mode: 0644]
src/onaptests/scenario/basic_cds.py
src/onaptests/scenario/basic_cnf_macro.py
src/onaptests/scenario/basic_kafka.py [new file with mode: 0644]
src/onaptests/scenario/basic_onboard.py
src/onaptests/scenario/basic_policy.py [new file with mode: 0644]
src/onaptests/scenario/basic_prh.py [new file with mode: 0644]
src/onaptests/scenario/cba_verification.py [new file with mode: 0644]
src/onaptests/scenario/check_time_sync.py [new file with mode: 0644]
src/onaptests/scenario/generic_network.py [new file with mode: 0644]
src/onaptests/scenario/instantiate_pnf_with_ves_event.py [new file with mode: 0644]
src/onaptests/scenario/instantiate_pnf_without_registration_event.py [new file with mode: 0644]
src/onaptests/scenario/instantiate_service_without_resource.py [new file with mode: 0644]
src/onaptests/scenario/modify_service_pnf.py [new file with mode: 0644]
src/onaptests/scenario/pnf_macro.py
src/onaptests/scenario/publish_ves_event.py [new file with mode: 0644]
src/onaptests/steps/AAIInitialSetup/create_operations.py [new file with mode: 0644]
src/onaptests/steps/AAIInitialSetup/get_all_operations.py [new file with mode: 0644]
src/onaptests/steps/AAIInitialSetup/get_operations.py [new file with mode: 0644]
src/onaptests/steps/AAIInitialSetup/update_operation_step.py [new file with mode: 0644]
src/onaptests/steps/base.py
src/onaptests/steps/cloud/add_pnf.py [new file with mode: 0644]
src/onaptests/steps/cloud/check_status.py
src/onaptests/steps/cloud/cloud_region_create.py
src/onaptests/steps/cloud/cloud_region_upadte.py [new file with mode: 0644]
src/onaptests/steps/cloud/complex_create.py
src/onaptests/steps/cloud/complex_update.py [new file with mode: 0644]
src/onaptests/steps/cloud/connect_service_subscription_to_cloud_region.py
src/onaptests/steps/cloud/customer_create.py
src/onaptests/steps/cloud/customer_service_subscription_create.py
src/onaptests/steps/cloud/customer_update.py [new file with mode: 0644]
src/onaptests/steps/cloud/expose_service_node_port.py
src/onaptests/steps/cloud/lineofbusiness_create.py [new file with mode: 0644]
src/onaptests/steps/cloud/link_cloud_to_complex.py
src/onaptests/steps/cloud/link_cloudregion_to_project.py [new file with mode: 0644]
src/onaptests/steps/cloud/link_lineofbusiness_to_tenant.py [new file with mode: 0644]
src/onaptests/steps/cloud/link_owningentity_to_tenant.py [new file with mode: 0644]
src/onaptests/steps/cloud/owning_entity_create.py [new file with mode: 0644]
src/onaptests/steps/cloud/owning_entity_update.py [new file with mode: 0644]
src/onaptests/steps/cloud/platform_create.py [new file with mode: 0644]
src/onaptests/steps/cloud/project_create.py [new file with mode: 0644]
src/onaptests/steps/cloud/publish_pnf_reg_event_to_kafka.py [new file with mode: 0644]
src/onaptests/steps/cloud/register_cloud.py
src/onaptests/steps/cloud/resources.py
src/onaptests/steps/cloud/service_subscription_update.py [new file with mode: 0644]
src/onaptests/steps/cloud/tenant_create.py [new file with mode: 0644]
src/onaptests/steps/instantiate/k8s_profile_create.py
src/onaptests/steps/instantiate/service_ala_carte.py
src/onaptests/steps/instantiate/service_macro.py
src/onaptests/steps/instantiate/so/add_cnf_in_service.py [new file with mode: 0644]
src/onaptests/steps/instantiate/so/add_delete_cnf_base_step.py [new file with mode: 0644]
src/onaptests/steps/instantiate/so/add_pnf_in_service.py [new file with mode: 0644]
src/onaptests/steps/instantiate/so/delete_cnf_in_service.py [new file with mode: 0644]
src/onaptests/steps/instantiate/so/delete_pnf_in_service.py [new file with mode: 0644]
src/onaptests/steps/instantiate/so/generic_network_step.py [new file with mode: 0644]
src/onaptests/steps/instantiate/so/modify_pnf_in_service.py [new file with mode: 0644]
src/onaptests/steps/onboard/cds.py
src/onaptests/steps/onboard/cps.py
src/onaptests/steps/onboard/service.py
src/onaptests/steps/onboard/verify_cba.py [new file with mode: 0644]
src/onaptests/steps/onboard/vsp.py
src/onaptests/steps/policy/policy_operations.py [new file with mode: 0644]
src/onaptests/templates/artifacts/basic_cnf_cba_enriched.zip
src/onaptests/templates/artifacts/cba_enriched_new.zip [new file with mode: 0644]
src/onaptests/templates/artifacts/cds-resource-resolution/dd.json
src/onaptests/templates/artifacts/cds-resource-resolution/resource-resolution.zip
src/onaptests/templates/artifacts/create_kafka_topic_template.json.j2 [new file with mode: 0644]
src/onaptests/templates/artifacts/ntp_checker_daemon.yml.j2 [new file with mode: 0644]
src/onaptests/templates/artifacts/pm_message_file.json [new file with mode: 0644]
src/onaptests/templates/artifacts/pm_message_negative_file.json [new file with mode: 0644]
src/onaptests/templates/artifacts/pnf_instantiation_ves_event.json.j2 [new file with mode: 0644]
src/onaptests/templates/artifacts/pnf_registration_dmaap_event_template.json.j2 [new file with mode: 0644]
src/onaptests/templates/artifacts/pnf_registration_ves_event.json [new file with mode: 0644]
src/onaptests/templates/artifacts/ves_message_file.json [new file with mode: 0644]
src/onaptests/templates/artifacts/ves_message_file_negative.json [new file with mode: 0644]
src/onaptests/templates/slack-notifications/notifications.jinja [new file with mode: 0644]
src/onaptests/templates/vnf-services/basic_cnf_macro-service.yaml.j2
src/onaptests/templates/vnf-services/generic_network-service.yaml [new file with mode: 0644]
src/onaptests/templates/vnf-services/instantiate_service_without_resource.yaml [new file with mode: 0644]
src/onaptests/templates/vnf-services/modify-service-pnf.yaml [new file with mode: 0644]
src/onaptests/templates/vnf-services/pnf-service.yaml.j2
src/onaptests/utils/exceptions.py
src/onaptests/utils/gitlab.py [new file with mode: 0644]
src/onaptests/utils/kubernetes.py
src/onaptests/utils/kubernetes_kafka.py [new file with mode: 0644]
src/onaptests/utils/ntp_checker.py [new file with mode: 0644]
src/onaptests/utils/slack.py [new file with mode: 0644]
tox.ini

diff --git a/.gitignore b/.gitignore
index 69cc86e..c07be43 100644 (file)
@@ -32,3 +32,7 @@ benchmark/
 .tox/
 **/__pycache__/
 *.pyc
+.vscode/launch.json
+
+.v*
+.env
\ No newline at end of file
diff --git a/pylama.ini b/pylama.ini
index aa7fdc5..2886656 100644 (file)
@@ -25,7 +25,8 @@ disable =
     too-many-locals,
     too-many-statements,
     too-many-boolean-expressions,
-    too-many-positional-arguments
+    too-many-positional-arguments,
+    too-many-lines
 
 bad_functions = print
 load_plugins =
diff --git a/requirements.txt b/requirements.txt
index 632cb19..a3392cc 100644 (file)
@@ -2,10 +2,14 @@ cryptography==38.0.4
 xtesting==0.91.0
 avionix>=0.4.5
 openstacksdk>=0.61.0
-onapsdk==13.2.0
+onapsdk==14.1.0
 jinja2>3
 kubernetes>=22.6.0
 setuptools==65.3.0
 natural==0.2.0
+slack-sdk==3.21.3
 pg8000==1.30.1
 mysql-connector-python==8.3.0
+pandas==2.2.1
+matplotlib==3.8.3
+grpcio-health-checking==1.71.0
diff --git a/run_test.py b/run_test.py
index 7e5b410..0cd28c0 100644 (file)
@@ -26,7 +26,7 @@ MODULES_TO_RELOAD = [
 def get_entrypoints():
     config = configparser.ConfigParser()
     config.read('setup.cfg')
-    entry_points = config['entry_points']['xtesting.testcase']
+    entry_points = config['options.entry_points']['xtesting.testcase']
     config = configparser.ConfigParser()
     config.read_string(f"[entry_points]\n{entry_points}")
     entry_points = config['entry_points']
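
This run_test.py hunk tracks the setup.cfg change further below, where the test-case list moves from a bare [entry_points] section to the declarative setuptools [options.entry_points] section. A minimal, self-contained sketch of the same two-pass configparser lookup (the inline string stands in for setup.cfg):

import configparser

# Inline stand-in for setup.cfg; the real file lists every scenario shown further below.
SETUP_CFG = """
[options.entry_points]
xtesting.testcase =
  basic_onboard = onaptests.scenario.basic_onboard:BasicOnboard
  status = onaptests.scenario.status:Status
"""

config = configparser.ConfigParser()
config.read_string(SETUP_CFG)
raw = config["options.entry_points"]["xtesting.testcase"]

# Second pass: re-parse the indented block so each case name maps to its module:Class target.
inner = configparser.ConfigParser()
inner.read_string(f"[entry_points]\n{raw}")
for name, target in inner["entry_points"].items():
    print(name, "->", target)
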
index 8a75072..68fe394 100644 (file)
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,7 +1,7 @@
 # SPDX-License-Identifier: Apache-2.0
 [metadata]
 name = onaptests
-version = 0.0.1
+version = 1.0.0
 description = Test SDK to use ONAP Programatically
 long_description = file: README.md
 url = https://git.onap.org/testsuite/pythonsdk-tests
@@ -21,8 +21,20 @@ packages=find_namespace:
 setup_requires =
   pytest-runner==5.2
 install_requires =
+  cryptography==38.0.4
+  xtesting==0.91.0
+  avionix>=0.4.5
+  openstacksdk>=0.61.0
+  onapsdk==14.1.0
+  jinja2>3
+  kubernetes>=22.6.0
+  setuptools==65.3.0
+  natural==0.2.0
+  slack-sdk==3.21.3
   pg8000==1.30.1
   mysql-connector-python==8.3.0
+  pandas==2.2.1
+  matplotlib==3.8.3
 tests_require =
   mock
   pytest
@@ -44,8 +56,9 @@ addopts =
 
 testpaths = tests
 
-[entry_points]
+[options.entry_points]
 xtesting.testcase =
+  check_time_sync = onaptests.scenario.check_time_sync:CheckTimeSync
   basic_vm = onaptests.scenario.basic_vm:BasicVm
   basic_vm_macro = onaptests.scenario.basic_vm_macro:BasicVmMacro
   basic_vm_macro_stability = onaptests.scenario.basic_vm_macro_stability:BasicVmMacroStability
@@ -55,9 +68,23 @@ xtesting.testcase =
   clearwater_ims = onaptests.scenario.clearwater_ims:ClearwaterIms
   basic_onboard = onaptests.scenario.basic_onboard:BasicOnboard
   pnf_macro = onaptests.scenario.pnf_macro:PnfMacro
+  instantiate_pnf_without_registration_event = onaptests.scenario.instantiate_pnf_without_registration_event:InstantiatePnfWithoutRegistrationEvent
+  pnf_with_ves_event = onaptests.scenario.instantiate_pnf_with_ves_event:PnfWithVesEvent
+  add_pnf_in_running_service = onaptests.scenario.add_pnf_in_running_svc:AddPnfInRunningSvc
   cds_resource_resolution = onaptests.scenario.cds_resource_resolution:CDSResourceResolution
   multi_vnf_macro = onaptests.scenario.multi_vnf_macro:MultiVnfUbuntuMacro
   basic_cnf_macro = onaptests.scenario.basic_cnf_macro:BasicCnfMacro
   basic_cps = onaptests.scenario.basic_cps:BasicCps
   status = onaptests.scenario.status:Status
   basic_sdnc = onaptests.scenario.basic_sdnc:BasicSdnc
+  ves_publish = onaptests.scenario.publish_ves_event:VesCollectorTestCase
+  basic_kafka = onaptests.scenario.basic_kafka:KafkaTestCase
+  generic_network = onaptests.scenario.generic_network:GenericNetwork
+  cba_verification = onaptests.scenario.cba_verification:CbaVerification
+  basic_intent = onaptests.scenario.basic_intent:IntentScenario
+  instantiate_service_without_resource = onaptests.scenario.instantiate_service_without_resource:InstantiateServiceWithoutResource
+  aai_initial_data_setup = onaptests.scenario.aai_initial_data_setup:AAICrud
+  basic_prh = onaptests.scenario.basic_prh:PRHBase
+  modify_service_pnf = onaptests.scenario.modify_service_pnf:ModifyPnf
+  add_delete_cnf_macro = onaptests.scenario.add_delete_cnf_macro:AddDeleteCnfInRunningSvcScenario
+  basic_policy = onaptests.scenario.basic_policy:PolicyScenario
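
For orientation, once the package is reinstalled (e.g. with an editable pip install) each of the names above is resolved through the standard xtesting.testcase entry-point group. A small lookup sketch, assuming Python 3.10+ for the keyword form of entry_points():

from importlib.metadata import entry_points

# Enumerate every registered pythonsdk-tests scenario and the class it resolves to.
for ep in entry_points(group="xtesting.testcase"):
    print(ep.name, "->", ep.value)

# Loading an entry yields the ScenarioBase subclass that xtesting instantiates and runs.
status_cls = next(ep for ep in entry_points(group="xtesting.testcase")
                  if ep.name == "status").load()
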
index 8b44c49..d95c771 100644 (file)
--- a/setup.py
+++ b/setup.py
@@ -5,6 +5,5 @@
 from setuptools import setup
 
 setup(
-    setup_requires=['pbr','setuptools'],
-    pbr=True
+    setup_requires=['setuptools']
 )
diff --git a/src/onaptests/configuration/aai_initial_data_setup_settings.py b/src/onaptests/configuration/aai_initial_data_setup_settings.py
new file mode 100644 (file)
index 0000000..ab7e908
--- /dev/null
@@ -0,0 +1,33 @@
+from .settings import *  # noqa
+
+SERVICE_NAME = "AAI_Initial_Data_Setup"
+SERVICE_DETAILS = "Verification of various AAI API endpoints"
+GLOBAL_CUSTOMER_ID = "basiccnf-macro-customer"
+SUBSCRIBER_NAME = "basiccnf-macro-customer"
+SUBSCRIBER_TYPE = "INFRA"
+
+CLEANUP_FLAG = True
+
+OWNING_ENTITY_ID = "test_owning_entity"
+OWNING_ENTITY_NAME = "test_owning_entity_name"
+PLATFORM = "test_platform"
+COMPLEX_PHYSICAL_LOCATION_ID = "test-lannion"
+LINE_OF_BUSINESS = "test_lineofbusiness"
+CLOUD_REGION_CLOUD_OWNER = "test-cloud-owner"  # must not contain _
+CLOUD_REGION_ID = "test_cloud_region"
+CLOUD_REGION_TYPE = "test-cloud-region-type"
+CLOUD_REGION_VERSION = "test-cloud-region-version"
+CLOUD_OWNER_DEFINED_TYPE = "test-cloud-owner-defined-type"
+COMPLEX_DATA_CENTER_CODE = "test-complex-data-center-code"
+
+SERVICE_INSTANCE_ID = "test_service_instance_id"
+TENANT_NAME = "test_tenant_name"
+TENANT_ID = "test_tenant_id"
+
+# update entities
+COMPLEX_DATA_CENTER_CODE = "updated_test-complex-data-center-code"
+UPDATED_SUBSCRIBER_TYPE = "updated_subscriber_type"
+UPDATED_CLOUD_OWNER_DEFINED_TYPE = "updated_cloud_owner_defined_type"
+UPDATED_PHYSICAL_LOCATION_TYPE = "updated_physical_location_type"
+UPDATED_CLOUD_TYPE = "updated_cloud_type"
+UPDATED_OWNING_ENTITY_NAME = "updated_owning_entity_name"
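
As with the other *_settings modules in this change, these constants are not imported directly by the steps; they are resolved lazily through onapsdk's settings loader, which is selected with the ONAP_PYTHON_SDK_SETTINGS environment variable. A minimal sketch, assuming the default onapsdk loading behaviour:

import os

# Select this scenario's configuration module for the onapsdk lazy settings loader.
os.environ["ONAP_PYTHON_SDK_SETTINGS"] = \
    "onaptests.configuration.aai_initial_data_setup_settings"

from onapsdk.configuration import settings  # noqa: E402

print(settings.GLOBAL_CUSTOMER_ID)  # basiccnf-macro-customer
print(settings.CLEANUP_FLAG)        # True
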
diff --git a/src/onaptests/configuration/add_delete_cnf_macro_settings.py b/src/onaptests/configuration/add_delete_cnf_macro_settings.py
new file mode 100644 (file)
index 0000000..300a81b
--- /dev/null
@@ -0,0 +1,4 @@
+from .basic_cnf_macro_settings import *  # noqa
+
+SERVICE_INSTANCE_NAME = "Add-Delete-Cnf-Macro"
+CNF_INSTANCE_NAME = "cnf-macro-test-1"
diff --git a/src/onaptests/configuration/basic_cnf_macro_settings.py b/src/onaptests/configuration/basic_cnf_macro_settings.py
index d52f8bb..0ebd927 100644 (file)
@@ -11,13 +11,14 @@ from .settings import *  # noqa
 
 # Specific basic_cnf_macro with multicloud-k8s and yaml config scenario.
 SERVICE_DETAILS = ("Onboarding, distribution and instantiation of a Apache CNF " +
-                   "using macro and native CNF path: cnf-adapter + K8sPlugin")
+                   "using macro and native CNF path: cnf-adapter + K8sPlugin" +
+                   "testing Onap Operator by create and check Service Instance Custom Resource.")
 
 CLEANUP_FLAG = True
 
 # CDS_DD_FILE = Path(get_resource_location("templates/artifacts/dd.json"))
 CDS_CBA_UNENRICHED = Path("no_such_file")
-CDS_CBA_ENRICHED = Path(get_resource_location("templates/artifacts/basic_cnf_cba_enriched.zip"))
+CDS_CBA_ENRICHED = get_resource_location("templates/artifacts/basic_cnf_cba_enriched.zip")
 
 # This scenario uses multicloud-k8s and not multicloud
 # (no registration requested)
@@ -66,7 +67,7 @@ TENANT_NAME = 'dummy_test'
 
 SERVICE_YAML_TEMPLATE = Path(get_resource_location(
     "templates/vnf-services/basic_cnf_macro-service.yaml"))
-generate_service_config_yaml_file(service_name="basic_cnf_macro",  # noqa
+generate_service_config_yaml_file(service_name="basic_cnf_macro_gnb",  # noqa
                                   service_template="basic_cnf_macro-service.yaml.j2",
                                   service_config=SERVICE_YAML_TEMPLATE)
 
@@ -81,3 +82,19 @@ except (FileNotFoundError, ValueError) as exc:
 SERVICE_INSTANCE_NAME = f"basic_cnf_macro_{str(uuid4())}"
 
 MODEL_YAML_TEMPLATE = None
+
+TEST_ONAP_OPERATOR = False
+GROUP = 'onap.com'
+VERSION = 'v1'
+SERVICE_INSTANCE_PLURAL = 'serviceinstances'
+SERVICE_INSTANCE_CR_NAME = 'cr-test-service-instance'
+VNF_PLURAL = 'vnfs'
+VNF_CR_NAME = 'cr-test-vnf'
+VF_CR_NAME = 'cr-test-vf-lp-2'
+MAX_RETRIES_OF_CR_CREATION = 20
+WAIT_FOR_CR = 10
+IN_CLUSTER = True
+CLEANUP_FLAG = True
+K8S_ADDITIONAL_RESOURCES_NAMESPACE = "onap"
+VNF_MODULE_NAME = 'sim_cucp_vnf 0'
+VF_MODULE_NAME = 'sim_cucp_vnf0..SimCucpVnf..helm_cucp..module-1'
diff --git a/src/onaptests/configuration/basic_cps_settings.py b/src/onaptests/configuration/basic_cps_settings.py
index cbf9d4c..6db5c94 100644 (file)
@@ -73,4 +73,5 @@ DB_PRIMARY_HOST = "cps-core-pg-primary"
 DB_PORT = 5432
 DB_LOGIN = "login"
 DB_PASSWORD = "password"
+DB_USE_SSL_CONTEXT = False
 CHECK_POSTGRESQL = False
diff --git a/src/onaptests/configuration/basic_kafka_settings.py b/src/onaptests/configuration/basic_kafka_settings.py
new file mode 100644 (file)
index 0000000..25194a8
--- /dev/null
@@ -0,0 +1,13 @@
+from onaptests.configuration.settings import *  # noqa
+
+SERVICE_NAME = "KafkaService"
+SERVICE_DETAILS = ("Test Kafka functionality, "
+                   "including dummy event publishing/consumption and topic management")
+TOPIC_NAME = "kafka-testing"  # if the topic name needs a delimiter, use '-' only
+EVENT_ID = "event121"
+EVENT_NAME = "kafka testing event"
+CLEANUP_FLAG = True
+
+KUBERNETES_API_GROUP = "kafka.strimzi.io"
+KUBERNETES_API_VERSION = "v1beta2"
+KUBERNETES_API_PLURAL = "kafkatopics"
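
The KUBERNETES_API_* values address Strimzi's KafkaTopic custom resource. The helper actually used by the scenario lives in the new onaptests/utils/kubernetes_kafka.py (not shown in this excerpt); a rough, hedged equivalent with the plain kubernetes client looks like the sketch below, where the namespace and the strimzi.io/cluster label value are assumptions:

from kubernetes import client, config

config.load_kube_config()  # or config.load_incluster_config() when run inside the cluster
api = client.CustomObjectsApi()

topic = {
    "apiVersion": "kafka.strimzi.io/v1beta2",
    "kind": "KafkaTopic",
    "metadata": {
        "name": "kafka-testing",
        # assumption: label of the Strimzi cluster managing ONAP's Kafka
        "labels": {"strimzi.io/cluster": "onap-strimzi"},
    },
    "spec": {"partitions": 1, "replicas": 1},
}

api.create_namespaced_custom_object(
    group="kafka.strimzi.io", version="v1beta2",
    namespace="onap", plural="kafkatopics", body=topic)
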
diff --git a/src/onaptests/configuration/basic_onboard_settings.py b/src/onaptests/configuration/basic_onboard_settings.py
index bf4f40f..7df49f9 100644 (file)
@@ -22,6 +22,7 @@ MODEL_YAML_TEMPLATE = None
 CLEANUP_FLAG = True
 SDC_CLEANUP = True
 VERIFY_DISTRIBUTION = True
+SERVICE_DISTRIBUTION_ENABLED = True
 
 SERVICE_YAML_TEMPLATE = get_resource_location("templates/vnf-services/basic-onboard-service.yaml")
 generate_service_config_yaml_file(service_name="basic_onboard",  # noqa
diff --git a/src/onaptests/configuration/basic_policy_settings.py b/src/onaptests/configuration/basic_policy_settings.py
new file mode 100644 (file)
index 0000000..489865a
--- /dev/null
@@ -0,0 +1,109 @@
+import json
+
+from .settings import *  # noqa
+
+SERVICE_DETAILS = "Policy operations"
+SERVICE_NAME = "POLICY"
+STORE_POLICY = json.dumps({
+    "tosca_definitions_version": "tosca_simple_yaml_1_1_0",
+    "topology_template": {
+        "inputs": {},
+        "policies": [
+            {
+                "onap.policy.test": {
+                    "type": "onap.policies.native.ToscaXacml",
+                    "type_version": "1.0.0",
+                    "properties": {
+                        "policies": [
+                            {
+                                "properties": {
+                                    "description": "Policy that checks if NRCGI is"
+                                                   " a specific value",
+                                    "rules": [
+                                        {
+                                            "condition": {
+                                                "apply": {
+                                                    "operator": "string-equal",
+                                                    "keys": [
+                                                        "sliceData.node.cells.actual.nrcgi"
+                                                    ],
+                                                    "compareWith": {
+                                                        "value": "448903300002"
+                                                    }
+                                                }
+                                            },
+                                            "decision": "Permit",
+                                            "advice": {
+                                                "value": "Cell ID is valid"
+                                            }
+                                        }
+                                    ],
+                                    "default": "Deny"
+                                },
+                                "metadata": {
+                                    "policy-id": 1,
+                                    "policy-version": "1.0.0"
+                                }
+                            }
+                        ]
+                    },
+                    "name": "onap.policy.test",
+                    "version": "1.0.0",
+                    "metadata": {
+                        "action": "policy-test",
+                        "description": "Policy that checks if NRCGI is a specific value",
+                        "policy-id": "onap.policy.test",
+                        "policy-version": "1.0.0"
+                    }
+                }
+            }
+        ]
+    },
+    "name": "ToscaServiceTemplateSimple",
+    "version": "1.0.0"
+})
+
+DEPLOY_POLICY = json.dumps(
+    {
+        "policies": [
+            {
+                "policy-id": "onap.policy.test",
+                "policy-version": "1.0.0"
+            }
+        ]
+    })
+
+POLICY_ID = "onap.policy.test"
+POLICY_VERSION = "1.0.0"
+
+DECISION_REQUEST = json.dumps(
+    {
+        "Request": {
+            "ReturnPolicyIdList": True,
+            "CombinedDecision": False,
+            "Action": [
+                {
+                    "Attribute": [
+                        {
+                            "IncludeInResult": False,
+                            "AttributeId": "action-id",
+                            "Value": "policy-test"
+                        }
+                    ]
+                }
+            ],
+            "Resource": [
+                {
+                    "Attribute": [
+                        {
+                            "IncludeInResult": False,
+                            "AttributeId": "sliceData.node.cells.actual.nrcgi",
+                            "Value": "448903300002"
+                        }
+                    ]
+                }
+            ]
+        }
+    })
+
+CLEANUP_FLAG = True
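
The three payloads above follow the usual Policy Framework sequence: store the TOSCA policy via policy-api, deploy it via policy-pap, then ask the XACML PDP for a decision, expecting "Permit" for the NRCGI value configured above. The scenario's real step is in the new policy_operations.py (not part of this excerpt); the hedged requests sketch below only illustrates that flow, and the in-cluster host names, ports and default credentials are assumptions about a typical OOM deployment:

import requests
from requests.auth import HTTPBasicAuth

from onaptests.configuration.basic_policy_settings import (
    DECISION_REQUEST, DEPLOY_POLICY, STORE_POLICY)

AUTH = HTTPBasicAuth("policyadmin", "zb!XztG34")  # assumed default OOM credentials
HEADERS = {"Content-Type": "application/json", "Accept": "application/json"}

# 1. Store the TOSCA policy through policy-api.
requests.post("https://policy-api.onap:6969/policy/api/v1/policies",
              data=STORE_POLICY, headers=HEADERS, auth=AUTH, verify=False, timeout=30)

# 2. Deploy it to the XACML PDP group through policy-pap.
requests.post("https://policy-pap.onap:6969/policy/pap/v1/pdps/policies",
              data=DEPLOY_POLICY, headers=HEADERS, auth=AUTH, verify=False, timeout=30)

# 3. Request a decision - a matching NRCGI value should yield "Permit".
resp = requests.post("https://policy-xacml-pdp.onap:6969/policy/pdpx/v1/decision",
                     data=DECISION_REQUEST, headers=HEADERS, auth=AUTH,
                     verify=False, timeout=30)
print(resp.json())
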
diff --git a/src/onaptests/configuration/basic_prh_settings.py b/src/onaptests/configuration/basic_prh_settings.py
new file mode 100644 (file)
index 0000000..51a2d5e
--- /dev/null
@@ -0,0 +1,30 @@
+from onaptests.configuration.settings import *  # noqa
+
+SERVICE_NAME = "PRHService"
+SERVICE_DETAILS = "Validate PNF Registration process standalone"
+PNF_REGISTRATION_TOPIC_NAME = "unauthenticated.VES_PNFREG_OUTPUT"
+PNFREADY_TOPIC_NAME = "unauthenticated.PNF_READY"
+
+PNF_NAME = "dummy-ru-PRHTest"
+PNF_ID = "321e0ee9-a1ca-4163-9240-4fd0a6992594"
+PNF_ORCHESTRATION_STATUS = "Inventoried"
+PNF_IN_MAINT = False
+PNF_SPARE_EQUIPMENT_INDICATOR = False
+PNF_NF_ROLE = "sdn controller"
+
+PNF_INSTANCE = {
+    "pnf-name": PNF_NAME,
+    "pnf-id": "321e0ee9-a1ca-4163-9240-4fd0a6992594",
+    "orchestration-status": "Inventoried",
+    "in-maint": False,
+    "spare-equipment-indicator": False,
+    "nf-role": "sdn controller"
+}
+
+PNF_IPADDRESS_V4_OAM = "1.2.3.4"
+PNF_IPADDRESS_V6_OAM = "0:0:0:0:0:ffff:a0a:011"
+PNF_SERIAL_NUMBER = "ORAN_SIM-172.30.1.6-400600927-Simulated Device Melacon"
+
+MAX_ATTEMPTS_TO_CHECK = 5
+WAIT_TIME_SECONDS_BETWEEN_CHECK = 10
+CLEANUP_FLAG = True
diff --git a/src/onaptests/configuration/basic_vm_macro_settings.py b/src/onaptests/configuration/basic_vm_macro_settings.py
index 2f262f4..3781511 100644 (file)
@@ -17,7 +17,7 @@ CLEANUP_FLAG = True
 
 CDS_DD_FILE = Path(get_resource_location("templates/artifacts/dd.json"))
 CDS_CBA_UNENRICHED = Path(get_resource_location("templates/artifacts/basic_vm_cba.zip"))
-CDS_CBA_ENRICHED = Path("/tmp/BASIC_VM_enriched.zip")
+CDS_CBA_ENRICHED = "/tmp/BASIC_VM_enriched.zip"
 
 ONLY_INSTANTIATE = False
 USE_MULTICLOUD = False
diff --git a/src/onaptests/configuration/cba_enrichment_settings.py b/src/onaptests/configuration/cba_enrichment_settings.py
index 72cf647..f287854 100644 (file)
@@ -13,3 +13,41 @@ CLEANUP_FLAG = True
 CDS_DD_FILE = Path(get_resource_location("templates/artifacts/dd.json"))
 CDS_CBA_UNENRICHED = Path(get_resource_location("templates/artifacts/PNF_DEMO.zip"))
 CDS_CBA_ENRICHED = "/tmp/PNF_DEMO_enriched.zip"
+CDS_WORKFLOW_NAME = "config-assign"
+CDS_WORKFLOW_INPUT = {
+    "template-prefix": [
+        "netconfrpc"
+    ],
+    "resolution-key": "day-1",
+    "config-assign-properties": {
+        "stream-count": 10
+    }
+}
+CDS_WORKFLOW_EXPECTED_OUTPUT = {
+    "config-assign-response": {
+        "resolved-payload": {
+            "netconfrpc":
+                "<rpc xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" message-id=\"1\">\n " +
+                "<edit-config>\n <target>\n <running/>\n </target>\n <config>\n    " +
+                "<sample-plugin xmlns=\"urn:opendaylight:params:xml:ns:yang:sample-plugin\">\n" +
+                "      <pg-streams>\n              <pg-stream>\n        <id>fw_udp_1</id>\n   " +
+                "     <is-enabled>true</is-enabled>\n       </pg-stream>\n              " +
+                "<pg-stream>\n        <id>fw_udp_2</id>\n        <is-enabled>true</is-enabled>\n" +
+                "       </pg-stream>\n              <pg-stream>\n        <id>fw_udp_3</id>\n    " +
+                "    <is-enabled>true</is-enabled>\n       </pg-stream>\n              " +
+                "<pg-stream>\n        <id>fw_udp_4</id>\n        <is-enabled>true</is-enabled>\n" +
+                "       </pg-stream>\n              <pg-stream>\n        <id>fw_udp_5</id>\n    " +
+                "    <is-enabled>true</is-enabled>\n       </pg-stream>\n              " +
+                "<pg-stream>\n        <id>fw_udp_6</id>\n        <is-enabled>true</is-enabled>\n" +
+                "       </pg-stream>\n              <pg-stream>\n        <id>fw_udp_7</id>\n    " +
+                "    <is-enabled>true</is-enabled>\n       </pg-stream>\n              " +
+                "<pg-stream>\n        <id>fw_udp_8</id>\n        <is-enabled>true</is-enabled>\n" +
+                "       </pg-stream>\n              <pg-stream>\n        <id>fw_udp_9</id>\n    " +
+                "    <is-enabled>true</is-enabled>\n       </pg-stream>\n              " +
+                "<pg-stream>\n        <id>fw_udp_10</id>\n        " +
+                "<is-enabled>true</is-enabled>\n" +
+                "       </pg-stream>\n           </pg-streams>\n    </sample-plugin>\n " +
+                "</config>\n </edit-config>\n</rpc>\n"
+        }
+    }
+}
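
The new CDS_WORKFLOW_* constants describe the config-assign workflow of the PNF_DEMO blueprint and the netconf payload it is expected to resolve. For reference, such a workflow is driven through the blueprintsprocessor process endpoint; a hedged sketch of the equivalent call, where the request identifiers, blueprint name/version, in-cluster URL and credentials are assumptions:

import json
import uuid

import requests

body = {
    "commonHeader": {"originatorId": "onaptests",
                     "requestId": str(uuid.uuid4()),
                     "subRequestId": str(uuid.uuid4())},
    "actionIdentifiers": {"blueprintName": "PNF_DEMO",   # assumption: name packaged in PNF_DEMO.zip
                          "blueprintVersion": "1.0.0",   # assumption
                          "actionName": "config-assign",
                          "mode": "sync"},
    "payload": {"config-assign-request": {
        "template-prefix": ["netconfrpc"],
        "resolution-key": "day-1",
        "config-assign-properties": {"stream-count": 10},
    }},
}

resp = requests.post(
    "http://cds-blueprints-processor-http:8080/api/v1/execution-service/process",
    json=body, auth=("ccsdkapps", "ccsdkapps"), timeout=60)
print(json.dumps(resp.json(), indent=2))  # compare against CDS_WORKFLOW_EXPECTED_OUTPUT
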
diff --git a/src/onaptests/configuration/cba_verification_settings.py b/src/onaptests/configuration/cba_verification_settings.py
new file mode 100644 (file)
index 0000000..5205ec5
--- /dev/null
@@ -0,0 +1,69 @@
+import os
+
+from .settings import *  # noqa
+
+SERVICE_NAME = "CBA_VERIFICATION"
+SERVICE_DETAILS = """Verify if CBAs are deployed and their content is same like in gitlab"""
+CLEANUP_FLAG = True
+LOCAL_PATH = "/tmp"
+B2B_EQUINIX_POC_CBA = {
+    "name": "B2B_EQUINIX_POC_CBA",
+    "version": "1.0.0",
+    "gitlab_project_id": "210484",
+    "gitlab_repo_cba": "cba",
+    "enrichment": True,
+    "gitlab_branch": "master",
+}
+B2B_POC_CBA = {
+    "name": "B2B_POC_CBA",
+    "version": "1.0.0",
+    "gitlab_project_id": "176661",
+    "gitlab_repo_cba": "cba",
+    "enrichment": True,
+    "gitlab_branch": "develop",
+}
+INTENT_MGMT_CBA = {
+    "name": "INTENT-MGMT-CBA",
+    "version": "1.0.0",
+    "gitlab_project_id": "199504",
+    "gitlab_repo_cba": "Intent-Mgmt-CBA/INTENT-MGMT-CBA",
+    "enrichment": True,
+    "gitlab_branch": "develop",
+}
+CBA_GNB_SIM = {
+    "name": "CBA_GNB_SIM",
+    "version": "1.0.0",
+    "gitlab_project_id": "215376",
+    "gitlab_repo_cba": "gnb-simulator-with-ran-inventory/common-cba",
+    "enrichment": True,
+    "gitlab_branch": "main",
+}
+HUAWEI_EMS_CBA = {
+    "name": "SLICING_CBA",
+    "version": "1.1.0",
+    "gitlab_project_id": "124384",
+    "gitlab_repo_cba": "cba",
+    "enrichment": True,
+    "gitlab_branch": "develop",
+}
+CBA_LIST = [
+    HUAWEI_EMS_CBA
+]
+test_env_name = os.getenv('TEST_ENV_NAME')
+if test_env_name and 'b2b' in test_env_name:
+    CBA_LIST = [
+        B2B_EQUINIX_POC_CBA,
+        B2B_POC_CBA,
+        INTENT_MGMT_CBA
+    ]
+
+ENRICHMENT_FILES = ['Definitions/data_types.json',
+                    'Definitions/node_types.json',
+                    'Definitions/resources_definition_types.json']
+IGNORE_FILES = ['Tests/',
+                'pom.xml',
+                '.DS_Store',
+                'Archive.zip',
+                'CBA_GNB_SIM.zip']
+GITLAB_BASE_URL = "https://gitlab.devops.telekom.de/api/v4"
+GITLAB_ACCESS_TKN = "glpat-nzqxs_HMQLYz7SrhxKi2"
diff --git a/src/onaptests/configuration/cds_resource_resolution_settings.py b/src/onaptests/configuration/cds_resource_resolution_settings.py
index 7b311a1..140cd27 100644 (file)
@@ -63,7 +63,7 @@ CDS_DD_FILE = Path(get_resource_location(
     "templates/artifacts/cds-resource-resolution/dd.json"))
 CDS_CBA_UNENRICHED = Path(get_resource_location(
     "templates/artifacts/cds-resource-resolution/resource-resolution.zip"))
-CDS_CBA_ENRICHED = Path("/tmp/resource-resolution-enriched.zip")
+CDS_CBA_ENRICHED = "/tmp/resource-resolution-enriched.zip"
 CDS_WORKFLOW_NAME = "resource-resolution"
 CDS_WORKFLOW_INPUT = {
     "template-prefix": [
@@ -79,8 +79,17 @@ CDS_WORKFLOW_INPUT = {
 CDS_WORKFLOW_EXPECTED_OUTPUT = {
     "resource-resolution-response": {
         "meshed-template": {
-            "helloworld-velocity": "{\n  \"default\": \"ok\",\n  \"input\": \"ok\",\n  \"script\": {\n    \"python\": \"ok\",\n    \"kotlin\": \"ok\"\n  },\n  \"db\": \"ok\",\n  \"rest\": {\n    \"GET\": \"A046E51D-44DC-43AE-BBA2-86FCA86C5265\",\n    \"POST\": \"post:ok\",\n    \"PUT\": \"put:ok\",\n    \"PATCH\": \"patch:ok\",\n    \"DELETE\": \"delete:ok\"\n  }\n}\n",  # noqa
-            "helloworld-jinja": "{\n  \"default\": \"ok\",\n  \"input\": \"ok\",\n  \"script\": {\n    \"python\": \"ok\",\n    \"kotlin\": {\n      \"base\": \"ok\"\n      \"from suspend function\": \"ok\"\n    }\n  },\n  \"db\": \"ok\",\n  \"rest\": {\n    \"GET\": \"A046E51D-44DC-43AE-BBA2-86FCA86C5265\",\n    \"GET_ID\": \"74FE67C6-50F5-4557-B717-030D79903908\",\n    \"POST\": \"post:ok\",\n    \"PUT\": \"put:ok\",\n    \"PATCH\": \"patch:ok\",\n    \"DELETE\": \"delete:ok\"\n  }\n}\n"  # noqa
+            "helloworld-velocity": "{\n  \"default\": \"ok\",\n  \"input\": \"ok\",\n  " +
+            "\"script\": {\n    \"python\": \"ok\",\n    \"kotlin\": \"ok\"\n  },\n  \"rest\": " +
+            "{\n    \"GET\": \"${v_get}\",\n    \"POST\": \"${v_post}\",\n" +
+            "    \"PUT\": \"${v_put}\",\n    \"PATCH\": \"${v_patch}\",\n    " +
+            "\"DELETE\": \"${v_del}\"\n  }\n}\n",
+            "helloworld-jinja": "{\n  \"default\": \"ok\",\n  \"input\": \"ok\",\n  \"script\": " +
+            "{\n    \"python\": \"ok\",\n    \"kotlin\": {\n      \"base\": \"ok\"\n      " +
+            "\"from suspend function\": \"ok\"\n    }\n  },\n  \"rest\": " +
+            "{\n    \"GET\": \"${j_get}\",\n    \"GET_ID\": \"${j_get_id}\",\n    \"POST\": " +
+            "\"${j_post}\",\n    \"PUT\": \"${j_put}\",\n    \"PATCH\": \"${j_patch}\",\n    " +
+            "\"DELETE\": \"${j_del}\"\n  }\n}\n"
         }
     }
 }
diff --git a/src/onaptests/configuration/check_time_sync_settings.py b/src/onaptests/configuration/check_time_sync_settings.py
new file mode 100644 (file)
index 0000000..2eee7b7
--- /dev/null
@@ -0,0 +1,12 @@
+from .settings import *  # noqa
+from .settings import K8S_TESTS_NAMESPACE
+
+SERVICE_NAME = "Check Time Sync on Nodes"
+SERVICE_DETAILS = "Check Time Sync on K8s Nodes"
+DAEMON_NAME = "ntp-checker-test"
+DAEMON_NS = K8S_TESTS_NAMESPACE
+DAEMON_READY_WAIT_TIMOUT_SEC = 120
+DAEMON_DEPLOYMENT_RATIO = 0.7
+NODE_TIME_QUERIES_NUMBER = 5
+MAX_TIME_DIFF_MS = 5000
+CLEANUP_FLAG = True
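
These values suggest the check deploys an ntp-checker DaemonSet (template added below under templates/artifacts/), waits for DAEMON_DEPLOYMENT_RATIO of its pods to become ready, samples node time NODE_TIME_QUERIES_NUMBER times and presumably fails when clocks drift apart by more than MAX_TIME_DIFF_MS. A conceptual sketch of such a pass/fail criterion; the real logic is in the new onaptests/utils/ntp_checker.py, which is not shown here:

def nodes_within_max_skew(node_times_ms: dict, max_diff_ms: int = 5000) -> bool:
    """True when the largest pairwise clock difference across nodes stays below max_diff_ms."""
    values = list(node_times_ms.values())
    return (max(values) - min(values)) < max_diff_ms

# node-2 is ~9 s ahead, so with MAX_TIME_DIFF_MS = 5000 the check would fail.
print(nodes_within_max_skew({"node-1": 1_000.0, "node-2": 10_000.0}))  # False
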
diff --git a/src/onaptests/configuration/generic_network_settings.py b/src/onaptests/configuration/generic_network_settings.py
new file mode 100644 (file)
index 0000000..7440fe8
--- /dev/null
@@ -0,0 +1,95 @@
+import os
+from pathlib import Path
+from uuid import uuid4
+
+from yaml import SafeLoader, load
+
+import onaptests.utils.exceptions as onap_test_exceptions
+from onaptests.utils.resources import get_resource_location
+
+from .settings import *  # noqa
+
+SERVICE_DETAILS = "Add Generic Network resource in service using macro"
+
+
+# CDS_DD_FILE = Path(get_resource_location("templates/artifacts/dd.json"))
+CDS_CBA_UNENRICHED = Path("no_such_file")
+CDS_CBA_ENRICHED = get_resource_location("templates/artifacts/basic_cnf_cba_enriched.zip")
+
+# This scenario uses multicloud-k8s and not multicloud
+# (no registration requested)
+USE_MULTICLOUD = False
+# Set ONLY_INSTANTIATE to true to run an instantiation without repeating
+# onboarding and related AAI configuration (Cloud config)
+ONLY_INSTANTIATE = False
+EXPOSE_SERVICES_NODE_PORTS = False
+CLEANUP_FLAG = True
+
+# Relative path to config file to set k8s connectivity information
+K8S_CONFIG = get_resource_location("templates/artifacts/config")
+
+VENDOR_NAME = "g-network_macro_vendor"
+
+CLOUD_REGION_CLOUD_OWNER = "g-network-cloud-owner"  # must not contain _
+CLOUD_REGION_ID = "k8sregion-g-network-macro"
+CLOUD_REGION_TYPE = "k8s"
+CLOUD_REGION_VERSION = "1.0"
+CLOUD_DOMAIN = "Default"
+CLOUD_OWNER_DEFINED_TYPE = "t1"
+
+COMPLEX_PHYSICAL_LOCATION_ID = "lannion"
+COMPLEX_DATA_CENTER_CODE = "1234-5"
+AVAILABILITY_ZONE_NAME = "g-network-availability-zone"
+AVAILABILITY_ZONE_TYPE = "nova"
+
+GLOBAL_CUSTOMER_ID = "g-network-macro-customer"
+
+OWNING_ENTITY = "g-network-macro_owning_entity"
+PROJECT = "g-network-macro_project"
+LINE_OF_BUSINESS = "g-network-macro_lob"
+PLATFORM = "g-network-macro_platform"
+
+# The cloud Part
+# Assuming a cloud.yaml is available, use the openstack client
+# to retrieve cloud info and avoid data duplication
+# For basic_cnf, no tenant information is required but some dummy
+# information shall be provided by default
+# So it is not requested to set OS_TEST_CLOUD
+TEST_CLOUD = os.getenv('OS_TEST_CLOUD')
+VIM_USERNAME = 'dummy'
+VIM_PASSWORD = 'dummy123'
+VIM_SERVICE_URL = 'http://10.12.25.2:5000/v3'
+TENANT_ID = '123456'
+TENANT_NAME = 'dummy_test'
+
+TEST_ONAP_OPERATOR = False
+GROUP = 'onap.com'
+VERSION = 'v1'
+SERVICE_INSTANCE_PLURAL = 'serviceinstances'
+SERVICE_INSTANCE_CR_NAME = 'cr-test-service-instance'
+VNF_PLURAL = 'vnfs'
+VNF_CR_NAME = 'cr-test-vnf'
+VF_CR_NAME = 'cr-test-vf-lp-2'
+MAX_RETRIES_OF_CR_CREATION = 20
+WAIT_FOR_CR = 10
+IN_CLUSTER = True
+K8S_ADDITIONAL_RESOURCES_NAMESPACE = "onap"
+VNF_MODULE_NAME = 'sim_cucp_vnf 0'
+VF_MODULE_NAME = 'sim_cucp_vnf0..SimCucpVnf..helm_cucp..module-1'
+
+SERVICE_YAML_TEMPLATE = Path(get_resource_location(
+    "templates/vnf-services/generic_network-service.yaml"))
+
+try:
+    # Try to retrieve the SERVICE NAME from the yaml file
+    with open(SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+        yaml_config_file = load(yaml_template, SafeLoader)
+        SERVICE_NAME = next(iter(yaml_config_file.keys()))
+except (FileNotFoundError, ValueError) as exc:
+    raise onap_test_exceptions.TestConfigurationException from exc
+
+SERVICE_INSTANCE_NAME = f"basic_nw_{str(uuid4())}"
+
+MODEL_YAML_TEMPLATE = None
+
+REQUEST_TIMEOUT = 60
diff --git a/src/onaptests/configuration/instantiate_pnf_without_registration_event_settings.py b/src/onaptests/configuration/instantiate_pnf_without_registration_event_settings.py
new file mode 100644 (file)
index 0000000..9e72cdd
--- /dev/null
@@ -0,0 +1,5 @@
+from .pnf_macro_settings import *  # noqa
+
+SERVICE_INSTANCE_NAME = "Pnf-Macro-Without-Ves-Event"
+PNF_WITHOUT_VES = True
+PNF_WITH_VES = False
diff --git a/src/onaptests/configuration/instantiate_service_without_resource_settings.py b/src/onaptests/configuration/instantiate_service_without_resource_settings.py
new file mode 100644 (file)
index 0000000..f3dc570
--- /dev/null
@@ -0,0 +1,43 @@
+from pathlib import Path
+from uuid import uuid4
+
+from yaml import SafeLoader, load
+
+import onaptests.utils.exceptions as onap_test_exceptions
+from onaptests.utils.resources import get_resource_location
+
+from .settings import *  # noqa
+
+CLEANUP_FLAG = True
+
+GLOBAL_CUSTOMER_ID = "pnf_macrocustomer"
+OWNING_ENTITY = "test_owning_entity"
+PROJECT = "basicnf_macro_project"
+LINE_OF_BUSINESS = "basicnf_macro_lob"
+
+SERVICE_DETAILS = ("Onboarding, distribution and instantiation of service " +
+                   "without resources using macro")
+
+CDS_CBA_UNENRICHED = Path("no_such_file")
+CDS_CBA_ENRICHED = get_resource_location("templates/artifacts/cba_enriched_new.zip")
+SERVICE_YAML_TEMPLATE = Path(get_resource_location(
+    "templates/vnf-services/instantiate_service_without_resource.yaml"))
+
+# This scenario uses multicloud-k8s and not multicloud
+# (no registration requested)
+USE_MULTICLOUD = False
+# Set ONLY_INSTANTIATE to true to run an instantiation without repeating
+# onboarding and related AAI configuration (Cloud config)
+ONLY_INSTANTIATE = False
+K8S_CONFIG = get_resource_location("templates/artifacts/config")
+
+try:
+    # Try to retrieve the SERVICE NAME from the yaml file
+    with open(SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+        yaml_config_file = load(yaml_template, SafeLoader)
+        SERVICE_NAME = next(iter(yaml_config_file.keys()))
+except (FileNotFoundError, ValueError) as exc:
+    raise onap_test_exceptions.TestConfigurationException from exc
+
+SERVICE_INSTANCE_NAME = f"svc_without_res_{str(uuid4())}"
+MODEL_YAML_TEMPLATE = None
diff --git a/src/onaptests/configuration/modify_service_pnf_settings.py b/src/onaptests/configuration/modify_service_pnf_settings.py
new file mode 100644 (file)
index 0000000..df09580
--- /dev/null
@@ -0,0 +1,52 @@
+from pathlib import Path
+from uuid import uuid4
+
+from yaml import SafeLoader, load
+
+import onaptests.utils.exceptions as onap_test_exceptions
+from onaptests.utils.resources import get_resource_location
+
+from .settings import *  # noqa
+
+ONLY_INSTANTIATE = False
+CLEANUP_FLAG = True
+USE_MULTICLOUD = False
+
+VENDOR_NAME = "pnf_macro_vendor"
+SERVICE_NAME = "Modify_PNF_Test"
+SERVICE_DETAILS = ("Onboarding, distribution and registration of PNF using macro " +
+                   "after creation of empty service first")
+SERVICE_YAML_TEMPLATE = Path(get_resource_location(
+    "templates/vnf-services/modify-service-pnf.yaml"))
+SERVICE_INSTANCE_NAME = f"pnf-modify_{str(uuid4())}"
+
+try:
+    # Try to retrieve the SERVICE NAME from the yaml file
+    with open(SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+        yaml_config_file = load(yaml_template, SafeLoader)
+        SERVICE_NAME = next(iter(yaml_config_file.keys()))
+except (FileNotFoundError, ValueError) as exc:
+    raise onap_test_exceptions.TestConfigurationException from exc
+
+PNF_WITHOUT_VES = True
+PNF_WITH_VES = False
+CDS_DD_FILE = Path(get_resource_location("templates/artifacts/dd.json"))
+CDS_CBA_UNENRICHED = Path(get_resource_location("templates/artifacts/PNF_DEMO.zip"))
+CDS_CBA_ENRICHED = "/tmp/cba_enriched_new.zip"
+
+GLOBAL_CUSTOMER_ID = "pnf_macrocustomer"
+OWNING_ENTITY = "test_owning_entity"
+PROJECT = "basicnf_macro_project"
+LOB = "basicnf_macro_lob"
+LINE_OF_BUSINESS = "basicnf_macro_lob"
+PLATFORM = "basicnf_macro_platform"
+
+INSTANTIATION_TIMEOUT = 600
+K8S_CONFIG = get_resource_location("templates/artifacts/config")
+PNF_WAIT_TIME = 60.0
+PNF_REGISTRATION_NUMBER_OF_TRIES = 30
+CLEANUP_ACTIVITY_TIMER = 10
+ORCHESTRATION_REQUEST_TIMEOUT = 60.0 * 15  # 15 minutes in seconds
+
+# Disable YAML SDC model definition which means all SDC config reside in SERVICE_YAML_TEMPLATE
+MODEL_YAML_TEMPLATE = None
diff --git a/src/onaptests/configuration/pnf_with_ves_event_settings.py b/src/onaptests/configuration/pnf_with_ves_event_settings.py
new file mode 100644 (file)
index 0000000..4bab193
--- /dev/null
@@ -0,0 +1,4 @@
+from .pnf_macro_settings import *  # noqa
+
+SERVICE_INSTANCE_NAME = "Pnf-Macro-With-Ves-Event"
+PNF_WITH_VES = True
diff --git a/src/onaptests/configuration/settings.py b/src/onaptests/configuration/settings.py
index a3aa3b9..b0d1ad3 100644 (file)
@@ -8,6 +8,7 @@
 
 import random
 import string
+
 from jinja2 import Environment, PackageLoader
 
 # Variables to set logger information
@@ -68,6 +69,10 @@ SDNC_DB_PRIMARY_HOST = "mariadb-galera.onap.svc.cluster.local"
 SDNC_DB_PORT = 3306
 
 
+KAFKA_USER = "strimzi-kafka-admin"
+KUBERNETES_NAMESPACE = "onap"
+
+
 # We need to create a service file with a random service name,
 # to be sure that we force onboarding
 def generate_service_config_yaml_file(service_name: str,
diff --git a/src/onaptests/configuration/status_settings.py b/src/onaptests/configuration/status_settings.py
index a291c21..2e95cd1 100644 (file)
@@ -3,6 +3,7 @@ from .settings import *  # noqa
 SERVICE_NAME = "Status Check"
 SERVICE_DETAILS = "Checks status of all k8s resources in the selected namespace"
 STATUS_RESULTS_DIRECTORY = "/tmp"
+STORE_LOGS = True
 STORE_ARTIFACTS = True
 CHECK_POD_VERSIONS = True
 IGNORE_EMPTY_REPLICAS = False
@@ -22,6 +23,8 @@ FULL_LOGS_CONTAINERS = [
 # patterns to be excluded from the check
 WAIVER_LIST = ['integration']
 
+WAIVER_EVENTS = ['PolicyViolation']
+
 EXCLUDED_LABELS = {
 }
 
diff --git a/src/onaptests/configuration/ves_publish_settings.py b/src/onaptests/configuration/ves_publish_settings.py
new file mode 100644 (file)
index 0000000..d590ca9
--- /dev/null
@@ -0,0 +1,12 @@
+from onaptests.configuration.settings import *  # noqa
+
+SERVICE_NAME = "VesCollectorService"
+SERVICE_DETAILS = "Test publishing and reception of ves events"
+PNF_REGISTRATION_TOPIC_NAME = "unauthenticated.VES_PNFREG_OUTPUT"
+PNF_REGISTRATION_VES_DOMAIN_NAME = "pnfRegistration"
+PNF_SOURCE_NAME_IN_VES_EVENT = "dummy-ru-vesCollectorTest"
+
+DMAAP_USERNAME = "dcae@dcae.onap.org"
+DMAAP_PASSWORD = "demo123456!"
+
+VES_VERSION = "v7"
diff --git a/src/onaptests/scenario/aai_initial_data_setup.py b/src/onaptests/scenario/aai_initial_data_setup.py
new file mode 100644 (file)
index 0000000..5809fa7
--- /dev/null
@@ -0,0 +1,79 @@
+from onapsdk.configuration import settings
+
+from onaptests.scenario.scenario_base import BaseScenarioStep, ScenarioBase
+from onaptests.steps.AAIInitialSetup.create_operations import \
+    CreateOperationStep
+from onaptests.steps.AAIInitialSetup.get_operations import GetOperationsStep
+from onaptests.steps.AAIInitialSetup.update_operation_step import \
+    UpdateOperationStep
+from onaptests.steps.base import BaseStep
+from onaptests.steps.cloud.link_cloudregion_to_project import \
+    LinkCloudRegionToProjectStep
+from onaptests.steps.cloud.link_lineofbusiness_to_tenant import \
+    LinkLineOfBusinessToTenantStep
+from onaptests.steps.cloud.link_owningentity_to_tenant import \
+    LinkOwningEntityToTenantStep
+
+
+class AaiInitialDataSetup(BaseScenarioStep):
+    """Step created to run scenario and generate report."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self._logger.info("Instantiation started")
+
+        self.add_step(CreateOperationStep())
+        self.add_step(GetOperationsStep())
+        # self.add_step(GetAllOperationsStep())
+        self.add_step(UpdateOperationStep())
+
+        # create relationship between cloud region and project
+        self.add_step(LinkCloudRegionToProjectStep())
+        # create relationship between owning entity and tenant
+        self.add_step(LinkOwningEntityToTenantStep())
+        # create relationship between line of business and tenant
+        self.add_step(LinkLineOfBusinessToTenantStep())
+
+    @property
+    def description(self) -> str:
+        """Step description.
+
+            Used for reports
+
+        Returns:
+            str: Step description
+
+        """
+        return "AAIInitialSetup for performing CRUD operations on entities.."
+
+    @property
+    def component(self) -> str:
+        """Component name.
+
+            Name of the component this step relates to.
+            Usually the name of an ONAP component.
+
+        Returns:
+            str: Component name
+        """
+        return "AAI"
+
+    @property
+    def service_instance_name(self) -> str:
+        """Service instance name.
+
+        Returns:
+            str: Service instance name
+
+        """
+        return settings.SERVICE_INSTANCE_NAME
+
+
+class AAICrud(ScenarioBase):
+    """AaiInitialDataSetup to test CRUD operation for all entities."""
+
+    def __init__(self, **kwargs):
+        """Init AaiInitialDataSetup execution started."""
+        super().__init__('aai_initial_data_setup', **kwargs)
+        self.test = AaiInitialDataSetup()
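
Besides the xtesting entry point registered in setup.cfg, the scenario can also be exercised directly, mirroring the __main__ pattern the new basic_kafka.py uses further below:

# Assumes ONAP_PYTHON_SDK_SETTINGS points at aai_initial_data_setup_settings
# (see the settings sketch earlier in this change).
from onaptests.scenario.aai_initial_data_setup import AAICrud

scenario = AAICrud()
scenario.run()    # executes the CRUD and relationship steps added above
scenario.clean()  # triggers cleanup for steps created with CLEANUP_FLAG
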
diff --git a/src/onaptests/scenario/add_delete_cnf_macro.py b/src/onaptests/scenario/add_delete_cnf_macro.py
new file mode 100644 (file)
index 0000000..4765ed1
--- /dev/null
@@ -0,0 +1,17 @@
+"""Instantiate service with CNF using SO macro flow."""
+import logging
+
+from onaptests.scenario.scenario_base import ScenarioBase
+from onaptests.steps.instantiate.so.add_cnf_in_service import AddCnfInService
+
+
+class AddDeleteCnfInRunningSvcScenario(ScenarioBase):
+    """Instantiate a service with CNF, then add and delete CNF from the service."""
+
+    __logger = logging.getLogger(__name__)
+
+    def __init__(self, **kwargs):
+        """Add cnf in running service."""
+        super().__init__('add_delete_cnf_macro', **kwargs)
+        self.__logger.info("CnfMacro init started")
+        self.test = AddCnfInService()
diff --git a/src/onaptests/scenario/add_pnf_in_running_svc.py b/src/onaptests/scenario/add_pnf_in_running_svc.py
new file mode 100644 (file)
index 0000000..e09b8e6
--- /dev/null
@@ -0,0 +1,98 @@
+"""Instantiate service with PNF using SO macro flow."""
+import logging
+
+import yaml
+from onapsdk.configuration import settings
+
+from onaptests.scenario.scenario_base import (BaseStep, ScenarioBase,
+                                              YamlTemplateBaseScenarioStep)
+from onaptests.steps.instantiate.service_macro import \
+    YamlTemplateServiceMacroInstantiateStep
+from onaptests.steps.instantiate.so.add_pnf_in_service import AddPnfInService
+from onaptests.steps.instantiate.so.delete_pnf_in_service import \
+    DeletePnfMacroInService
+from onaptests.steps.onboard.cds import CbaPublishStep
+
+
+class AddPnfInRunningSvcScenario(YamlTemplateBaseScenarioStep):
+    """Step created to run scenario and generate report."""
+
+    def __init__(self):
+        """Initialize step.
+
+        Substeps:
+            - YamlTemplateServiceMacroInstantiateStep.
+        """
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self._yaml_template = None
+        self._logger.info("AddPnfInRunningSvcScenario started")
+        self.add_step(CbaPublishStep())
+        self.add_step(YamlTemplateServiceMacroInstantiateStep())
+        self.add_step(DeletePnfMacroInService())
+        self.add_step(AddPnfInService())
+
+    @property
+    def description(self) -> str:
+        """Step description.
+
+        Used for reports
+
+        Returns:
+            str: Step description
+
+        """
+        return "PNF macro scenario step"
+
+    @property
+    def component(self) -> str:
+        """Component name.
+
+        Name of the component this step relates to.
+            Usually the name of an ONAP component.
+
+        Returns:
+            str: Component name
+
+        """
+        return "TEST"
+
+    @property
+    def yaml_template(self) -> dict:
+        """YAML template abstract property.
+
+        Every YAML template step need to implement that property.
+
+        Returns:
+            dict: YAML template
+
+        """
+        if not self._yaml_template:
+            with open(settings.SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+                self._yaml_template: dict = yaml.safe_load(yaml_template)
+        return self._yaml_template
+
+    @property
+    def model_yaml_template(self) -> dict:
+        return {}
+
+    @property
+    def service_instance_name(self) -> str:
+        """Service instance name.
+
+        Returns:
+            str: Service instance name
+
+        """
+        return settings.SERVICE_INSTANCE_NAME
+
+
+class AddPnfInRunningSvc(ScenarioBase):
+    """Run PNF simulator and onboard then instantiate a service with PNF."""
+
+    __logger = logging.getLogger(__name__)
+
+    def __init__(self, **kwargs):
+        """Add pnf in running service."""
+        super().__init__('add_pnf_in_running_service', **kwargs)
+        self.__logger.info("PnfMacro init started")
+        self.test = AddPnfInRunningSvcScenario()
diff --git a/src/onaptests/scenario/basic_cds.py b/src/onaptests/scenario/basic_cds.py
index 9cb4a54..313584a 100644 (file)
@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 """Simple CDS blueprint erichment test scenario."""
 from onaptests.scenario.scenario_base import ScenarioBase
-from onaptests.steps.onboard.cds import CbaEnrichStep
+from onaptests.steps.onboard.cds import CbaProcessStep
 
 
 class CDSBlueprintEnrichment(ScenarioBase):
     """Enrich simple blueprint using CDS blueprintprocessor."""
 
     def __init__(self, **kwargs):
-        """Init CDS blueprint enrichment use case."""
+        """Init CDS blueprint CbaProcessStep use case."""
         super().__init__('basic_cds', **kwargs)
-        self.test = CbaEnrichStep()
+        self.test = CbaProcessStep()
diff --git a/src/onaptests/scenario/basic_cnf_macro.py b/src/onaptests/scenario/basic_cnf_macro.py
index 95fafad..d13053b 100644 (file)
@@ -4,8 +4,9 @@ from yaml import SafeLoader, load
 
 from onaptests.scenario.scenario_base import (BaseStep, ScenarioBase,
                                               YamlTemplateBaseScenarioStep)
-from onaptests.steps.instantiate.service_macro import \
-    YamlTemplateServiceMacroInstantiateStep
+from onaptests.steps.instantiate.service_macro import (
+    YamlTemplateServiceMacroInstantiateStep,
+    YamlTemplateServiceOperatorInstantiateStep)
 from onaptests.steps.onboard.cds import CbaPublishStep
 
 
@@ -17,12 +18,16 @@ class BasicCnfMacroStep(YamlTemplateBaseScenarioStep):
 
         Substeps:
             - CbaPublishStep
-            - YamlTemplateServiceMacroInstantiateStep.
+            - YamlTemplateServiceMacroInstantiateStep
+            - CheckOnapVnfCr.
         """
         super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
         self._yaml_template: dict = None
         self.add_step(CbaPublishStep())
-        self.add_step(YamlTemplateServiceMacroInstantiateStep())
+        if not settings.TEST_ONAP_OPERATOR:
+            self.add_step(YamlTemplateServiceMacroInstantiateStep())
+        elif settings.TEST_ONAP_OPERATOR:
+            self.add_step(YamlTemplateServiceOperatorInstantiateStep())
 
     @property
     def description(self) -> str:
@@ -34,7 +39,9 @@ class BasicCnfMacroStep(YamlTemplateBaseScenarioStep):
             str: Step description
 
         """
-        return "Basic CNF macro scenario step"
+        if not settings.TEST_ONAP_OPERATOR:
+            return "Basic CNF macro scenario step"
+        return "Basic CNF macro and Onap-Operator scenario step"
 
     @property
     def component(self) -> str:
diff --git a/src/onaptests/scenario/basic_kafka.py b/src/onaptests/scenario/basic_kafka.py
new file mode 100644 (file)
index 0000000..7afe562
--- /dev/null
@@ -0,0 +1,178 @@
+import json
+
+from onapsdk.configuration import settings
+from onapsdk.kafka import onap_kafka
+
+from onaptests.scenario.scenario_base import BaseScenarioStep, ScenarioBase
+from onaptests.steps.base import BaseStep
+from onaptests.utils import kubernetes_kafka
+from onaptests.utils.exceptions import OnapTestException
+
+
+def get_kafka_password():
+    """
+    Retrieves the Kafka admin password.
+
+    This method initializes a KubernetesKafka reader object,
+    reads the Kafka admin secret, and retrieves the Kafka admin password.
+
+    Returns:
+        str: The Kafka admin password.
+    """
+    reader = kubernetes_kafka.KubernetesKafka()
+    reader.read_kafka_admin_secret()
+    kafka_password = reader.get_kafka_admin_password()
+    return kafka_password
+
+
+class KafkaEventStep(BaseScenarioStep):
+    """Step to test Kafka functionality,
+    including dummy event publishing/consumption and topic management."""
+
+    def __init__(self) -> None:
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self.add_step(CreateKafkaTopicStep())
+        self.add_step(SubmitDummyEventToKafkaStep())
+        self.add_step(ReceiveDummyEventFromKafkaStep())
+
+    @property
+    def component(self) -> str:
+        return "KAFKA"
+
+    @property
+    def description(self) -> str:
+        return "Kafka Event Handling Step"
+
+
+class CreateKafkaTopicStep(BaseStep):
+    """Step to create a new topic on Kafka."""
+
+    def __init__(self) -> None:
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Create a new topic on Kafka"
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "KAFKA"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Create a new topic on Kafka"""
+        super().execute()
+
+        try:
+            kubernetes_kafka.create_topic(settings.TOPIC_NAME)
+        except Exception as ce:
+            self._logger.error("Failed to create topic on Kafka: %s", {str(ce)})
+            raise OnapTestException(ce) from ce
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        self._logger.info("Deleting topic if exist on kafka.")
+        try:
+            kubernetes_kafka.delete_topic(settings.TOPIC_NAME)
+        except Exception as e:
+            self._logger.error("Exception while deleting topic on kafka: %s", e, exc_info=1)
+            raise OnapTestException(e) from e
+
+        super().cleanup()
+
+
+class SubmitDummyEventToKafkaStep(BaseStep):
+    """Step to submit a dummy event to a newly created Kafka topic."""
+
+    def __init__(self) -> None:
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Submit dummy event to Kafka by publishing it"
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "KAFKA"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Submit dummy event to Kafka"""
+        super().execute()
+
+        try:
+            event_data = {'event_id': settings.EVENT_ID, 'event_name': settings.EVENT_NAME}
+            kafka_password = get_kafka_password()
+            onap_kafka.publish_event_on_topic(settings.KAFKA_USER, kafka_password,
+                                              json.dumps(event_data).encode('utf-8'),
+                                              settings.TOPIC_NAME)
+        except Exception as ce:
+            self._logger.error("Failed to publish event on Kafka: %s", {str(ce)})
+            raise OnapTestException(ce) from ce
+
+
+class ReceiveDummyEventFromKafkaStep(BaseStep):
+    """Step to receive a dummy event from Kafka."""
+
+    def __init__(self) -> None:
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Receive dummy event from Kafka"
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "Kafka"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Receive dummy event from Kafka"""
+        super().execute()
+
+        kafka_password = get_kafka_password()
+
+        try:
+            events = onap_kafka.get_events_for_topic(settings.KAFKA_USER, kafka_password,
+                                                     settings.TOPIC_NAME)
+            self._logger.info("Below events are existed on topic.")
+            self._logger.info(events)
+            is_event_found = False
+            for event in events:
+                event_obj = json.loads(event)
+                if ((event_obj['event_id']) ==
+                        settings.EVENT_ID and
+                        event_obj['event_name'] ==
+                        settings.EVENT_NAME):
+                    self._logger.info("Received the required event from Kafka")
+                    is_event_found = True
+                    break
+            if not is_event_found:
+                msg = "Did not get the required event from Kafka"
+                self._logger.error(msg)
+                raise OnapTestException(msg)
+
+        except Exception as ce:
+            self._logger.debug("Failed to receive the event from Kafka: %s", {str(ce)})
+            raise OnapTestException(ce) from ce
+
+
+class KafkaTestCase(ScenarioBase):
+    """Test case to test Kafka functionality,
+    including dummy event publish/consume and topic creation/deletion."""
+
+    def __init__(self, **kwargs):
+        super().__init__('basic_kafka', **kwargs)
+        self.test: BaseStep = KafkaEventStep()
+
+
+if __name__ == '__main__':
+    tt = KafkaTestCase()
+    tt.run()
+    tt.clean()
diff --git a/src/onaptests/scenario/basic_onboard.py b/src/onaptests/scenario/basic_onboard.py
index ae1ba8c..aa324f2 100644 (file)
@@ -5,8 +5,9 @@ from yaml import SafeLoader, load
 
 from onaptests.scenario.scenario_base import (BaseStep, ScenarioBase,
                                               YamlTemplateBaseScenarioStep)
-from onaptests.steps.onboard.service import (VerifyServiceDistributionStep,
-                                             YamlTemplateServiceOnboardStep)
+from onaptests.steps.onboard.service import (
+    VerifyServiceDistributionStep, YamlTemplateServiceDistributionStep,
+    YamlTemplateServiceOnboardStep)
 
 
 class BasicSdcOnboardStep(YamlTemplateBaseScenarioStep):
@@ -16,12 +17,17 @@ class BasicSdcOnboardStep(YamlTemplateBaseScenarioStep):
         """Initialize step.
 
         Substeps:
+            - YamlTemplateServiceDistributionStep
             - YamlTemplateServiceOnboardStep
             - VerifyServiceDistributionStep (optional).
         """
         super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
         self._yaml_template: dict = None
-        self.add_step(YamlTemplateServiceOnboardStep())
+        if settings.SERVICE_DISTRIBUTION_ENABLED:
+            self.add_step(YamlTemplateServiceDistributionStep())
+        else:
+            self.add_step(YamlTemplateServiceOnboardStep())
+
         if settings.VERIFY_DISTRIBUTION:
             self.add_step(VerifyServiceDistributionStep())
 
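
The onboarding step now switches between distribution-only and full onboarding based on a settings flag. A hedged sketch of the two toggles read above (values are illustrative, not taken from the real settings module):

# Hypothetical excerpt of an onboarding configuration (placeholder values).
SERVICE_DISTRIBUTION_ENABLED = False   # True -> YamlTemplateServiceDistributionStep is used instead
VERIFY_DISTRIBUTION = True             # appends VerifyServiceDistributionStep when enabled
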
diff --git a/src/onaptests/scenario/basic_policy.py b/src/onaptests/scenario/basic_policy.py
new file mode 100644 (file)
index 0000000..fe09738
--- /dev/null
@@ -0,0 +1,11 @@
+from onaptests.scenario.scenario_base import ScenarioBase
+from onaptests.steps.policy.policy_operations import GetPolicyDecisionStep
+
+
+class PolicyScenario(ScenarioBase):
+    """Perform policy operations."""
+
+    def __init__(self, **kwargs):
+        """Init policy."""
+        super().__init__('basic_policy', **kwargs)
+        self.test = GetPolicyDecisionStep()
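
PolicyScenario has no __main__ block; a hedged sketch of a direct invocation, mirroring the pattern used by the other scenarios in this change:

# Hypothetical direct invocation (illustrative only).
from onaptests.scenario.basic_policy import PolicyScenario

scenario = PolicyScenario()
scenario.run()
scenario.clean()
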
diff --git a/src/onaptests/scenario/basic_prh.py b/src/onaptests/scenario/basic_prh.py
new file mode 100644 (file)
index 0000000..d6d9ab1
--- /dev/null
@@ -0,0 +1,134 @@
+import json
+import threading
+
+from onapsdk.aai.business.pnf import PnfInstance
+from onapsdk.configuration import settings
+from onapsdk.kafka import onap_kafka
+
+from onaptests.scenario.scenario_base import (BaseScenarioStep, BaseStep,
+                                              ScenarioBase)
+from onaptests.steps.cloud.add_pnf import AAIAddPNFStep
+from onaptests.steps.cloud.publish_pnf_reg_event_to_kafka import \
+    PublishVESRegistrationEventToKafkaStep
+from onaptests.utils.exceptions import OnapTestException
+from onaptests.utils.kubernetes_kafka import KubernetesKafka
+
+
+class PRHStep(BaseScenarioStep):
+    """Test case verifying PRH's handling of a PNF registration event received over Kafka."""
+
+    def __init__(self) -> None:
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self.add_step(AAIAddPNFStep())
+        self.add_step(PublishVESRegistrationEventToKafkaStep())
+        self.add_step(ValidatePRHProcessing())
+
+    @property
+    def component(self) -> str:
+        return "DCAE"
+
+    @property
+    def description(self) -> str:
+        return "PRH Registration Event Handling step"
+
+
+class ValidatePRHProcessing(BaseStep):
+    """Step to validate PRH processing results of PNF Registration Event"""
+
+    pnf = PnfInstance(pnf_name=settings.PNF_NAME,
+                      pnf_id=settings.PNF_ID,
+                      orchestration_status=settings.PNF_ORCHESTRATION_STATUS,
+                      in_maint=settings.PNF_IN_MAINT,
+                      nf_role=settings.PNF_NF_ROLE,
+                      service_instance=None)
+
+    ticker = threading.Event()
+
+    @property
+    def description(self) -> str:
+        return "Validate PRH processing results of PNF Registration Event"
+
+    @property
+    def component(self) -> str:
+        return "DCAE"
+
+    def validate_aai_changes(self):
+        """Validate correctness of PNF configuration in AAI"""
+        prh_get_response_dict = PnfInstance.send_message_json("GET",
+                                                              f"Get {self.pnf.pnf_name} PNF",
+                                                              f"{self.pnf.url}")
+
+        updated_pnf = PnfInstance.create_from_api_response(prh_get_response_dict, None)
+
+        if (updated_pnf.serial_number == settings.PNF_SERIAL_NUMBER and
+                updated_pnf.ipaddress_v4_oam == settings.PNF_IPADDRESS_V4_OAM and
+                updated_pnf.ipaddress_v6_oam == settings.PNF_IPADDRESS_V6_OAM):
+            self._logger.info("PNF is updated in AAI as expected")
+            self.ticker.set()
+        else:
+            self._logger.info("PNF is not yet updated in AAI as expected")
+
+    @BaseStep.store_state
+    def execute(self) -> None:
+        """Validate the AAI changes and receive the message published by PRH
+        via Kafka."""
+
+        super().execute()
+
+        # Check for PNF updates in AAI
+        count = 0
+        while not self.ticker.wait(settings.WAIT_TIME_SECONDS_BETWEEN_CHECK) and\
+                count < settings.MAX_ATTEMPTS_TO_CHECK:
+            self._logger.info("Attempt-%s: Checking if PNF is updated in AAI", count + 1)
+            self.validate_aai_changes()
+            count = count + 1
+
+        if (count == settings.MAX_ATTEMPTS_TO_CHECK and self.ticker.is_set() is False):
+            msg = "PNF is not updated in AAI as expected in {sec} seconds".format(
+                sec=settings.WAIT_TIME_SECONDS_BETWEEN_CHECK * settings.MAX_ATTEMPTS_TO_CHECK)
+            self._logger.info(msg)
+            raise OnapTestException(msg)
+
+        reader = KubernetesKafka()
+        reader.read_kafka_admin_secret()
+        kafka_password = reader.get_kafka_admin_password()
+
+        # Try to receive PNF_READY event
+        self.ticker.clear()
+        count = 0
+        while not self.ticker.wait(settings.WAIT_TIME_SECONDS_BETWEEN_CHECK) and\
+                count < settings.MAX_ATTEMPTS_TO_CHECK:
+            self._logger.info("Attempt-%s: Checking if PNF_READY event is published", count + 1)
+            events = onap_kafka.get_events_for_topic(settings.KAFKA_USER, kafka_password,
+                                                     settings.PNFREADY_TOPIC_NAME)
+            is_event_found = False
+            for event in events:
+                event_obj = json.loads(event)
+                if event_obj['correlationId'] == settings.PNF_NAME:
+                    self._logger.info("Received the required PNF_READY event from Kafka")
+                    is_event_found = True
+                    self.ticker.set()
+                    break
+            if is_event_found is False:
+                self._logger.info("PNF_READY event is not yet received")
+            count = count + 1
+
+        if (count == settings.MAX_ATTEMPTS_TO_CHECK and self.ticker.is_set() is False):
+            msg = "PNF_READY event is not received as expected in {sec} seconds".format(
+                sec=settings.WAIT_TIME_SECONDS_BETWEEN_CHECK * settings.MAX_ATTEMPTS_TO_CHECK)
+            self._logger.info(msg)
+            raise OnapTestException(msg)
+
+
+class PRHBase(ScenarioBase):
+    """Test case for PRH's handling of a PNF registration event."""
+
+    def __init__(self, **kwargs):
+        super().__init__('basic_prh', **kwargs)
+        self.test: BaseScenarioStep = PRHStep()
+
+
+if __name__ == "__main__":
+    vctc = PRHBase()
+    vctc.run()
+    vctc.clean()
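
ValidatePRHProcessing polls AAI and the PNF_READY topic at a fixed interval for a bounded number of attempts, so the effective timeout is the product of the two settings. A hedged example of such a configuration (placeholder values, not taken from the real basic_prh settings):

# Hypothetical polling configuration for the PRH scenario.
WAIT_TIME_SECONDS_BETWEEN_CHECK = 30   # seconds between two checks
MAX_ATTEMPTS_TO_CHECK = 10             # number of checks before giving up
# timeout reported in the error messages above: 30 * 10 = 300 seconds
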
diff --git a/src/onaptests/scenario/cba_verification.py b/src/onaptests/scenario/cba_verification.py
new file mode 100644 (file)
index 0000000..3640e90
--- /dev/null
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+"""CBA Verification test case."""
+from onapsdk.configuration import settings
+
+from onaptests.scenario.scenario_base import BaseScenarioStep, ScenarioBase
+from onaptests.steps.base import BaseStep
+from onaptests.steps.onboard.verify_cba import TestCbaStep
+
+
+class CbaVerificationStep(BaseScenarioStep):
+    """Basic cba verification step."""
+
+    def __init__(self):
+        """CbaVerification step.
+
+        Substeps:
+            - TestCbaStep
+        """
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        for cba in settings.CBA_LIST:
+            self.add_step(TestCbaStep(cba))
+
+    @property
+    def description(self) -> str:
+        """Step description.
+
+        Used for reports
+
+        Returns:
+            str: Step description
+
+        """
+        return "Verify CBA deployed into environment"
+
+    @property
+    def component(self) -> str:
+        """Component name.
+
+        Name of component which step is related with.
+            Most is the name of ONAP component.
+
+        Returns:
+            str: Component name
+
+        """
+        return "CBA"
+
+
+class CbaVerification(ScenarioBase):
+    """CBA verification scenario."""
+
+    def __init__(self, **kwargs):
+        """Init CBA Verification."""
+        super().__init__('cba_verification', **kwargs)
+        self.test = CbaVerificationStep()
diff --git a/src/onaptests/scenario/check_time_sync.py b/src/onaptests/scenario/check_time_sync.py
new file mode 100644 (file)
index 0000000..965179a
--- /dev/null
@@ -0,0 +1,224 @@
+import json
+import time
+from pathlib import Path
+
+import yaml
+from jinja2 import BaseLoader, Environment, PackageLoader, select_autoescape
+from kubernetes import client, config
+from kubernetes.client.rest import ApiException
+from onapsdk.configuration import settings
+from onapsdk.onap_service import OnapService
+
+from onaptests.scenario.scenario_base import ScenarioBase
+from onaptests.steps.base import BaseStep
+from onaptests.utils.exceptions import OnapTestException
+from onaptests.utils.resources import get_resource_location
+
+
+class CreateTimeSyncChecker(BaseStep):
+    """Create Time Sync Checker Daemon Step."""
+
+    def __init__(self) -> None:
+        """Initialize step."""
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+        self.config_map = None
+        self.daemon_set = None
+
+    @property
+    def component(self) -> str:
+        return "K8S"
+
+    @property
+    def description(self) -> str:
+        return "Create Time Sync Checker Daemon"
+
+    def _build_cm_definition(self):
+        cm_body = """
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: {{daemon_name}}-script
+data:
+  checker.py: |
+{{script_body|indent(4, True)}}
+"""
+        template = Environment(loader=BaseLoader).from_string(cm_body)
+        script_body_file = Path(get_resource_location("utils/ntp_checker.py"))
+        with open(script_body_file, "r", encoding="utf-8") as py_file:
+            script_body = py_file.read()
+        cm_final_body = template.render(
+            daemon_name=settings.DAEMON_NAME,
+            script_body=script_body)
+        return cm_final_body
+
+    def _build_daemon_definition(self):
+        jinja_env_file = Environment(autoescape=select_autoescape(['yml.j2']),
+                                     loader=PackageLoader('onaptests.templates',
+                                                          'artifacts'))
+        template = jinja_env_file.get_template("ntp_checker_daemon.yml.j2")
+        daemon_final_body = template.render(
+            daemon_name=settings.DAEMON_NAME)
+        return daemon_final_body
+
+    def _daemon_deployment_ratio(self):
+        if self.daemon_set.status.desired_number_scheduled <= 0:
+            return 0
+        ratio = 1.0 * (self.daemon_set.status.number_ready /
+                       self.daemon_set.status.desired_number_scheduled)
+        return ratio
+
+    @BaseStep.store_state
+    def execute(self):  # noqa: C901
+        if settings.IN_CLUSTER:
+            config.load_incluster_config()
+        else:
+            config.load_kube_config(config_file=settings.K8S_CONFIG)
+
+        try:
+            core = client.CoreV1Api()
+            cm_name = f"{settings.DAEMON_NAME}-script"
+            try:
+                self.config_map = core.read_namespaced_config_map(
+                    cm_name, settings.DAEMON_NS)
+            except ApiException as e:
+                if e.status != 404:
+                    raise e
+                self._logger.info(
+                    f"Config map {cm_name} does not exist - creating")
+                cm_definition = yaml.safe_load(self._build_cm_definition())
+                self.config_map = core.create_namespaced_config_map(
+                    settings.DAEMON_NS, cm_definition)
+
+            app = client.AppsV1Api()
+            try:
+                self.daemon_set = app.read_namespaced_daemon_set(
+                    settings.DAEMON_NAME, settings.DAEMON_NS)
+            except ApiException as e:
+                if e.status != 404:
+                    raise e
+                self._logger.info(
+                    f"Daemon set {settings.DAEMON_NAME} does not exist - creating")
+                daemon_definition = yaml.safe_load(self._build_daemon_definition())
+                self.daemon_set = app.create_namespaced_daemon_set(
+                    settings.DAEMON_NS, daemon_definition)
+
+            start_time = time.time()
+            while self._daemon_deployment_ratio() < settings.DAEMON_DEPLOYMENT_RATIO:
+                if (time.time() - start_time) > settings.DAEMON_READY_WAIT_TIMOUT_SEC:
+                    raise OnapTestException(
+                        f"Daemon set {settings.DAEMON_NAME} cannot come up")
+                self._logger.info(
+                    f"Waiting for daemon set {settings.DAEMON_NAME} to come up")
+                time.sleep(30)
+                self.daemon_set = app.read_namespaced_daemon_set(
+                    settings.DAEMON_NAME, settings.DAEMON_NS)
+            # additional short time to wait for pods to be ready
+            time.sleep(15)
+        except ApiException as e:
+            raise OnapTestException(e) from e
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self):
+        if self.daemon_set:
+            self._logger.info(
+                f"Deleting daemon set {settings.DAEMON_NAME}")
+            try:
+                app = client.AppsV1Api()
+                app.delete_namespaced_daemon_set(
+                    settings.DAEMON_NAME, settings.DAEMON_NS)
+            except ApiException as e:
+                self._logger.exception(e)
+        if self.config_map:
+            self._logger.info(
+                f"Deleting config map {settings.DAEMON_NAME}-script")
+            try:
+                core = client.CoreV1Api()
+                core.delete_namespaced_config_map(
+                    f"{settings.DAEMON_NAME}-script", settings.DAEMON_NS)
+            except ApiException as e:
+                self._logger.exception(e)
+
+
+class CheckNtpTimeSyncStep(BaseStep, OnapService):
+    """Check Local Time of Nodes Step."""
+
+    def __init__(self) -> None:
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self.daemon_step = CreateTimeSyncChecker()
+        self.add_step(self.daemon_step)
+
+    @property
+    def component(self) -> str:
+        return "NTP"
+
+    @property
+    def description(self) -> str:
+        return "Check status of local time in k8s nodes"
+
+    def _get_host_list(self):
+        pod_list = []
+        if settings.IN_CLUSTER:
+            config.load_incluster_config()
+        else:
+            config.load_kube_config(config_file=settings.K8S_CONFIG)
+        selector = self.daemon_step.daemon_set.spec.selector.match_labels
+        raw_selector = ''
+        for key, value in selector.items():
+            raw_selector += key + '=' + value + ','
+        raw_selector = raw_selector[:-1]
+        core = client.CoreV1Api()
+        pods = core.list_namespaced_pod(
+            settings.DAEMON_NS, label_selector=raw_selector).items
+        for pod in pods:
+            if pod.status.pod_ip:
+                pod_list.append({"id": pod.metadata.name,
+                                "host_name": pod.status.pod_ip})
+        return pod_list
+
+    @BaseStep.store_state
+    def execute(self):
+        hosts_to_check = self._get_host_list()
+        # hosts_to_check = [{"id": "localhost", "host_name": "127.0.0.1"}]
+        all_time_diffs = []
+        for host in hosts_to_check:
+            host_id = host["id"]
+            host_name = host["host_name"]
+            self._logger.info(f"Checking time of {host_id}: {host_name}")
+            time_diffs = []
+            for _ in range(0, settings.NODE_TIME_QUERIES_NUMBER):
+                # 1 ms for processing on our side
+                try:
+                    local_time = int(time.time() * 1000) + 1
+                    executions_response = self.send_message_json(
+                        "POST",
+                        f"Get time status on {host_id}",
+                        f"http://{host_name}:8000/local-time-status",
+                        data=json.dumps({"time": local_time}),
+                        timeout=5
+                    )
+                    # 1 ms for processing on server side
+                    response_time = int(time.time() * 1000) - local_time - 1
+                    time_diff = executions_response["time"]
+                    real_diff = int(time_diff - (response_time / 2.0))
+                    time_diffs.append(real_diff)
+                    self._logger.debug(f"Diff {time_diff} -> Real Diff {real_diff}")
+                except Exception as e:
+                    self._logger.exception(e)
+            if len(time_diffs) > 0:
+                final_diff = max(time_diffs)
+                all_time_diffs.append(final_diff)
+                self._logger.info(f"Final time diff for {host_id} is: {final_diff}")
+        if len(all_time_diffs) > 1:
+            overall_diff = max(all_time_diffs) - min(all_time_diffs)
+            if abs(overall_diff) > settings.MAX_TIME_DIFF_MS:
+                raise OnapTestException(f"Time diff {overall_diff} ms exceeds max threshold")
+
+
+class CheckTimeSync(ScenarioBase):
+    """Check time synchronization in the cluster."""
+
+    def __init__(self, **kwargs):
+        """Init time sync test case."""
+        super().__init__('check_time_sync', **kwargs)
+        self.test = CheckNtpTimeSyncStep()
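
CheckNtpTimeSyncStep estimates per-node clock skew by subtracting half of the measured round-trip time from the raw difference reported by the checker pod. A small worked sketch of that calculation with illustrative numbers:

# Illustrative numbers only; the step computes this per query and keeps the maximum per node.
local_time = 1_700_000_000_000                      # ms, taken just before the request (+1 ms margin)
time_diff = 120                                     # ms, difference reported by the checker pod
response_time = 40                                  # ms, round trip minus the 1 ms server-side margin
real_diff = int(time_diff - response_time / 2.0)    # 120 - 20 = 100 ms estimated skew
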
diff --git a/src/onaptests/scenario/generic_network.py b/src/onaptests/scenario/generic_network.py
new file mode 100644 (file)
index 0000000..c42f68a
--- /dev/null
@@ -0,0 +1,82 @@
+import yaml
+from onapsdk.configuration import settings
+from yaml import SafeLoader
+
+from onaptests.scenario.basic_cnf_macro import BasicCnfMacroStep
+from onaptests.scenario.scenario_base import (ScenarioBase,
+                                              YamlTemplateBaseScenarioStep)
+from onaptests.steps.instantiate.so.generic_network_step import \
+    GenericNetworkStep
+
+
+class AddGenericNetwork(YamlTemplateBaseScenarioStep):
+    """Add a generic network to an existing VNF."""
+
+    def __init__(self):
+        """Init Generic Network use case."""
+        super().__init__()
+        self._yaml_template = None
+        self.add_step(BasicCnfMacroStep())
+        self.add_step(GenericNetworkStep())
+
+    @property
+    def description(self) -> str:
+        """Step description.
+
+        Used for reports
+
+        Returns:
+            str: Step description
+
+        """
+        return "Generic Network instantiation step"
+
+    @property
+    def component(self) -> str:
+        """Component name.
+
+        Name of component which step is related with.
+            Most is the name of ONAP component.
+
+        Returns:
+            str: Component name
+
+        """
+        return "SO"
+
+    @property
+    def yaml_template(self) -> dict:
+        """YAML template abstract property.
+
+        Every YAML template step need to implement that property.
+
+        Returns:
+            dict: YAML template
+
+        """
+        if not self._yaml_template:
+            with open(settings.SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+                self._yaml_template: dict = yaml.load(yaml_template, SafeLoader)
+        return self._yaml_template
+
+    @property
+    def model_yaml_template(self) -> dict:
+        return {}
+
+    @property
+    def service_instance_name(self) -> str:
+        """Service instance name.
+
+        Returns:
+            str: Service instance name
+
+        """
+        return settings.SERVICE_INSTANCE_NAME
+
+
+class GenericNetwork(ScenarioBase):
+    """Instantiate a generic network on top of a basic CNF macro service."""
+    def __init__(self, **kwargs):
+        """Init GenericNetwork Macro use case."""
+        super().__init__('generic_network', **kwargs)
+        self.test = AddGenericNetwork()
diff --git a/src/onaptests/scenario/instantiate_pnf_with_ves_event.py b/src/onaptests/scenario/instantiate_pnf_with_ves_event.py
new file mode 100644 (file)
index 0000000..12b8b1e
--- /dev/null
@@ -0,0 +1,91 @@
+import logging
+
+import yaml
+from onapsdk.configuration import settings
+
+from onaptests.scenario.scenario_base import (BaseStep, ScenarioBase,
+                                              YamlTemplateBaseScenarioStep)
+from onaptests.steps.instantiate.service_macro import \
+    YamlTemplateServiceMacroInstantiateStep
+from onaptests.steps.onboard.cds import CbaPublishStep
+
+
+class InstantiatePnfWithVESEvent(YamlTemplateBaseScenarioStep):
+    """Step created to run PNF instantiation with a VES event."""
+
+    def __init__(self, cleanup=False):
+        """Initialize step.
+
+        Substeps:
+            - YamlTemplateServiceMacroInstantiateStep.
+        """
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self._logger.info("InstantiatePnfWithVESEvent started")
+        self._yaml_template: dict = None
+        self.add_step(CbaPublishStep())
+        self.add_step(YamlTemplateServiceMacroInstantiateStep())
+
+    @property
+    def description(self) -> str:
+        """Step description.
+
+        Used for reports
+
+        Returns:
+            str: Step description
+
+        """
+        return "PNF macro scenario step"
+
+    @property
+    def component(self) -> str:
+        """Component name.
+
+        Name of component which step is related with.
+            Most is the name of ONAP component.
+
+        Returns:
+            str: Component name
+
+        """
+        return "TEST"
+
+    @property
+    def yaml_template(self) -> dict:
+        """YAML template abstract property.
+
+        Every YAML template step need to implement that property.
+
+        Returns:
+            dict: YAML template
+
+        """
+        if not self._yaml_template:
+            with open(settings.SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+                self._yaml_template: dict = yaml.safe_load(yaml_template)
+        return self._yaml_template
+
+    @property
+    def service_instance_name(self) -> str:
+        """Service instance name.
+
+        Returns:
+            str: Service instance name
+
+        """
+        return settings.SERVICE_INSTANCE_NAME
+
+    @property
+    def model_yaml_template(self) -> dict:
+        return {}
+
+
+class PnfWithVesEvent(ScenarioBase):
+    """Instantiate a service with PNF."""
+
+    __logger = logging.getLogger(__name__)
+
+    def __init__(self, **kwargs):
+        super().__init__('pnf_with_ves_event', **kwargs)
+        self.__logger.info("PnfWithVesEvent init started")
+        self.test = InstantiatePnfWithVESEvent()
diff --git a/src/onaptests/scenario/instantiate_pnf_without_registration_event.py b/src/onaptests/scenario/instantiate_pnf_without_registration_event.py
new file mode 100644 (file)
index 0000000..2c05da3
--- /dev/null
@@ -0,0 +1,92 @@
+"""Instantiate service with PNF using SO macro flow."""
+import logging
+
+import yaml
+from onapsdk.configuration import settings
+
+from onaptests.scenario.scenario_base import (BaseStep, ScenarioBase,
+                                              YamlTemplateBaseScenarioStep)
+from onaptests.steps.instantiate.service_macro import \
+    YamlTemplateServiceMacroInstantiateStep
+from onaptests.steps.onboard.cds import CbaPublishStep
+
+
+class PnfMacroWithoutEventScenarioStep(YamlTemplateBaseScenarioStep):
+    """Step created to run PNF instantiation without a VES event."""
+
+    def __init__(self):
+        """Initialize step.
+
+        Substeps:
+            - YamlTemplateServiceMacroInstantiateStep.
+        """
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self._logger.info("PnfMacroWithoutEventScenarioStep started")
+        self._yaml_template: dict = None
+        self.add_step(CbaPublishStep())
+        self.add_step(YamlTemplateServiceMacroInstantiateStep())
+
+    @property
+    def description(self) -> str:
+        """Step description.
+
+        Used for reports
+
+        Returns:
+            str: Step description
+
+        """
+        return "PNF macro scenario step"
+
+    @property
+    def component(self) -> str:
+        """Component name.
+
+        Name of component which step is related with.
+            Most is the name of ONAP component.
+
+        Returns:
+            str: Component name
+
+        """
+        return "TEST"
+
+    @property
+    def yaml_template(self) -> dict:
+        """YAML template abstract property.
+
+        Every YAML template step need to implement that property.
+
+        Returns:
+            dict: YAML template
+
+        """
+        if not self._yaml_template:
+            with open(settings.SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+                self._yaml_template: dict = yaml.safe_load(yaml_template)
+        return self._yaml_template
+
+    @property
+    def service_instance_name(self) -> str:
+        """Service instance name.
+
+        Returns:
+            str: Service instance name
+
+        """
+        return settings.SERVICE_INSTANCE_NAME
+
+    @property
+    def model_yaml_template(self) -> dict:
+        return {}
+
+
+class InstantiatePnfWithoutRegistrationEvent(ScenarioBase):
+    """Instantiate a service with PNF."""
+
+    __logger = logging.getLogger(__name__)
+
+    def __init__(self, **kwargs):
+        super().__init__('instantiate_pnf_without_registration_event', **kwargs)
+        self.__logger.info("instantiate_pnf_without_registration_event init started")
+        self.test = PnfMacroWithoutEventScenarioStep()
diff --git a/src/onaptests/scenario/instantiate_service_without_resource.py b/src/onaptests/scenario/instantiate_service_without_resource.py
new file mode 100644 (file)
index 0000000..f9d4adc
--- /dev/null
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+"""Service instantiation without a resource."""
+from onapsdk.configuration import settings
+from yaml import SafeLoader, load
+
+from onaptests.scenario.scenario_base import (BaseStep, ScenarioBase,
+                                              YamlTemplateBaseScenarioStep)
+from onaptests.steps.instantiate.service_macro import \
+    YamlTemplateServiceMacroInstantiateStep
+from onaptests.steps.onboard.cds import CbaPublishStep
+
+
+class InstantiateServiceWithoutResourceStep(YamlTemplateBaseScenarioStep):
+    """Main scenario step."""
+
+    def __init__(self):
+        """Initialize step.
+
+        Sub steps:
+            - CbaPublishStep
+            - YamlTemplateServiceMacroInstantiateStep.
+        """
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self._yaml_template: dict = None
+        self.add_step(CbaPublishStep())
+        self.add_step(YamlTemplateServiceMacroInstantiateStep())
+
+    @property
+    def component(self) -> str:
+        return "SO"
+
+    @property
+    def description(self) -> str:
+        return "Service Instantiation without the resource step"
+
+    @property
+    def service_instance_name(self) -> str:
+        """Service instance name.
+
+        Returns:
+            str: Service instance name
+
+        """
+        return settings.SERVICE_INSTANCE_NAME
+
+    @property
+    def yaml_template(self) -> dict:
+        """YAML template abstract property.
+
+        Every YAML template step need to implement that property.
+
+        Returns:
+            dict: YAML template
+
+        """
+        if not self._yaml_template:
+            with open(settings.SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+                self._yaml_template: dict = load(yaml_template, SafeLoader)
+        return self._yaml_template
+
+    @property
+    def model_yaml_template(self) -> dict:
+        return {}
+
+
+class InstantiateServiceWithoutResource(ScenarioBase):
+    """Instantiate a service without resource."""
+
+    def __init__(self, **kwargs):
+        """Init a service without resource use case."""
+        super().__init__('instantiate_service_without_resource', **kwargs)
+        self.test = InstantiateServiceWithoutResourceStep()
diff --git a/src/onaptests/scenario/modify_service_pnf.py b/src/onaptests/scenario/modify_service_pnf.py
new file mode 100644 (file)
index 0000000..24a3ef4
--- /dev/null
@@ -0,0 +1,93 @@
+"""Modify service with PNF using SO macro flow."""
+
+import yaml
+from onapsdk.configuration import settings
+
+from onaptests.scenario.scenario_base import (ScenarioBase,
+                                              YamlTemplateBaseScenarioStep)
+from onaptests.steps.instantiate.service_macro import \
+    YamlTemplateServiceMacroInstantiateStep
+from onaptests.steps.instantiate.so.modify_pnf_in_service import \
+    ModifyPnfInService
+from onaptests.steps.onboard.cds import CbaPublishStep
+
+
+class ModifyPnfScenarioStep(YamlTemplateBaseScenarioStep):
+    """Step created to modify a PNF without a VES event."""
+
+    def __init__(self):
+        """Initialize step.
+
+        Sub steps:
+            - YamlTemplateServiceMacroInstantiateStep.
+        """
+        super().__init__()
+        self._logger.info("Modify Pnf Step started")
+        self._yaml_template: dict = None
+        self.add_step(CbaPublishStep())
+        self.add_step(YamlTemplateServiceMacroInstantiateStep())
+        self.add_step(ModifyPnfInService())
+
+    @property
+    def description(self) -> str:
+        """Step description.
+
+        Used for reports
+
+        Returns:
+            str: Step description
+
+        """
+        return "PNF modify scenario step"
+
+    @property
+    def component(self) -> str:
+        """Component name.
+
+        Name of component which step is related with.
+            Most is the name of ONAP component.
+
+        Returns:
+            str: Component name
+
+        """
+        return "PythonSDK-tests"
+
+    @property
+    def yaml_template(self) -> dict:
+        """YAML template abstract property.
+
+        Every YAML template step need to implement that property.
+
+        Returns:
+            dict: YAML template
+
+        """
+        if not self._yaml_template:
+            with open(settings.SERVICE_YAML_TEMPLATE,
+                      "r", encoding="utf-8") as yaml_template:
+                self._yaml_template: dict = yaml.safe_load(yaml_template)
+        return self._yaml_template
+
+    @property
+    def service_instance_name(self) -> str:
+        """Service instance name.
+
+        Returns:
+            str: Service instance name
+
+        """
+        return settings.SERVICE_INSTANCE_NAME
+
+    @property
+    def model_yaml_template(self) -> dict:
+        return {}
+
+
+class ModifyPnf(ScenarioBase):
+    """Modify a service with a PNF."""
+
+    def __init__(self, **kwargs):
+        """Init a service with PNF use case"""
+        super().__init__('modify_service_pnf', **kwargs)
+        self.test = ModifyPnfScenarioStep()
index 10f9866..29f42e1 100644 (file)
@@ -8,7 +8,7 @@ from onaptests.steps.instantiate.pnf_register_ves import \
     SendPnfRegisterVesEvent
 from onaptests.steps.instantiate.service_macro import \
     YamlTemplateServiceMacroInstantiateStep
-from onaptests.steps.onboard.cds import CbaEnrichStep
+from onaptests.steps.onboard.cds import CbaPublishStep
 from onaptests.steps.simulator.pnf_simulator_cnf.pnf_register import \
     PnfSimulatorCnfRegisterStep
 
@@ -28,7 +28,7 @@ class PnfMacroScenarioStep(YamlTemplateBaseScenarioStep):
             self.add_step(PnfSimulatorCnfRegisterStep())
         else:
             self.add_step(SendPnfRegisterVesEvent())
-        self.add_step(CbaEnrichStep())
+        self.add_step(CbaPublishStep())
         self.add_step(YamlTemplateServiceMacroInstantiateStep())
 
     @property
diff --git a/src/onaptests/scenario/publish_ves_event.py b/src/onaptests/scenario/publish_ves_event.py
new file mode 100644 (file)
index 0000000..2fe34e8
--- /dev/null
@@ -0,0 +1,136 @@
+import json
+from pathlib import Path
+
+import requests
+from onapsdk.configuration import settings
+from onapsdk.kafka import onap_kafka
+from onapsdk.ves.ves import Ves
+
+from onaptests.scenario.scenario_base import BaseScenarioStep, ScenarioBase
+from onaptests.steps.base import BaseStep
+from onaptests.utils.exceptions import OnapTestException
+from onaptests.utils.kubernetes_kafka import KubernetesKafka
+
+
+class VESEventStep(BaseScenarioStep):
+    """Step to test the VES Collector's handling of a VES event received over REST."""
+
+    def __init__(self) -> None:
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self.add_step(SubmitVESEventToCollectorStep())
+        self.add_step(ReceiveVESEventFromKafkaStep())
+
+    @property
+    def component(self) -> str:
+        return "VES-Collector"
+
+    @property
+    def description(self) -> str:
+        return "VES Event Handling step"
+
+
+class SubmitVESEventToCollectorStep(BaseStep):
+    """Submit VES event to Collector step."""
+
+    def __init__(self) -> None:
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Submit VES event to Collector over REST"
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "VES-Collector"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Submit VES event to Collector over REST"""
+        super().execute()
+
+        try:
+            send_event_response = Ves.send_event(
+                version=settings.VES_VERSION,
+                json_event=self.get_ves_payload_from_file(),
+                basic_auth=settings.VES_BASIC_AUTH)
+            self._logger.info("VES collector response=%s|%s",
+                              send_event_response.status_code,
+                              send_event_response.text)
+
+        except (requests.exceptions.ConnectionError) as ce:
+            self._logger.error("Can't connect with VES Collector: %s", str(ce))
+            raise OnapTestException(ce) from ce
+        except (requests.exceptions.HTTPError) as he:
+            self._logger.error("HTTP Error from VES Collector: %s", str(he))
+            raise OnapTestException(he) from he
+
+    def get_ves_payload_from_file(self) -> str:
+        """Get ves payload from file."""
+
+        with open(Path(Path(__file__).parent.parent,
+                  "templates/artifacts/pnf_registration_ves_event.json"),
+                  "r",
+                  encoding="utf-8") as ves_event_file:
+            return ves_event_file.read()
+
+
+class ReceiveVESEventFromKafkaStep(BaseStep):
+    """Receive VES event from Kafka step."""
+
+    def __init__(self) -> None:
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Receive VES event from Kafka"
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "Kafka"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Receive VES event from Kafka"""
+        super().execute()
+
+        reader = KubernetesKafka()
+        reader.read_kafka_admin_secret()
+        kafka_password = reader.get_kafka_admin_password()
+
+        try:
+            events = onap_kafka.get_events_for_topic(settings.KAFKA_USER, kafka_password,
+                                                     settings.PNF_REGISTRATION_TOPIC_NAME)
+            is_event_found = False
+            for event in events:
+                event_obj = json.loads(event)
+                if ((event_obj['event']['commonEventHeader']['domain']) ==
+                    settings.PNF_REGISTRATION_VES_DOMAIN_NAME and
+                        event_obj['event']['commonEventHeader']['sourceName'] ==
+                        settings.PNF_SOURCE_NAME_IN_VES_EVENT):
+                    self._logger.info("Received the required VES event from Kafka")
+                    is_event_found = True
+                    break
+            if not is_event_found:
+                msg = "Did not get the required VES event from Kafka"
+                self._logger.error(msg)
+                raise OnapTestException(msg)
+
+        except (requests.exceptions.ConnectionError) as ce:
+            self._logger.error("Can't connect with Kafka: %s", str(ce))
+            raise OnapTestException(ce) from ce
+        except (requests.exceptions.HTTPError) as he:
+            self._logger.error("HTTP Error from Kafka: %s", str(he))
+            raise OnapTestException(he) from he
+
+
+class VesCollectorTestCase(ScenarioBase):
+    """Test case for the VES Collector's handling of a VES event
+    received over REST."""
+
+    def __init__(self, **kwargs):
+        super().__init__('ves_publish', **kwargs)
+        self.test: BaseStep = VESEventStep()
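
The VES scenario assumes a small set of settings for the collector call and the Kafka read-back. A hedged sketch of those values follows; everything is a placeholder, and the dict form of VES_BASIC_AUTH is an assumption, not confirmed by this change.

# Hypothetical VES publish configuration (placeholder values).
VES_VERSION = "v7"
VES_BASIC_AUTH = {"username": "sample1", "password": "sample1"}    # assumed dict form
PNF_REGISTRATION_TOPIC_NAME = "unauthenticated.VES_PNFREG_OUTPUT"  # assumed topic read back from Kafka
PNF_REGISTRATION_VES_DOMAIN_NAME = "pnfRegistration"
PNF_SOURCE_NAME_IN_VES_EVENT = "test-pnf"
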
diff --git a/src/onaptests/steps/AAIInitialSetup/create_operations.py b/src/onaptests/steps/AAIInitialSetup/create_operations.py
new file mode 100644 (file)
index 0000000..ff6d639
--- /dev/null
@@ -0,0 +1,46 @@
+from onaptests.scenario.scenario_base import BaseScenarioStep
+from onaptests.steps.base import BaseStep
+from onaptests.steps.cloud.cloud_region_create import CloudRegionCreateStep
+from onaptests.steps.cloud.complex_create import ComplexCreateStep
+from onaptests.steps.cloud.customer_service_subscription_create import \
+    CustomerServiceSubscriptionCreateStep
+from onaptests.steps.cloud.lineofbusiness_create import \
+    LineofBusinessCreateStep
+from onaptests.steps.cloud.owning_entity_create import OwningEntityCreateStep
+from onaptests.steps.cloud.platform_create import PlatformCreateStep
+from onaptests.steps.cloud.project_create import ProjectCreateStep
+
+
+class CreateOperationStep(BaseScenarioStep):
+    """Create operations in AAI."""
+
+    def __init__(self):
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+        # Create Complex
+        self.add_step(ComplexCreateStep())
+        # Create CloudRegionCreateStep
+        self.add_step(CloudRegionCreateStep())
+        # Create service subscription
+        self.add_step(CustomerServiceSubscriptionCreateStep())
+        # Customer creation (CustomerCreateStep) is covered by
+        # CustomerServiceSubscriptionCreateStep above
+        # Create platform
+        self.add_step(PlatformCreateStep())
+        # Create project
+        self.add_step(ProjectCreateStep())
+        # Create owning entity
+        self.add_step(OwningEntityCreateStep())
+        # Create line of business
+        self.add_step(LineofBusinessCreateStep())
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "AAI create operations."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
diff --git a/src/onaptests/steps/AAIInitialSetup/get_all_operations.py b/src/onaptests/steps/AAIInitialSetup/get_all_operations.py
new file mode 100644 (file)
index 0000000..50978f3
--- /dev/null
@@ -0,0 +1,62 @@
+from typing import List
+
+from onapsdk.aai.business import (Customer, LineOfBusiness, OwningEntity,
+                                  Platform, Project)
+from onapsdk.aai.cloud_infrastructure import CloudRegion, Complex
+
+from onaptests.steps.base import BaseStep
+from onaptests.utils.exceptions import OnapTestException
+
+
+class GetAllOperationsStep(BaseStep):
+    """Get all operations in AAI."""
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "AAI get all operations."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+
+        super().execute()
+
+        # get all Customers
+        customer_list: List[Customer] = list(Customer.get_all())
+        if not customer_list:
+            raise OnapTestException("Customer list shouldn't be empty")
+
+        # get all Owning Entity
+        owning_ent_list: List[OwningEntity] = list(OwningEntity.get_all())
+        if not owning_ent_list:
+            raise OnapTestException("OwningEntity list shouldn't be empty")
+
+        # get all Platform
+        platform_list: List[Platform] = list(Platform.get_all())
+        if not platform_list:
+            raise OnapTestException("Platform list shouldn't be empty")
+
+        # get all Project
+        project_list: List[Project] = list(Project.get_all())
+        if not project_list:
+            raise OnapTestException("Project list shouldn't be empty")
+
+        # get all LineOfBusiness
+        lob_list: List[LineOfBusiness] = list(LineOfBusiness.get_all())
+        if not lob_list:
+            raise OnapTestException("Line of business list shouldn't be empty")
+
+        # get all Complex
+        complex_list: List[Complex] = list(Complex.get_all())
+        if not complex_list:
+            raise OnapTestException("Complex list shouldn't be empty")
+
+        # get all cloud region
+        cloud_region_list: List[CloudRegion] = list(CloudRegion.get_all())
+        if not cloud_region_list:
+            raise OnapTestException("CloudRegion list shouldn't be empty")
diff --git a/src/onaptests/steps/AAIInitialSetup/get_operations.py b/src/onaptests/steps/AAIInitialSetup/get_operations.py
new file mode 100644 (file)
index 0000000..2217d4c
--- /dev/null
@@ -0,0 +1,49 @@
+from onapsdk.aai.business import (Customer, LineOfBusiness, OwningEntity,
+                                  Platform, Project)
+from onapsdk.aai.cloud_infrastructure import CloudRegion, Complex
+from onapsdk.configuration import settings
+
+from onaptests.steps.base import BaseStep
+
+
+class GetOperationsStep(BaseStep):
+    """Get operations in AAI."""
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "AAI get operations."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+
+        super().execute()
+
+        # get Customer
+        Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
+
+        # get Owning Entity
+        OwningEntity.get_by_owning_entity_id(settings.OWNING_ENTITY_ID)
+
+        # get Platform
+        Platform.get_by_name(settings.PLATFORM)
+
+        # get Project
+        Project.get_by_name(settings.PROJECT)
+
+        # get LineOfBusiness
+        LineOfBusiness.get_by_name(settings.LINE_OF_BUSINESS)
+
+        # get Complex
+        Complex.get_by_physical_location_id(settings.COMPLEX_PHYSICAL_LOCATION_ID)
+
+        # get cloud region
+        CloudRegion.get_by_id(
+            settings.CLOUD_REGION_CLOUD_OWNER,
+            settings.CLOUD_REGION_ID,
+        )
diff --git a/src/onaptests/steps/AAIInitialSetup/update_operation_step.py b/src/onaptests/steps/AAIInitialSetup/update_operation_step.py
new file mode 100644 (file)
index 0000000..01e04a9
--- /dev/null
@@ -0,0 +1,32 @@
+from onaptests.scenario.scenario_base import BaseScenarioStep
+from onaptests.steps.base import BaseStep
+from onaptests.steps.cloud.cloud_region_upadte import CloudRegionUpdateStep
+from onaptests.steps.cloud.complex_update import ComplexUpdateStep
+from onaptests.steps.cloud.customer_update import CustomerUpdateStep
+from onaptests.steps.cloud.owning_entity_update import OwningEntityUpdateStep
+
+
+class UpdateOperationStep(BaseScenarioStep):
+    """Update operations in AAI."""
+
+    def __init__(self):
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+        # Update Complex
+        self.add_step(ComplexUpdateStep())
+        # Update cloud region
+        self.add_step(CloudRegionUpdateStep())
+        # Update owning entity
+        self.add_step(OwningEntityUpdateStep())
+        # Update customer
+        self.add_step(CustomerUpdateStep())
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "AAI update operations."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
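
The four AAIInitialSetup steps above are building blocks. A hypothetical wiring into a single scenario step is sketched below, mirroring how other scenario steps in this change compose substeps; the class name and step order are illustrative only, and the real aai_initial_data_setup scenario may differ.

# Hypothetical composition of the AAI initial data setup steps (illustrative only).
from onaptests.scenario.scenario_base import BaseScenarioStep
from onaptests.steps.base import BaseStep
from onaptests.steps.AAIInitialSetup.create_operations import CreateOperationStep
from onaptests.steps.AAIInitialSetup.get_all_operations import GetAllOperationsStep
from onaptests.steps.AAIInitialSetup.get_operations import GetOperationsStep
from onaptests.steps.AAIInitialSetup.update_operation_step import UpdateOperationStep


class AAIInitialDataSetupStep(BaseScenarioStep):
    """Hypothetical wiring of the AAI initial data setup substeps."""

    def __init__(self):
        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
        self.add_step(CreateOperationStep())   # create complex, cloud region, customer, ...
        self.add_step(GetOperationsStep())     # read back the configured objects
        self.add_step(GetAllOperationsStep())  # list all objects of each type
        self.add_step(UpdateOperationStep())   # update a subset of them

    @property
    def description(self) -> str:
        return "AAI initial data setup (illustrative wiring)"

    @property
    def component(self) -> str:
        return "AAI"
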
index c615047..9356f4d 100644 (file)
@@ -146,13 +146,19 @@ class BaseStep(StoreStateHandler, ABC):
         except SettingsError:
             pass
 
-    def __init__(self, cleanup: bool = False, break_on_error=True) -> None:
+    def __init__(self,
+                 cleanup: bool = False,
+                 break_on_error: bool = True,
+                 is_optional: bool = False) -> None:
         """Step initialization.
 
         Args:
             cleanup(bool, optional): Determines if cleanup action should be called.
             break_on_error(bool, optional): Determines if fail on execution should
                 result with continuation of further steps
+            is_optional(bool): Determines if the step is optional and its errors should be
+                ignored. The step is still marked as failed, but the rest of the scenario
+                continues. False by default.
 
         """
         self._steps: List["BaseStep"] = []
@@ -169,6 +175,7 @@ class BaseStep(StoreStateHandler, ABC):
         self._state_clean: bool = False
         self._nesting_level: int = 0
         self._break_on_error: bool = break_on_error
+        self._is_optional: bool = is_optional
         self._substeps_executed: bool = False
         self._is_validation_only = settings.IF_VALIDATION
         self._is_force_cleanup = os.environ.get(IF_FORCE_CLEANUP) is not None
@@ -292,6 +299,22 @@ class BaseStep(StoreStateHandler, ABC):
         """Step name."""
         return self.__class__.__name__
 
+    @property
+    def service_name(self) -> str:
+        """Service name."""
+        return settings.SERVICE_NAME
+
+    @property
+    def service_type(self) -> str:
+        """Service type."""
+        try:
+            service_type = getattr(settings, "SERVICE_TYPE")
+            if service_type:
+                return service_type
+            return self.service_name
+        except (KeyError, AttributeError, SettingsError):
+            return self.service_name
+
     @property
     @abstractmethod
     def description(self) -> str:
@@ -362,9 +385,12 @@ class BaseStep(StoreStateHandler, ABC):
             try:
                 step.execute()
             except (OnapTestException, SDKException) as substep_err:
-                if step._break_on_error:
+                if step._is_optional:
+                    self._logger.info("Step is optional, error ignored, continue test execution")
+                elif step._break_on_error:
                     raise SubstepExecutionException("", substep_err) # noqa: W0707
-                substep_exceptions.append(substep_err)
+                else:
+                    substep_exceptions.append(substep_err)
         if self._steps:
             if len(substep_exceptions) > 0 and self._break_on_error:
                 if len(substep_exceptions) == 1:
@@ -482,7 +508,7 @@ class YamlTemplateBaseStep(BaseStep, ABC):
                 Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
         if self._service_subscription is None or reload:
             self._service_subscription: ServiceSubscription = \
-                self._customer.get_service_subscription_by_service_type(self.service_name)
+                self._customer.get_service_subscription_by_service_type(self.service_type)
 
     def _load_service_instance(self):
         if self._service_instance is None:
@@ -503,6 +529,23 @@ class YamlTemplateBaseStep(BaseStep, ABC):
             return next(iter(self.yaml_template.keys()))
         return self.parent.service_name
 
+    @property
+    def service_type(self) -> str:
+        """Service type.
+
+        Read from the YAML template if it's a root step, taken from the parent otherwise.
+        If the YAML template has no service_type key, the service name is returned.
+
+        Returns:
+            str: Service type
+
+        """
+        if self.is_root:
+            if "service_type" in self.yaml_template[self.service_name]:
+                return self.yaml_template[self.service_name]["service_type"]
+            return self.service_name
+        return self.parent.service_type
+
     @property
     def service_instance_name(self) -> str:
         """Service instance name.
@@ -539,3 +582,24 @@ class YamlTemplateBaseStep(BaseStep, ABC):
             dict: YAML template
 
         """
+
+
+class DelayStep(BaseStep):
+    """Delay step -- useful if some delay between two steps is needed."""
+
+    def __init__(self, delay: int, break_on_error=True):
+        super().__init__(BaseStep.HAS_NO_CLEANUP, break_on_error)
+        self.delay: int = delay
+
+    @property
+    def description(self) -> str:
+        return f"Wait for {self.delay} seconds."
+
+    @property
+    def component(self) -> str:
+        return "Python ONAP SDK"
+
+    @BaseStep.store_state
+    def execute(self):
+        super().execute()
+        time.sleep(self.delay)
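
DelayStep is meant to be composed like any other substep. A hedged usage sketch follows; the wrapping class is hypothetical and only illustrates the composition pattern used elsewhere in this change.

# Hypothetical step showing DelayStep composition (illustrative only).
from onaptests.scenario.scenario_base import BaseScenarioStep
from onaptests.steps.base import BaseStep, DelayStep


class ExampleDelayedScenarioStep(BaseScenarioStep):
    """Hypothetical step that pauses before its following substeps run."""

    def __init__(self):
        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
        self.add_step(DelayStep(delay=60))  # wait one minute before the next substep

    @property
    def description(self) -> str:
        return "Example step that waits before continuing"

    @property
    def component(self) -> str:
        return "Python ONAP SDK"
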
diff --git a/src/onaptests/steps/cloud/add_pnf.py b/src/onaptests/steps/cloud/add_pnf.py
new file mode 100644 (file)
index 0000000..2b83206
--- /dev/null
@@ -0,0 +1,52 @@
+from onapsdk.aai.business.pnf import PnfInstance
+from onapsdk.configuration import settings
+
+from onaptests.utils.exceptions import OnapTestException
+
+from ..base import BaseStep
+
+
+class AAIAddPNFStep(BaseStep):
+    """Step to add a PNF in AAI"""
+
+    pnf = PnfInstance(pnf_name=settings.PNF_NAME,
+                      pnf_id=settings.PNF_ID,
+                      orchestration_status=settings.PNF_ORCHESTRATION_STATUS,
+                      in_maint=settings.PNF_IN_MAINT,
+                      nf_role=settings.PNF_NF_ROLE,
+                      service_instance=None)
+
+    def __init__(self) -> None:
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+
+    @property
+    def description(self) -> str:
+        return "Step to add a PNF in AAI"
+
+    @property
+    def component(self) -> str:
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self) -> None:
+        """Add a PNF in AAI by calling its REST API"""
+
+        super().execute()
+
+        self._logger.info("Put pnf:%s", self.pnf.pnf_name)
+        try:
+            self.pnf.put_in_aai()
+        except Exception as e:
+            self._logger.error("Exception while adding PNF in AAI: %s", e, exc_info=1)
+            raise OnapTestException(e) from e
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        self._logger.info("Deleting PNF put in AAI for the test")
+        try:
+            self.pnf.delete_from_aai()
+        except Exception as e:
+            self._logger.error("Exception while deleting PNF from AAI: %s", e, exc_info=1)
+            raise OnapTestException(e) from e
+
+        super().cleanup()
index 5a4b962..ae7d80b 100644 (file)
@@ -9,6 +9,7 @@ from pathlib import Path
 
 from jinja2 import Environment, PackageLoader, select_autoescape
 from kubernetes import client, config
+from kubernetes.client.exceptions import ApiException
 from kubernetes.stream import stream
 from natural.date import delta
 from onapsdk.configuration import settings
@@ -19,8 +20,16 @@ from onaptests.utils.exceptions import StatusCheckException
 
 from ..base import BaseStep
 from .resources import (ConfigMap, Container, DaemonSet, Deployment, Ingress,
-                        Job, Pod, Pvc, ReplicaSet, Secret, Service,
-                        StatefulSet, Node)
+                        Job, Namespace, Node, Pod, Pvc, ReplicaSet, Secret,
+                        Service, StatefulSet, VulnerabilityReport)
+
+
+class K8sResourceSet(set):
+    """Customized set with append method"""
+
+    def append(self, item):
+        """Alias for add() so list-style call sites keep working."""
+        self.add(item)
 
 
 class CheckK8sResourcesStep(BaseStep):
@@ -35,6 +44,7 @@ class CheckK8sResourcesStep(BaseStep):
         self.batch = client.BatchV1Api()
         self.app = client.AppsV1Api()
         self.networking = client.NetworkingV1Api()
+        self.cr = client.CustomObjectsApi()
         self.namespace = namespace
 
         if settings.STATUS_RESULTS_DIRECTORY:
@@ -46,12 +56,16 @@ class CheckK8sResourcesStep(BaseStep):
             self.res_dir = f"{self.res_dir}/{self.namespace}"
 
         self.failing = False
+        self.k8s_issue = ""
         self.resource_type = resource_type
         self.k8s_resources = []
-        self.all_resources = []
-        self.failing_resources = []
-        self.jinja_env = Environment(autoescape=select_autoescape(['html']),
-                                     loader=PackageLoader('onaptests.templates', 'status'))
+        self.all_resources = K8sResourceSet()
+        self.failing_resources = K8sResourceSet()
+        self.unstable_resources = K8sResourceSet()
+        if settings.STORE_ARTIFACTS:
+            self.jinja_env = Environment(autoescape=select_autoescape(['html']),
+                                         loader=PackageLoader('onaptests.templates',
+                                                              'status'))
 
     @property
     def component(self) -> str:
@@ -71,21 +85,45 @@ class CheckK8sResourcesStep(BaseStep):
     def _init_resources(self):
         if self.resource_type != "":
             self.__logger.debug(f"Loading all k8s {self.resource_type}s"
-                                " in the {NAMESPACE} namespace")
+                                f" in the {self.namespace} namespace")
 
     def _parse_resources(self):
         """Parse the resources."""
         return []
 
-    def _add_failing_resource(self, resource):
+    def _propagate_events(self):
+        pass
+
+    def _analyze_events(self):
+        for res in self.all_resources:
+            for event in res.events:
+                if event.type != "Normal" and event.reason not in settings.WAIVER_EVENTS:
+                    self._add_unstable_resource(res, str(event.reason))
+
+    def _is_waiver(self, resource):
         if (hasattr(resource, 'labels') and resource.labels and settings.EXCLUDED_LABELS
                 and (resource.labels.keys() and settings.EXCLUDED_LABELS.keys())):
             for label in resource.labels.items():
                 for waived_label in settings.EXCLUDED_LABELS.items():
                     if label[0] in waived_label[0] and label[1] in waived_label[1]:
-                        return
+                        return True
+        return False
+
+    def _add_unstable_resource(self, resource, reason=None):
+        if self._is_waiver(resource):
+            return
+        self.__logger.warning("a {} is unstable: {}".format(self.resource_type, resource.name))
+        self.unstable_resources.append(resource)
+        if reason and reason not in resource.unstability_reasons:
+            resource.unstability_reasons.append(reason)
+
+    def _add_failing_resource(self, resource, reason=None):
+        if self._is_waiver(resource):
+            return
         self.__logger.warning("a {} is in error: {}".format(self.resource_type, resource.name))
         self.failing_resources.append(resource)
+        if reason and reason not in resource.failing_reasons:
+            resource.failing_reasons.append(reason)
         self.failing = True
 
     def execute(self):
@@ -98,16 +136,27 @@ class CheckK8sResourcesStep(BaseStep):
                                    len(self.k8s_resources),
                                    self.resource_type)
                 self._parse_resources()
-                self.__logger.info("%4s %ss parsed, %s failing",
+                self._propagate_events()
+                self._analyze_events()
+                self.__logger.info("%4s %ss parsed, %s failing, %s unstable",
                                    len(self.all_resources),
                                    self.resource_type,
-                                   len(self.failing_resources))
+                                   len(self.failing_resources),
+                                   len(self.unstable_resources))
                 if self.failing:
                     raise StatusCheckException(f"{self.resource_type} test failed")
         except (ConnectionRefusedError, MaxRetryError, NewConnectionError) as e:
             self.__logger.error("Test of k8s %ss failed.", self.resource_type)
             self.__logger.error("Cannot connect to Kubernetes.")
-            raise StatusCheckException from e
+            self.failing = True
+            self.k8s_issue = f"K8s API Connection issue: {str(e)}"
+            raise StatusCheckException(e) from e
+        except (ApiException) as e:
+            self.__logger.error("Test of k8s %ss failed.", self.resource_type)
+            self.__logger.error("K8s API Access issue.")
+            self.failing = True
+            self.k8s_issue = f"K8s API Access issue: {str(e)}"
+            raise StatusCheckException(e) from e
 
 
 class CheckBasicK8sResourcesStep(CheckK8sResourcesStep):
@@ -166,6 +215,68 @@ class CheckK8sIngressesStep(CheckBasicK8sResourcesStep):
         self.k8s_resources = self.networking.list_namespaced_ingress(self.namespace).items
 
 
+class CheckTrivyVulnerabilitiesStep(CheckK8sResourcesStep):
+    """Check for trivy CRITICAL vulnerabilities"""
+    API_GROUP = "aquasecurity.github.io"
+    API_VERSION = "v1alpha1"
+    KIND = "vulnerabilityreports"
+    READABILITY_THRESHOLD = 999999
+
+    __logger = logging.getLogger(__name__)
+
+    def __init__(self, namespace: str):
+        """Init CheckTrivyVulnerabilitiesStep."""
+        super().__init__(namespace=namespace, resource_type=self.KIND[:-1])
+
+    def _init_resources(self):
+        super()._init_resources()
+        try:
+            self.k8s_resources = self.cr.list_namespaced_custom_object(namespace=self.namespace,
+                                                                       group=self.API_GROUP,
+                                                                       version=self.API_VERSION,
+                                                                       plural=self.KIND
+                                                                       )['items']
+        except Exception as e:
+            self.__logger.warning(f"Cannot resolve Vulnerability Reports: {str(e)}")
+            raise StatusCheckException(e) from e
+
+    def _parse_resources(self):
+        """Parse the vulnerabilityreports.
+        Return a list of VulnerabilityReports created by the Trivy scan.
+        """
+        super()._parse_resources()
+        artifacts = set()
+        vrs_unique = []
+        total_crt = 0
+        for v in self.k8s_resources:
+            vr = VulnerabilityReport(v)
+            if vr.artifact in artifacts:
+                continue
+            if vr.crt_count > 0:
+                total_crt += vr.crt_count
+                self.__logger.error(f"Vulnerability Report for {vr.owner_kind} "
+                                    f"{vr.owner_name} has {vr.crt_count} CRT Vulnerabilities")
+            artifacts.add(vr.artifact)
+            vrs_unique.append(vr)
+
+        if len(vrs_unique) > self.READABILITY_THRESHOLD:
+            ns = Namespace(self.core.read_namespace(self.namespace))
+            if total_crt > 0:
+                self._add_failing_resource(
+                    ns, f"CVE ({total_crt})")
+            self.all_resources.append(ns)
+        else:
+            for vr in vrs_unique:
+                if vr.crt_count > 0:
+                    self._add_failing_resource(
+                        vr, f"CVE ({vr.crt_count}) [{vr.owner_kind.lower()}-{vr.owner_name}]")
+                self.all_resources.append(vr)
+
+    @BaseStep.store_state
+    def execute(self):
+        super().execute()
+
+
 class CheckK8sPvcsStep(CheckK8sResourcesStep):
     """Check of k8s pvcs in the selected namespace."""
 
@@ -217,18 +328,37 @@ class CheckK8sNodesStep(CheckK8sResourcesStep):
         Return a list of Nodes.
         """
         super()._parse_resources()
+        metrics = []
+        try:
+            metrics = self.cr.list_cluster_custom_object(
+                "metrics.k8s.io", "v1beta1", "nodes")['items']
+        except Exception as e:
+            self.__logger.warning(f"Cannot resolve metrics for nodes: {str(e)}")
         for k8s in self.k8s_resources:
             node = Node(k8s=k8s)
             for condition in k8s.status.conditions:
                 failing = False
-                if condition.status == 'False' and condition.type == 'Ready':
+                if condition.status != 'True' and condition.type == 'Ready':
                     failing = True
-                elif condition.status == 'True' and condition.type != 'Ready':
+                elif condition.status != 'False' and condition.type != 'Ready':
                     failing = True
                 if failing:
-                    self._add_failing_resource(node)
+                    self._add_failing_resource(node, condition.reason)
                     self.__logger.error(
                         f"Node {node.name} {condition.type} status is {condition.status}")
+            node.events = self.core.list_namespaced_event(
+                "default",
+                field_selector="involvedObject.name={}".format(node.name)).items
+            alloc = k8s.status.allocatable
+            node.details['allocatable'] = {
+                'cpu': alloc['cpu'],
+                'memory': alloc['memory'],
+                'storage': alloc['ephemeral-storage']
+            }
+            for metric in metrics:
+                if metric['metadata']['name'] == node.name:
+                    node.details['usage'] = metric['usage']
+                    break
             self.all_resources.append(node)
 
     @BaseStep.store_state
@@ -244,6 +374,14 @@ class CheckK8sResourcesUsingPodsStep(CheckK8sResourcesStep):
         super().__init__(namespace=namespace, resource_type=resource_type)
         self.pods_source = pods_source
 
+    def _propagate_events(self):
+        if self.resource_type == "pod":
+            return
+        for res in self.all_resources:
+            for pod in res.pods:
+                for event in pod.events:
+                    res.events.append(event)
+
     def _get_used_pods(self):
         pods = []
         if self.pods_source is not None:
@@ -287,7 +425,7 @@ class CheckK8sJobsStep(CheckK8sResourcesUsingPodsStep):
         super()._init_resources()
         self.k8s_resources = self.batch.list_namespaced_job(self.namespace).items
 
-    def _parse_resources(self):
+    def _parse_resources(self):  # noqa: C901
         """Parse the jobs.
         Return a list of Pods that were created to perform jobs.
         """
@@ -307,10 +445,9 @@ class CheckK8sJobsStep(CheckK8sResourcesUsingPodsStep):
             job.events = self.core.list_namespaced_event(
                 self.namespace,
                 field_selector=field_selector).items
-
-            self.jinja_env.get_template('job.html.j2').stream(job=job).dump(
-                '{}/job-{}.html'.format(self.res_dir, job.name))
-
+            if settings.STORE_ARTIFACTS:
+                self.jinja_env.get_template('job.html.j2').stream(job=job).dump(
+                    '{}/job-{}.html'.format(self.res_dir, job.name))
             if not any(waiver_elt in job.name for waiver_elt in settings.WAIVER_LIST):
                 cron_job = self._get_cron_job_name(k8s)
                 if cron_job:
@@ -318,8 +455,12 @@ class CheckK8sJobsStep(CheckK8sResourcesUsingPodsStep):
                         cron_jobs[cron_job] = []
                     cron_jobs[cron_job].append(job)
                 elif not k8s.status.completion_time:
-                    # timemout job
-                    self._add_failing_resource(job)
+                    if k8s.status.active and k8s.status.active > 0:
+                        self.__logger.warning(
+                            "Job %s is still running", job.name)
+                    else:
+                        # timeout or failed job
+                        self._add_failing_resource(job)
                 self.all_resources.append(job)
             else:
                 self.__logger.warning(
@@ -331,7 +472,12 @@ class CheckK8sJobsStep(CheckK8sResourcesUsingPodsStep):
                               key=lambda job: job.k8s.metadata.creation_timestamp,
                               reverse=True)
             if not jobs[0].k8s.status.completion_time:
-                self._add_failing_resource(jobs[0])
+                if jobs[0].k8s.status.active and jobs[0].k8s.status.active > 0:
+                    self.__logger.warning(
+                        "Job %s is still running", jobs[0].name)
+                else:
+                    # timeout or failed job
+                    self._add_failing_resource(jobs[0])
 
     def _get_cron_job_name(self, k8s):
         if k8s.metadata.owner_references:
@@ -458,17 +604,19 @@ class CheckK8sPodsStep(CheckK8sResourcesUsingPodsStep):
                 for k8s_container in k8s.status.container_statuses:
                     pod.running_containers += self._parse_container(
                         pod, k8s_container)
+            pod.details['node'] = k8s.spec.node_name
             pod.events = self.core.list_namespaced_event(
                 self.namespace,
                 field_selector="involvedObject.name={}".format(pod.name)).items
-            self.jinja_env.get_template('pod.html.j2').stream(pod=pod).dump(
-                '{}/pod-{}.html'.format(self.res_dir, pod.name))
+            if settings.STORE_ARTIFACTS:
+                self.jinja_env.get_template('pod.html.j2').stream(pod=pod).dump(
+                    '{}/pod-{}.html'.format(self.res_dir, pod.name))
             if any(waiver_elt in pod.name for waiver_elt in settings.WAIVER_LIST):
                 self.__logger.warning("Waiver pattern found in pod, exclude %s", pod.name)
             else:
                 self.all_resources.append(pod)
 
-        if settings.CHECK_POD_VERSIONS:
+        if settings.CHECK_POD_VERSIONS and settings.STORE_ARTIFACTS:
             self.jinja_env.get_template('version.html.j2').stream(
                 pod_versions=pod_versions).dump('{}/versions.html'.format(
                     self.res_dir))
@@ -516,7 +664,7 @@ class CheckK8sPodsStep(CheckK8sResourcesUsingPodsStep):
                 pod.init_done = False
         else:
             pod.restart_count = max(pod.restart_count, container.restart_count)
-        if settings.STORE_ARTIFACTS:
+        if settings.STORE_LOGS and settings.STORE_ARTIFACTS:
             try:
                 log_files = {}
                 logs = self._get_container_logs(pod=pod, container=container, full=False)
@@ -564,13 +712,14 @@ class CheckK8sPodsStep(CheckK8sResourcesUsingPodsStep):
             except client.rest.ApiException as exc:
                 self.__logger.warning("%scontainer %s of pod %s has an exception: %s",
                                       prefix, container.name, pod.name, exc.reason)
-            self.jinja_env.get_template('container_log.html.j2').stream(
-                container=container,
-                pod_name=pod.name,
-                logs=logs,
-                old_logs=old_logs,
-                log_files=log_files).dump('{}/pod-{}-{}-logs.html'.format(
-                    self.res_dir, pod.name, container.name))
+            if settings.STORE_ARTIFACTS:
+                self.jinja_env.get_template('container_log.html.j2').stream(
+                    container=container,
+                    pod_name=pod.name,
+                    logs=logs,
+                    old_logs=old_logs,
+                    log_files=log_files).dump('{}/pod-{}-{}-logs.html'.format(
+                        self.res_dir, pod.name, container.name))
         if any(waiver_elt in container.name for waiver_elt in settings.WAIVER_LIST):
             self.__logger.warning(
                 "Waiver pattern found in container, exclude %s", container.name)
@@ -601,9 +750,10 @@ class CheckK8sServicesStep(CheckK8sResourcesUsingPodsStep):
             (service.pods,
              service.failed_pods) = self._find_child_pods(k8s.spec.selector)
 
-            self.jinja_env.get_template('service.html.j2').stream(
-                service=service).dump('{}/service-{}.html'.format(
-                    self.res_dir, service.name))
+            if settings.STORE_ARTIFACTS:
+                self.jinja_env.get_template('service.html.j2').stream(
+                    service=service).dump('{}/service-{}.html'.format(
+                        self.res_dir, service.name))
             self.all_resources.append(service)
 
 
@@ -636,9 +786,10 @@ class CheckK8sDeploymentsStep(CheckK8sResourcesUsingPodsStep):
                 self.namespace,
                 field_selector=field_selector).items
 
-            self.jinja_env.get_template('deployment.html.j2').stream(
-                deployment=deployment).dump('{}/deployment-{}.html'.format(
-                    self.res_dir, deployment.name))
+            if settings.STORE_ARTIFACTS:
+                self.jinja_env.get_template('deployment.html.j2').stream(
+                    deployment=deployment).dump('{}/deployment-{}.html'.format(
+                        self.res_dir, deployment.name))
 
             if k8s.status.unavailable_replicas:
                 self._add_failing_resource(deployment)
@@ -676,9 +827,10 @@ class CheckK8sReplicaSetsStep(CheckK8sResourcesUsingPodsStep):
                 self.namespace,
                 field_selector=field_selector).items
 
-            self.jinja_env.get_template('replicaset.html.j2').stream(
-                replicaset=replicaset).dump('{}/replicaset-{}.html'.format(
-                    self.res_dir, replicaset.name))
+            if settings.STORE_ARTIFACTS:
+                self.jinja_env.get_template('replicaset.html.j2').stream(
+                    replicaset=replicaset).dump('{}/replicaset-{}.html'.format(
+                        self.res_dir, replicaset.name))
 
             if (not k8s.status.ready_replicas or
                     (k8s.status.ready_replicas < k8s.status.replicas)):
@@ -717,9 +869,10 @@ class CheckK8sStatefulSetsStep(CheckK8sResourcesUsingPodsStep):
                 self.namespace,
                 field_selector=field_selector).items
 
-            self.jinja_env.get_template('statefulset.html.j2').stream(
-                statefulset=statefulset).dump('{}/statefulset-{}.html'.format(
-                    self.res_dir, statefulset.name))
+            if settings.STORE_ARTIFACTS:
+                self.jinja_env.get_template('statefulset.html.j2').stream(
+                    statefulset=statefulset).dump('{}/statefulset-{}.html'.format(
+                        self.res_dir, statefulset.name))
 
             if ((not k8s.status.ready_replicas)
                     or (k8s.status.ready_replicas < k8s.status.replicas)):
@@ -755,9 +908,10 @@ class CheckK8sDaemonSetsStep(CheckK8sResourcesUsingPodsStep):
                 self.namespace,
                 field_selector=field_selector).items
 
-            self.jinja_env.get_template('daemonset.html.j2').stream(
-                daemonset=daemonset).dump('{}/daemonset-{}.html'.format(
-                    self.res_dir, daemonset.name))
+            if settings.STORE_ARTIFACTS:
+                self.jinja_env.get_template('daemonset.html.j2').stream(
+                    daemonset=daemonset).dump('{}/daemonset-{}.html'.format(
+                        self.res_dir, daemonset.name))
 
             if k8s.status.number_ready < k8s.status.desired_number_scheduled:
                 self._add_failing_resource(daemonset)
@@ -787,6 +941,7 @@ class CheckNamespaceStatusStep(CheckK8sResourcesStep):
         self.ingress_list_step = None
         self.pvc_list_step = None
         self.node_list_step = None
+        self.vulnerabilityreports_list_step = None
         if not settings.IF_VALIDATION:
             if settings.IN_CLUSTER:
                 config.load_incluster_config()
@@ -833,6 +988,7 @@ class CheckNamespaceStatusStep(CheckK8sResourcesStep):
         ingress_list_step = CheckK8sIngressesStep(namespace)
         pvc_list_step = CheckK8sPvcsStep(namespace)
         node_list_step = CheckK8sNodesStep(namespace)
+        vulnerabilityreports_list_step = CheckTrivyVulnerabilitiesStep(namespace)
         if namespace == settings.K8S_TESTS_NAMESPACE:
             self.job_list_step = job_list_step
             self.pod_list_step = pod_list_step
@@ -846,7 +1002,9 @@ class CheckNamespaceStatusStep(CheckK8sResourcesStep):
             self.ingress_list_step = ingress_list_step
             self.pvc_list_step = pvc_list_step
             self.node_list_step = node_list_step
+            self.vulnerabilityreports_list_step = vulnerabilityreports_list_step
             self.add_step(node_list_step)
+        self.add_step(vulnerabilityreports_list_step)
         self.add_step(job_list_step)
         self.add_step(pod_list_step)
         self.add_step(service_list_step)
@@ -876,7 +1034,7 @@ class CheckNamespaceStatusStep(CheckK8sResourcesStep):
         Use settings values:
          - K8S_TESTS_NAMESPACE
          - STATUS_RESULTS_DIRECTORY
-         - STORE_ARTIFACTS
+         - STORE_LOGS
          - CHECK_POD_VERSIONS
          - IGNORE_EMPTY_REPLICAS
          - INCLUDE_ALL_RES_IN_DETAILS
@@ -902,12 +1060,13 @@ class CheckNamespaceStatusStep(CheckK8sResourcesStep):
         self.failing_daemonsets = self.daemonset_list_step.failing_resources
         self.failing_pvcs = self.pvc_list_step.failing_resources
 
-        self.jinja_env.get_template('index.html.j2').stream(
-            ns=self,
-            delta=delta).dump('{}/index.html'.format(self.res_dir))
-        self.jinja_env.get_template('raw_output.txt.j2').stream(
-            ns=self, namespace=self.namespace).dump('{}/onap-k8s.log'.format(
-                self.res_dir))
+        if settings.STORE_ARTIFACTS:
+            self.jinja_env.get_template('index.html.j2').stream(
+                ns=self,
+                delta=delta).dump('{}/index.html'.format(self.res_dir))
+            self.jinja_env.get_template('raw_output.txt.j2').stream(
+                ns=self, namespace=self.namespace).dump('{}/onap-k8s.log'.format(
+                    self.res_dir))
 
         details = {"namespace": {
             "all": list(self.namespaces_to_check_set - set([self.namespace])),
@@ -917,7 +1076,14 @@ class CheckNamespaceStatusStep(CheckK8sResourcesStep):
         def store_results(result_dict, step):
             result_dict[step.resource_type] = {
                 'number_failing': len(step.failing_resources),
-                'failing': self.map_by_name(step.failing_resources)
+                'failing': self.map_by_name(step.failing_resources),
+                'failing_reasons': self.map_by_failing_reasons(step.failing_resources),
+                'number_unstable': len(step.unstable_resources),
+                'unstable': self.map_by_name(step.unstable_resources),
+                'unstable_reasons': self.map_by_unstability_reasons(
+                    step.unstable_resources),
+                'details': self.map_by_details(step.all_resources),
+                'k8s_issue': step.k8s_issue
             }
             if settings.INCLUDE_ALL_RES_IN_DETAILS:
                 result_dict[step.resource_type]['all'] = self.map_by_name(step.all_resources)
@@ -947,3 +1113,18 @@ class CheckNamespaceStatusStep(CheckK8sResourcesStep):
     def map_by_name(self, resources):
         """Get resources' names."""
         return list(map(lambda resource: resource.name, resources))
+
+    def map_by_failing_reasons(self, resources):
+        """Get resources' failing reasons."""
+        return dict(map(lambda resource: (resource.name, resource.failing_reasons),
+                        resources))
+
+    def map_by_unstability_reasons(self, resources):
+        """Get resources' instability reasons."""
+        return dict(map(lambda resource: (resource.name, resource.unstability_reasons),
+                        resources))
+
+    def map_by_details(self, resources):
+        """Get resources' details."""
+        return dict(map(lambda resource: (resource.name, resource.details),
+                        resources))
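
For orientation, a minimal sketch of the per-resource-type block that store_results() now assembles, with the added reason and detail maps; the deployment name and event reason are illustrative, and k8s_issue is assumed to default to None when no API problem occurred.

    # Illustrative only: shape of one entry produced by store_results().
    example_result = {
        "deployment": {
            "number_failing": 1,
            "failing": ["onap-so-bpmn-infra"],
            "failing_reasons": {"onap-so-bpmn-infra": []},
            "number_unstable": 1,
            "unstable": ["onap-so-bpmn-infra"],
            "unstable_reasons": {"onap-so-bpmn-infra": ["BackOff"]},
            "details": {"onap-so-bpmn-infra": {}},
            "k8s_issue": None
        }
    }
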
index fcda251..afed216 100644 (file)
@@ -11,7 +11,7 @@ class CloudRegionCreateStep(BaseStep):
 
     def __init__(self):
         """Initialize step."""
-        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
 
     @property
     def description(self) -> str:
@@ -54,3 +54,18 @@ class CloudRegionCreateStep(BaseStep):
                 owner_defined_type=settings.CLOUD_OWNER_DEFINED_TYPE,
                 complex_name=settings.COMPLEX_PHYSICAL_LOCATION_ID
             )
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup created cloud region."""
+        self._logger.info("Clean the cloud region")
+        try:
+            cloud_region = CloudRegion.get_by_id(
+                cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+                cloud_region_id=settings.CLOUD_REGION_ID,
+            )
+            cloud_region.delete()
+        except ResourceNotFound:
+            self._logger.info("Resource trying to delete is not available..")
+        super().cleanup()
diff --git a/src/onaptests/steps/cloud/cloud_region_upadte.py b/src/onaptests/steps/cloud/cloud_region_upadte.py
new file mode 100644 (file)
index 0000000..e9a3c27
--- /dev/null
@@ -0,0 +1,101 @@
+"""A&AI cloud region updation module."""
+from onapsdk.aai.cloud_infrastructure import CloudRegion
+from onapsdk.configuration import settings
+from onapsdk.exceptions import APIError
+
+from onaptests.utils.exceptions import OnapTestException
+
+from ..base import BaseStep
+
+
+class CloudRegionUpdateStep(BaseStep):
+    """Cloud region update step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Update cloud region."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Update cloud region.
+
+        Use settings values:
+         - CLOUD_REGION_CLOUD_OWNER,
+         - CLOUD_REGION_ID,
+         - UPDATED_CLOUD_TYPE,
+         - CLOUD_REGION_VERSION,
+         - UPDATED_CLOUD_OWNER_DEFINED_TYPE,
+         - COMPLEX_PHYSICAL_LOCATION_ID.
+
+        """
+        super().execute()
+        self._logger.info("*Check if cloud region exists *")
+
+        cloud_region = CloudRegion.get_by_id(
+            cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+            cloud_region_id=settings.CLOUD_REGION_ID,
+        )
+        cloud_region.update(
+            cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+            cloud_region_id=settings.CLOUD_REGION_ID,
+            orchestration_disabled=False,
+            in_maint=False,
+            cloud_type=settings.UPDATED_CLOUD_TYPE,
+            cloud_region_version=settings.CLOUD_REGION_VERSION,
+            owner_defined_type=settings.UPDATED_CLOUD_OWNER_DEFINED_TYPE,
+            complex_name=settings.COMPLEX_PHYSICAL_LOCATION_ID
+        )
+
+
+class CloudRegionGetByIDStep(BaseStep):
+    """Cloud region getting by id step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Get cloud region by ID."
+
+    @property
+    def component(self) -> str:
+        """Component name.
+
+        Name of the component this step is related to.
+            Usually the name of an ONAP component.
+
+        Returns:
+            str: Component name
+
+        """
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Get cloud region by id.
+
+        Use settings values:
+         - CLOUD_REGION_CLOUD_OWNER,
+         - CLOUD_REGION_ID.
+        """
+        super().execute()
+        self._logger.info("*Check if cloud region exists *")
+        try:
+            CloudRegion.get_by_id(
+                cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+                cloud_region_id=settings.CLOUD_REGION_ID
+            )
+        except APIError as exc:
+            raise OnapTestException("Getting cloud region failed.") from exc
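
The two new step classes are self-contained and can be chained behind the existing create step through add_step, following the substep pattern used throughout this change. A hypothetical wiring sketch; the parent class name is illustrative and the module name simply follows the new file's name, with BaseStep possibly requiring further overrides.

    from onapsdk.configuration import settings

    from ..base import BaseStep
    from .cloud_region_create import CloudRegionCreateStep
    from .cloud_region_upadte import CloudRegionGetByIDStep, CloudRegionUpdateStep


    class CloudRegionLifecycleStep(BaseStep):
        """Illustrative parent step: create, update, then read back a cloud region."""

        def __init__(self):
            super().__init__(cleanup=settings.CLEANUP_FLAG)
            # Substeps run as part of this step's execution.
            self.add_step(CloudRegionCreateStep())
            self.add_step(CloudRegionUpdateStep())
            self.add_step(CloudRegionGetByIDStep())

        @property
        def description(self) -> str:
            return "Create, update and read back a cloud region."

        @property
        def component(self) -> str:
            return "AAI"
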
index 7afad16..e9f1069 100644 (file)
@@ -1,6 +1,6 @@
 from onapsdk.aai.cloud_infrastructure import Complex
 from onapsdk.configuration import settings
-from onapsdk.exceptions import APIError
+from onapsdk.exceptions import APIError, ResourceNotFound
 
 from ..base import BaseStep
 
@@ -10,7 +10,7 @@ class ComplexCreateStep(BaseStep):
 
     def __init__(self):
         """Initialize step."""
-        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
 
     @property
     def description(self) -> str:
@@ -32,10 +32,28 @@ class ComplexCreateStep(BaseStep):
 
         """
         super().execute()
+        self._logger.info("*Check if complex exists *")
         try:
+            Complex.get_by_physical_location_id(settings.COMPLEX_PHYSICAL_LOCATION_ID)
+            self._logger.info("Requested resource Available in AAI .")
+        except ResourceNotFound:
+            self._logger.warning("if requested resource not available create it")
             Complex.create(
                 physical_location_id=settings.COMPLEX_PHYSICAL_LOCATION_ID,
                 data_center_code=settings.COMPLEX_DATA_CENTER_CODE,
                 name=settings.COMPLEX_PHYSICAL_LOCATION_ID)
         except APIError:
             self._logger.warning("Try to update the complex failed.")
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup created complex."""
+        self._logger.info("Clean the complex")
+        try:
+            complex_instance = Complex.get_by_physical_location_id(
+                settings.COMPLEX_PHYSICAL_LOCATION_ID)
+            complex_instance.delete()
+        except ResourceNotFound:
+            self._logger.info("Resource trying to delete is not available..")
+        super().cleanup()
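
The execute() above now follows a check-then-create pattern so the step stays idempotent. A minimal standalone sketch of the same idea, assuming Complex.create returns the created object; the helper name is illustrative and not part of the commit.

    from onapsdk.aai.cloud_infrastructure import Complex
    from onapsdk.exceptions import ResourceNotFound


    def ensure_complex(physical_location_id: str, data_center_code: str) -> Complex:
        """Return the complex, creating it in AAI first if it does not exist yet."""
        try:
            return Complex.get_by_physical_location_id(physical_location_id)
        except ResourceNotFound:
            # Not found: create it, using the location id as its name like the step does.
            return Complex.create(
                physical_location_id=physical_location_id,
                data_center_code=data_center_code,
                name=physical_location_id)
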
diff --git a/src/onaptests/steps/cloud/complex_update.py b/src/onaptests/steps/cloud/complex_update.py
new file mode 100644 (file)
index 0000000..faa37e1
--- /dev/null
@@ -0,0 +1,42 @@
+from onapsdk.aai.cloud_infrastructure import Complex
+from onapsdk.configuration import settings
+
+from ..base import BaseStep
+
+
+class ComplexUpdateStep(BaseStep):
+    """Complex update step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Update complex."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Update complex.
+
+        Use settings values:
+         - COMPLEX_PHYSICAL_LOCATION_ID,
+         - COMPLEX_DATA_CENTER_CODE.
+         - UPDATED_PHYSICAL_LOCATION_TYPE
+
+        """
+        super().execute()
+        self._logger.info("*Check if complex exists *")
+        complex_instance = Complex.get_by_physical_location_id(
+            settings.COMPLEX_PHYSICAL_LOCATION_ID)
+        complex_instance.update(
+            physical_location_id=settings.COMPLEX_PHYSICAL_LOCATION_ID,
+            physical_location_type=settings.UPDATED_PHYSICAL_LOCATION_TYPE,
+            data_center_code=settings.COMPLEX_DATA_CENTER_CODE,
+            name=settings.COMPLEX_PHYSICAL_LOCATION_ID)
index 2bc206d..1581d34 100644 (file)
@@ -45,7 +45,6 @@ class ConnectServiceSubToCloudRegionStep(BaseStep):
 
         Use settings values:
          - GLOBAL_CUSTOMER_ID,
-         - SERVICE_NAME,
          - CLOUD_REGION_CLOUD_OWNER,
          - CLOUD_REGION_ID.
 
@@ -54,7 +53,7 @@ class ConnectServiceSubToCloudRegionStep(BaseStep):
         customer: Customer = Customer.get_by_global_customer_id(
             settings.GLOBAL_CUSTOMER_ID)
         service_subscription: ServiceSubscription = \
-            customer.get_service_subscription_by_service_type(settings.SERVICE_NAME)
+            customer.get_service_subscription_by_service_type(self.service_type)
         cloud_region: CloudRegion = CloudRegion.get_by_id(
             cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
             cloud_region_id=settings.CLOUD_REGION_ID,
index 7bffb1a..b86df5d 100644 (file)
@@ -1,6 +1,6 @@
 from onapsdk.aai.business import Customer
 from onapsdk.configuration import settings
-from onapsdk.exceptions import APIError
+from onapsdk.exceptions import APIError, ResourceNotFound
 
 from ..base import BaseStep
 
@@ -10,7 +10,7 @@ class CustomerCreateStep(BaseStep):
 
     def __init__(self):
         """Initialize step."""
-        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
 
     @property
     def description(self) -> str:
@@ -29,8 +29,26 @@ class CustomerCreateStep(BaseStep):
         Use settings values:
          - GLOBAL_CUSTOMER_ID.
         """
+
         super().execute()
+        self._logger.info("*Check if customer exists *")
         try:
+            Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
+            self._logger.warning("Requested resource Available in AAI .")
+        except ResourceNotFound:
+            self._logger.warning("if requested resource not available, create it")
             Customer.create(settings.GLOBAL_CUSTOMER_ID, settings.GLOBAL_CUSTOMER_ID, "INFRA")
         except APIError:
             self._logger.warning("Try to update the Customer failed.")
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup created customer."""
+        self._logger.info("Clean the customer")
+        try:
+            customer = Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
+            customer.delete()
+        except ResourceNotFound:
+            self._logger.info("Resource trying to delete is not available..")
+        super().cleanup()
index 170f033..9ebd80f 100644 (file)
@@ -1,6 +1,6 @@
-from onapsdk.aai.business import Customer
+from onapsdk.aai.business import Customer, ServiceSubscription
 from onapsdk.configuration import settings
-from onapsdk.sdc.service import Service
+from onapsdk.exceptions import ResourceNotFound
 
 from ..base import BaseStep
 from .customer_create import CustomerCreateStep
@@ -15,7 +15,7 @@ class CustomerServiceSubscriptionCreateStep(BaseStep):
         Substeps:
             - CustomerCreateStep.
         """
-        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
         self.add_step(CustomerCreateStep())
 
     @property
@@ -37,6 +37,19 @@ class CustomerServiceSubscriptionCreateStep(BaseStep):
          - SERVICE_NAME.
         """
         super().execute()
-        service = Service(name=settings.SERVICE_NAME)
         customer = Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
-        customer.subscribe_service(service.name)
+        customer.subscribe_service(self.service_type)
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup created service subscription."""
+        self._logger.info("Clean the service subscription")
+        try:
+            customer = Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
+            service_subscription: ServiceSubscription = \
+                customer.get_service_subscription_by_service_type(self.service_type)
+            customer.delete_subscribed_service(service_subscription)
+        except ResourceNotFound:
+            self._logger.info("Resource trying to delete is not available..")
+        super().cleanup()
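
The subscription now keys off self.service_type instead of a hard-coded SERVICE_NAME, and cleanup removes exactly what execute created. A minimal sketch of the same round trip using the onapsdk calls seen above; the customer id and service type values are made up.

    from onapsdk.aai.business import Customer

    # Subscribe an existing customer to a service type, then remove the subscription.
    customer = Customer.get_by_global_customer_id("generic")
    customer.subscribe_service("basic_onboard_service")
    subscription = customer.get_service_subscription_by_service_type("basic_onboard_service")
    customer.delete_subscribed_service(subscription)
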
diff --git a/src/onaptests/steps/cloud/customer_update.py b/src/onaptests/steps/cloud/customer_update.py
new file mode 100644 (file)
index 0000000..95cb266
--- /dev/null
@@ -0,0 +1,36 @@
+from onapsdk.aai.business import Customer
+from onapsdk.configuration import settings
+
+from ..base import BaseStep
+
+
+class CustomerUpdateStep(BaseStep):
+    """Customer update step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Update customer."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Update cutomer.
+
+        Use settings values:
+         - GLOBAL_CUSTOMER_ID,
+         - UPDATED_SUBSCRIBER_TYPE
+        """
+
+        super().execute()
+        self._logger.info("*Check if customer exists *")
+        customer = Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
+        customer.update(settings.GLOBAL_CUSTOMER_ID, settings.UPDATED_SUBSCRIBER_TYPE, "INFRA")
index db5b646..6f26da2 100644 (file)
@@ -19,7 +19,7 @@ class ExposeServiceNodePortStep(BaseStep):
         """Initialize step."""
         super().__init__(cleanup=settings.CLEANUP_FLAG)
         self.component_value = component
-        self.service_name = service_name
+        self.k8s_service_name = service_name
         self.port = port
         self.node_port = node_port
         self.k8s_client: client.CoreV1Api = None
@@ -45,7 +45,7 @@ class ExposeServiceNodePortStep(BaseStep):
         """
         try:
             service_data: Dict[str, Any] = self.k8s_client.read_namespaced_service(
-                self.service_name,
+                self.k8s_service_name,
                 settings.K8S_TESTS_NAMESPACE
             )
             return service_data.spec.type == "NodePort"
@@ -72,7 +72,7 @@ class ExposeServiceNodePortStep(BaseStep):
         if not self.is_service_node_port_type():
             try:
                 self.k8s_client.patch_namespaced_service(
-                    self.service_name,
+                    self.k8s_service_name,
                     settings.K8S_TESTS_NAMESPACE,
                     {"spec": {"ports": [{"port": self.port,
                                          "nodePort": self.node_port}],
@@ -97,7 +97,7 @@ class ExposeServiceNodePortStep(BaseStep):
         if self.is_service_node_port_type():
             try:
                 self.k8s_client.patch_namespaced_service(
-                    self.service_name,
+                    self.k8s_service_name,
                     settings.K8S_TESTS_NAMESPACE,
                     [
                         {
diff --git a/src/onaptests/steps/cloud/lineofbusiness_create.py b/src/onaptests/steps/cloud/lineofbusiness_create.py
new file mode 100644 (file)
index 0000000..94b8101
--- /dev/null
@@ -0,0 +1,53 @@
+from onapsdk.aai.business import LineOfBusiness
+from onapsdk.configuration import settings
+from onapsdk.exceptions import ResourceNotFound
+
+from ..base import BaseStep
+
+
+class LineofBusinessCreateStep(BaseStep):
+    """LineofBusiness creation step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Create LineofBusiness."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Create LineofBusiness.
+
+        Use settings values:
+         - LINE_OF_BUSINESS
+
+        """
+        super().execute()
+        self._logger.info("*Check if LOB exists *")
+        try:
+            LineOfBusiness.get_by_name(settings.LINE_OF_BUSINESS)
+            self._logger.warning("Requested Resource Available in AAI .")
+        except ResourceNotFound:
+            self._logger.warning("if requested resource not available, create it")
+            LineOfBusiness.create(settings.LINE_OF_BUSINESS)
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup created line of business."""
+        self._logger.info("Clean the line of business")
+        try:
+            line_of_business = LineOfBusiness.get_by_name(settings.LINE_OF_BUSINESS)
+            line_of_business.delete()
+        except ResourceNotFound:
+            self._logger.info("Resource trying to delete is not available..")
+        super().cleanup()
index 8f5dad6..23b757a 100644 (file)
@@ -15,7 +15,7 @@ class LinkCloudRegionToComplexStep(BaseStep):
             - ComplexCreateStep,
             - CloudRegionCreateStep.
         """
-        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
         self.add_step(ComplexCreateStep())
 
     @property
@@ -47,3 +47,25 @@ class LinkCloudRegionToComplexStep(BaseStep):
             cloud_region_id=settings.CLOUD_REGION_ID,
         )
         cloud_region.link_to_complex(cmplx)
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Unlink cloud region and complex.
+
+        Delete previously created relationship
+        Use settings values:
+         - COMPLEX_PHYSICAL_LOCATION_ID,
+         - CLOUD_REGION_CLOUD_OWNER,
+         - CLOUD_REGION_ID.
+
+        """
+        cmplx = Complex(
+            physical_location_id=settings.COMPLEX_PHYSICAL_LOCATION_ID,
+            name=settings.COMPLEX_PHYSICAL_LOCATION_ID
+        )
+        cloud_region = CloudRegion.get_by_id(
+            cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+            cloud_region_id=settings.CLOUD_REGION_ID,
+        )
+        cloud_region.unlink_complex(cmplx)
+        return super().cleanup()
diff --git a/src/onaptests/steps/cloud/link_cloudregion_to_project.py b/src/onaptests/steps/cloud/link_cloudregion_to_project.py
new file mode 100644 (file)
index 0000000..6cf253a
--- /dev/null
@@ -0,0 +1,70 @@
+import logging
+
+from onapsdk.aai.business.project import Project
+from onapsdk.aai.cloud_infrastructure import CloudRegion
+from onapsdk.configuration import settings
+from onapsdk.exceptions import ResourceNotFound
+
+from ..base import BaseStep
+from .cloud_region_create import CloudRegionCreateStep
+from .project_create import ProjectCreateStep
+
+
+class LinkCloudRegionToProjectStep(BaseStep):
+    """Link cloud region to project step"""
+
+    __logger = logging.getLogger(__name__)
+
+    def __init__(self):
+        """Initialize step.
+
+        Substeps:
+            - ProjectCreateStep,
+            - CloudRegionCreateStep.
+        """
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+        self.add_step(ProjectCreateStep())
+        self.add_step(CloudRegionCreateStep())
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Connect cloud region with project."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Link cloud region to project.
+
+        Use settings values:
+         - PROJECT
+         - CLOUD_REGION_CLOUD_OWNER,
+         - CLOUD_REGION_ID.
+        """
+        super().execute()
+        project = Project.get_by_name(settings.PROJECT)
+        cloud_region = CloudRegion.get_by_id(
+            cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+            cloud_region_id=settings.CLOUD_REGION_ID,
+        )
+        self.__logger.info("Link between cloud region and project is going to be created")
+        cloud_region.link_to_project(project)
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup created platform."""
+        self._logger.info("Clean the relationship")
+        try:
+            cloud_region = CloudRegion.get_by_id(
+                cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+                cloud_region_id=settings.CLOUD_REGION_ID,
+            )
+            project = Project.get_by_name(settings.PROJECT)
+            cloud_region.delete_project(project)
+        except ResourceNotFound:
+            self._logger.info("One of resource from relationship is not available..")
+        super().cleanup()
diff --git a/src/onaptests/steps/cloud/link_lineofbusiness_to_tenant.py b/src/onaptests/steps/cloud/link_lineofbusiness_to_tenant.py
new file mode 100644 (file)
index 0000000..773a9d1
--- /dev/null
@@ -0,0 +1,74 @@
+import logging
+
+from onapsdk.aai.business import LineOfBusiness
+from onapsdk.aai.cloud_infrastructure import CloudRegion
+from onapsdk.configuration import settings
+from onapsdk.exceptions import ResourceNotFound
+
+from ..base import BaseStep
+from .lineofbusiness_create import LineofBusinessCreateStep
+from .tenant_create import TenantCreateStep
+
+
+class LinkLineOfBusinessToTenantStep(BaseStep):
+    """Link line of busniess to tenant step"""
+
+    __logger = logging.getLogger(__name__)
+
+    def __init__(self) -> None:
+        """Initialize step.
+
+        Substeps:
+            - LineofBusinessCreateStep,
+            - TenantCreateStep.
+        """
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+        self.add_step(LineofBusinessCreateStep())
+        self.add_step(TenantCreateStep())
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Connect line of business with tenant."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Link line of business to tenant.
+
+        Use settings values:
+         - TENANT_ID
+         - TENANT_NAME,
+         - CLOUD_REGION_CLOUD_OWNER,
+         - CLOUD_REGION_ID.
+        """
+        super().execute()
+        cloud_region = CloudRegion.get_by_id(
+            cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+            cloud_region_id=settings.CLOUD_REGION_ID,
+        )
+        tenant = cloud_region.get_tenant(tenant_id=settings.TENANT_ID)
+        line_of_business = LineOfBusiness.get_by_name(name=settings.LINE_OF_BUSINESS)
+        self.__logger.info("Creating relationship between line of business and tenant")
+        line_of_business.link_to_tenant(tenant)
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup relationship between line of business and tenant."""
+        self._logger.info("Clean the relationship")
+        try:
+            cloud_region = CloudRegion.get_by_id(
+                cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+                cloud_region_id=settings.CLOUD_REGION_ID,
+            )
+            tenant = cloud_region.get_tenant(tenant_id=settings.TENANT_ID)
+            line_of_business = LineOfBusiness.get_by_name(name=settings.LINE_OF_BUSINESS)
+            line_of_business.delete_relationship_with_tenant(tenant)
+        except ResourceNotFound:
+            self._logger.info("One of resource from relationship is not available..")
+        super().cleanup()
diff --git a/src/onaptests/steps/cloud/link_owningentity_to_tenant.py b/src/onaptests/steps/cloud/link_owningentity_to_tenant.py
new file mode 100644 (file)
index 0000000..1b05d57
--- /dev/null
@@ -0,0 +1,74 @@
+import logging
+
+from onapsdk.aai.business import OwningEntity
+from onapsdk.aai.cloud_infrastructure import CloudRegion
+from onapsdk.configuration import settings
+from onapsdk.exceptions import ResourceNotFound
+
+from ..base import BaseStep
+from .owning_entity_create import OwningEntityCreateStep
+from .tenant_create import TenantCreateStep
+
+
+class LinkOwningEntityToTenantStep(BaseStep):
+    """Link owning entity to tenant step"""
+
+    __logger = logging.getLogger(__name__)
+
+    def __init__(self) -> None:
+        """Initialize step.
+
+        Substeps:
+            - OwningEntityCreateStep,
+            - TenantCreateStep.
+        """
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+        self.add_step(OwningEntityCreateStep())
+        self.add_step(TenantCreateStep())
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Connect owning entity with tenant."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Link owning entity to tenant.
+
+        Use settings values:
+         - TENANT_ID
+         - TENANT_NAME,
+         - CLOUD_REGION_CLOUD_OWNER,
+         - CLOUD_REGION_ID.
+        """
+        super().execute()
+        cloud_region = CloudRegion.get_by_id(
+            cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+            cloud_region_id=settings.CLOUD_REGION_ID,
+        )
+        tenant = cloud_region.get_tenant(tenant_id=settings.TENANT_ID)
+        owning_entity = OwningEntity.get_by_owning_entity_id(settings.OWNING_ENTITY_ID)
+        self.__logger.info("Creating relationship between owning entity and tenant")
+        owning_entity.link_to_tenant(tenant)
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup relationship between owning entity and tenant."""
+        self._logger.info("Clean the relationship")
+        try:
+            cloud_region = CloudRegion.get_by_id(
+                cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+                cloud_region_id=settings.CLOUD_REGION_ID,
+            )
+            tenant = cloud_region.get_tenant(tenant_id=settings.TENANT_ID)
+            owning_entity = OwningEntity.get_by_owning_entity_id(settings.OWNING_ENTITY_ID)
+            owning_entity.delete_relationship_with_tenant(tenant)
+        except ResourceNotFound:
+            self._logger.info("One of resource from relationship is not available..")
+        super().cleanup()
diff --git a/src/onaptests/steps/cloud/owning_entity_create.py b/src/onaptests/steps/cloud/owning_entity_create.py
new file mode 100644 (file)
index 0000000..f1ccd93
--- /dev/null
@@ -0,0 +1,53 @@
+from onapsdk.aai.business import OwningEntity
+from onapsdk.configuration import settings
+from onapsdk.exceptions import ResourceNotFound
+
+from ..base import BaseStep
+
+
+class OwningEntityCreateStep(BaseStep):
+    """OwningEntity creation step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Create OwningEntity."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Create OwningEntity.
+
+        Use settings values:
+         - OWNING_ENTITY
+
+        """
+
+        super().execute()
+        self._logger.info("*Check if Owning Entity exists *")
+        try:
+            OwningEntity.get_by_owning_entity_id(settings.OWNING_ENTITY_ID)
+            self._logger.warning("Requested Resource Available in AAI .")
+        except ResourceNotFound:
+            self._logger.warning("if requested resource not available, create it")
+            OwningEntity.create(settings.OWNING_ENTITY_NAME, settings.OWNING_ENTITY_ID)
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup created owning entity."""
+        self._logger.info("Clean the owning entity")
+        try:
+            own_entity = OwningEntity.get_by_owning_entity_id(settings.OWNING_ENTITY_ID)
+            own_entity.delete()
+        except ResourceNotFound:
+            self._logger.info("Resource trying to delete is not available..")
+        super().cleanup()
diff --git a/src/onaptests/steps/cloud/owning_entity_update.py b/src/onaptests/steps/cloud/owning_entity_update.py
new file mode 100644 (file)
index 0000000..c1665c8
--- /dev/null
@@ -0,0 +1,35 @@
+from onapsdk.aai.business import OwningEntity
+from onapsdk.configuration import settings
+
+from ..base import BaseStep
+
+
+class OwningEntityUpdateStep(BaseStep):
+    """OwningEntity update step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Update OwningEntity."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """update OwningEntity.
+
+        Use settings values:
+         - OWNING_ENTITY
+
+        """
+        super().execute()
+        self._logger.info("*Check if Owning Entity exists *")
+        owning_entity = OwningEntity.get_by_owning_entity_id(settings.OWNING_ENTITY_ID)
+        owning_entity.update(settings.UPDATED_OWNING_ENTITY_NAME, owning_entity.owning_entity_id)
diff --git a/src/onaptests/steps/cloud/platform_create.py b/src/onaptests/steps/cloud/platform_create.py
new file mode 100644 (file)
index 0000000..9724885
--- /dev/null
@@ -0,0 +1,53 @@
+from onapsdk.aai.business import Platform
+from onapsdk.configuration import settings
+from onapsdk.exceptions import ResourceNotFound
+
+from ..base import BaseStep
+
+
+class PlatformCreateStep(BaseStep):
+    """Platform creation step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Create Platform."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Create Platform.
+
+        Use settings values:
+         - PLATFORM
+
+        """
+
+        super().execute()
+        self._logger.info("*Check if Platform exists *")
+        try:
+            Platform.get_by_name(settings.PLATFORM)
+            self._logger.info("Requested Resource Available in AAI .")
+        except ResourceNotFound:
+            self._logger.warning("if requested resource not available, create it")
+            Platform.create(settings.PLATFORM)
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup created platform."""
+        self._logger.info("Clean the platform")
+        try:
+            platform = Platform.get_by_name(settings.PLATFORM)
+            platform.delete()
+        except ResourceNotFound:
+            self._logger.info("Resource trying to delete is not available..")
+        super().cleanup()
diff --git a/src/onaptests/steps/cloud/project_create.py b/src/onaptests/steps/cloud/project_create.py
new file mode 100644 (file)
index 0000000..0943855
--- /dev/null
@@ -0,0 +1,53 @@
+from onapsdk.aai.business import Project
+from onapsdk.configuration import settings
+from onapsdk.exceptions import ResourceNotFound
+
+from ..base import BaseStep
+
+
+class ProjectCreateStep(BaseStep):
+    """Project creation step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Create Project."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Create Project.
+
+        Use settings values:
+         - PROJECT
+
+        """
+
+        super().execute()
+        self._logger.info("*Check if Project exists *")
+        try:
+            Project.get_by_name(settings.PROJECT)
+            self._logger.warning("Requested Resource Available in AAI .")
+        except ResourceNotFound:
+            self._logger.warning("if requested resource not available, create it")
+            Project.create(settings.PROJECT)
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup created project."""
+        self._logger.info("Clean the project")
+        try:
+            project = Project.get_by_name(settings.PROJECT)
+            project.delete()
+        except ResourceNotFound:
+            self._logger.info("Resource trying to delete is not available..")
+        super().cleanup()
diff --git a/src/onaptests/steps/cloud/publish_pnf_reg_event_to_kafka.py b/src/onaptests/steps/cloud/publish_pnf_reg_event_to_kafka.py
new file mode 100644 (file)
index 0000000..14afd48
--- /dev/null
@@ -0,0 +1,57 @@
+import json
+from pathlib import Path
+
+from jinja2 import Environment, FileSystemLoader
+from onapsdk.configuration import settings
+from onapsdk.exceptions import (APIError, ConnectionFailed, RequestError,
+                                ResourceNotFound)
+from onapsdk.kafka import onap_kafka
+
+from onaptests.utils.exceptions import OnapTestException
+from onaptests.utils.kubernetes_kafka import KubernetesKafka
+
+from ..base import BaseStep
+
+
+class PublishVESRegistrationEventToKafkaStep(BaseStep):
+    """Step to publish VES Registration event on kafka"""
+
+    @property
+    def description(self) -> str:
+        return "Step to publish VES Registration event on kafka"
+
+    @property
+    def component(self) -> str:
+        return "Kafka"
+
+    @BaseStep.store_state
+    def execute(self) -> None:
+        """Publish a VES Registration event on kafka by calling its REST API"""
+
+        super().execute()
+
+        environment = Environment(loader=FileSystemLoader((Path(__file__).parent.parent.parent)
+                                                          .joinpath("templates/artifacts/")))
+        template = environment.get_template("pnf_registration_dmaap_event_template.json.j2")
+
+        reg_event_parameters = {
+            "sourceName": settings.PNF_NAME,
+            "serialNumber": settings.PNF_SERIAL_NUMBER,
+            "oamV6IpAddress": settings.PNF_IPADDRESS_V6_OAM,
+            "oamV4IpAddress": settings.PNF_IPADDRESS_V4_OAM
+        }
+        reg_event = template.render(reg_event_parameters)
+        reg_event_list = json.loads(reg_event)
+        formatted_json_data = json.dumps(reg_event_list[0], separators=(',', ':'))
+
+        reader = KubernetesKafka()
+        reader.read_kafka_admin_secret()
+        kafka_password = reader.get_kafka_admin_password()
+
+        try:
+            onap_kafka.publish_event_on_topic(settings.KAFKA_USER, kafka_password,
+                                              formatted_json_data.encode('utf-8'),
+                                              settings.PNF_REGISTRATION_TOPIC_NAME)
+        except (RequestError, ResourceNotFound, APIError, ConnectionFailed) as exc:
+            self._logger.error("Error while publishing event via kafka")
+            raise OnapTestException(exc) from exc
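
Before wiring the step into a scenario it can help to render the same template standalone and inspect the payload that would be published. A minimal sketch, assuming the template directory is reachable from the working directory; the PNF values are made up.

    import json
    from pathlib import Path

    from jinja2 import Environment, FileSystemLoader

    templates = Path("src/onaptests/templates/artifacts")
    env = Environment(loader=FileSystemLoader(templates))
    template = env.get_template("pnf_registration_dmaap_event_template.json.j2")
    event = template.render({
        "sourceName": "pnf-test-1",
        "serialNumber": "0000001",
        "oamV6IpAddress": "2001:db8::1",
        "oamV4IpAddress": "10.0.0.1",
    })
    # The template renders a list; the step publishes its first element, compacted.
    payload = json.dumps(json.loads(event)[0], separators=(',', ':'))
    print(payload)
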
index 00af163..56b9a28 100644 (file)
@@ -2,7 +2,7 @@
 import time
 from uuid import uuid4
 
-from onapsdk.aai.cloud_infrastructure import CloudRegion
+from onapsdk.aai.cloud_infrastructure import CloudRegion, Tenant
 from onapsdk.configuration import settings
 from onapsdk.exceptions import ResourceNotFound
 
@@ -20,7 +20,7 @@ class RegisterCloudRegionStep(BaseStep):
         Substeps:
             - CloudRegionCreateStep.
         """
-        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
         self.add_step(CloudRegionCreateStep())
 
     @property
@@ -38,13 +38,17 @@ class RegisterCloudRegionStep(BaseStep):
         """Register cloud region.
 
         Use settings values:
+         - AVAILABILITY_ZONE_NAME,
+         - AVAILABILITY_ZONE_TYPE,
          - CLOUD_REGION_CLOUD_OWNER,
          - CLOUD_REGION_ID,
          - CLOUD_DOMAIN,
          - VIM_USERNAME,
          - VIM_PASSWORD,
          - VIM_SERVICE_URL,
-         - TENANT_NAME.
+         - TENANT_NAME,
+         - TENANT_ID,
+         - USE_MULTICLOUD.
         """
         super().execute()
         cloud_region: CloudRegion = CloudRegion.get_by_id(
@@ -102,3 +106,24 @@ class RegisterCloudRegionStep(BaseStep):
             cloud_region.add_availability_zone(
                 settings.AVAILABILITY_ZONE_NAME,
                 settings.AVAILABILITY_ZONE_TYPE)
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup cloud region registration step."""
+        self._logger.info("Clean after cloud region registration")
+        try:
+            cloud_region = CloudRegion.get_by_id(
+                cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+                cloud_region_id=settings.CLOUD_REGION_ID,
+            )
+            if settings.USE_MULTICLOUD:
+                cloud_region.unregister_from_multicloud()
+            else:
+                try:
+                    tenant: Tenant = cloud_region.get_tenant(settings.TENANT_ID)
+                    tenant.delete()
+                except ResourceNotFound:
+                    self._logger.info("Tenant does not exist")
+        except ResourceNotFound:
+            self._logger.info("Resource trying to delete is not available..")
+        return super().cleanup()
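
The extended docstring above now covers the full set of settings the step (and its new cleanup) consumes; a hedged example of how they might look in a scenario settings module (illustrative values, not project defaults):

    # Illustrative settings excerpt for RegisterCloudRegionStep.
    CLEANUP_FLAG = True
    USE_MULTICLOUD = False
    CLOUD_REGION_CLOUD_OWNER = "basicvm-cloud-owner"
    CLOUD_REGION_ID = "RegionOne"
    CLOUD_DOMAIN = "Default"
    VIM_USERNAME = "admin"
    VIM_PASSWORD = "example-password"  # example only
    VIM_SERVICE_URL = "http://1.2.3.4:5000/v3"
    TENANT_ID = "123456789abcdef"
    TENANT_NAME = "onap-tenant"
    AVAILABILITY_ZONE_NAME = "nova"
    AVAILABILITY_ZONE_TYPE = "nova"
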
index c0cdfe8..23c01bc 100644 (file)
@@ -4,17 +4,25 @@
 class K8sResource():
     """K8sResource class."""
 
-    def __init__(self, k8s=None):
+    def __init__(self, k8s=None, cr=False):
         """Init the k8s resource."""
         self.k8s = k8s
         self.name = ""
         self.events = []
+        self.failing_reasons = []
+        self.unstability_reasons = []
+        self.details = {}
         self.labels = None
         self.annotations = None
         if self.k8s:
-            self.name = self.k8s.metadata.name
-            self.labels = self.k8s.metadata.labels
-            self.annotations = self.k8s.metadata.annotations
+            if cr:
+                self.name = self.k8s['metadata']['name']
+                self.labels = self.k8s['metadata'].get('labels', {})
+                self.annotations = self.k8s['metadata'].get('annotations', {})
+            else:
+                self.name = self.k8s.metadata.name
+                self.labels = self.k8s.metadata.labels
+                self.annotations = self.k8s.metadata.annotations
             self.specific_k8s_init()
         if not self.labels:
             self.labels = {}
@@ -35,6 +43,9 @@ class K8sResource():
             return self.name == other.name
         return False
 
+    def __hash__(self):
+        return hash(self.name)
+
 
 class K8sPodParentResource(K8sResource):
     """K8sPodParentResource class."""
@@ -130,6 +141,30 @@ class Service(K8sPodParentResource):
         self.type = self.k8s.spec.type
 
 
+class VulnerabilityReport(K8sResource):
+    """VulnerabilityReport class."""
+
+    def __init__(self, k8s=None):
+        """Init the service."""
+        self.type = ""
+        super().__init__(k8s=k8s, cr=True)
+
+    def specific_k8s_init(self):
+        """Do the specific part for VulnerabilityReport when k8s object is present."""
+        self.owner_name = ""
+        self.owner_kind = ""
+        self.crt_count = 0
+        self.artifact = ""
+        if self.k8s['metadata'].get('ownerReferences'):
+            self.owner_name = self.k8s['metadata'].get('ownerReferences')[0]['name']
+            self.owner_kind = self.k8s['metadata'].get('ownerReferences')[0]['kind']
+        self.crt_count = self.k8s['report']['summary'].get('criticalCount', 0)
+        self.artifact = (f"{self.k8s['report']['artifact']['repository']}:"
+                         f"{self.k8s['report']['artifact']['tag']}"
+                         )
+
+
 class Job(K8sPodParentResource):
     """Job class."""
 
@@ -168,3 +203,7 @@ class Ingress(K8sResource):
 
 class Node(K8sResource):
     """Node class."""
+
+
+class Namespace(K8sResource):
+    """Namespace class."""
diff --git a/src/onaptests/steps/cloud/service_subscription_update.py b/src/onaptests/steps/cloud/service_subscription_update.py
new file mode 100644 (file)
index 0000000..9f7d178
--- /dev/null
@@ -0,0 +1,36 @@
+from onapsdk.aai.business import Customer
+from onapsdk.configuration import settings
+
+from ..base import BaseStep
+
+
+class ServiceSubscriptionUpdateStep(BaseStep):
+    """ServiceSubscription update step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Update ServiceSubscription."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Update ServiceSubscription.
+
+        Use settings values:
+         - GLOBAL_CUSTOMER_ID,
+         - UPDATED_SUBSCRIBER_TYPE
+        """
+
+        super().execute()
+        self._logger.info("*Check if 5G customer is exists *")
+        customer = Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
+        customer.update(settings.GLOBAL_CUSTOMER_ID, settings.UPDATED_SUBSCRIBER_TYPE, "5G")
diff --git a/src/onaptests/steps/cloud/tenant_create.py b/src/onaptests/steps/cloud/tenant_create.py
new file mode 100644 (file)
index 0000000..eda3ce7
--- /dev/null
@@ -0,0 +1,65 @@
+from onapsdk.aai.cloud_infrastructure import CloudRegion
+from onapsdk.configuration import settings
+from onapsdk.exceptions import ResourceNotFound
+
+from ..base import BaseStep
+from .cloud_region_create import CloudRegionCreateStep
+
+
+class TenantCreateStep(BaseStep):
+    """Tenant creation step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+        self.add_step(CloudRegionCreateStep())
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Create Tenant."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "AAI"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Create Tenant.
+
+        Use settings values:
+         - COMPLEX_PHYSICAL_LOCATION_ID,
+         - COMPLEX_DATA_CENTER_CODE.
+
+        """
+        super().execute()
+        self._logger.info("*Check if tenant exists *")
+        cloud_region = CloudRegion.get_by_id(
+            cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+            cloud_region_id=settings.CLOUD_REGION_ID,
+        )
+        try:
+            tenant = cloud_region.get_tenant(tenant_id=settings.TENANT_ID)
+            self._logger.info("Requested resource Available in AAI .")
+            tenant.delete()
+        except ResourceNotFound:
+            self._logger.warning("if requested resource not available create it")
+        cloud_region.add_tenant(tenant_id=settings.TENANT_ID,
+                                tenant_name=settings.TENANT_NAME)
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup created tenant."""
+        self._logger.info("Clean the Tenant")
+        try:
+            cloud_region = CloudRegion.get_by_id(
+                cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+                cloud_region_id=settings.CLOUD_REGION_ID,
+            )
+            tenant = cloud_region.get_tenant(tenant_id=settings.TENANT_ID)
+            tenant.delete()
+        except ResourceNotFound:
+            self._logger.info("Resource trying to delete is not available..")
+        super().cleanup()
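
A rough sketch of how such a step can be driven on its own, assuming the usual onaptests pattern of selecting a settings module through ONAP_PYTHON_SDK_SETTINGS before any onapsdk import:

    import os

    os.environ["ONAP_PYTHON_SDK_SETTINGS"] = "onaptests.configuration.basic_vm_macro_settings"

    from onaptests.steps.cloud.tenant_create import TenantCreateStep  # noqa: E402

    step = TenantCreateStep()
    try:
        step.execute()   # the cloud region substep runs as part of this, then the tenant is created
    finally:
        step.cleanup()   # deletes the tenant again when cleanup is enabled
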
index 9630ec4..6a93f40 100644 (file)
@@ -68,6 +68,23 @@ class K8SProfileStep(BaseStep):
             return next(iter(self.yaml_template.keys()))
         return self.parent.service_name
 
+    @property
+    def service_type(self) -> str:
+        """Service type.
+
+        Gets from YAML template if it's a root step, gets from parent otherwise.
+        If the YAML template has no service_type key, returns the service name.
+
+        Returns:
+            str: Service type
+
+        """
+        if self.is_root:
+            if "service_type" in self.yaml_template[self.service_name]:
+                return self.yaml_template[self.service_name]["service_type"]
+            return self.service_name
+        return self.parent.service_type
+
     @property
     def service_instance_name(self) -> str:
         """Service instance name.
@@ -119,7 +136,7 @@ class K8SProfileStep(BaseStep):
         super().execute()
         customer: Customer = Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
         service_subscription: ServiceSubscription = \
-            customer.get_service_subscription_by_service_type(self.service_name)
+            customer.get_service_subscription_by_service_type(self.service_type)
         self._service_instance: ServiceInstance = \
             service_subscription.get_service_instance_by_name(self.service_instance_name)
 
index e3aac8a..a31d3b5 100644 (file)
@@ -139,10 +139,12 @@ class YamlTemplateServiceAlaCarteInstantiateStep(YamlTemplateBaseStep):
             service_instantiation.wait_for_finish(settings.ORCHESTRATION_REQUEST_TIMEOUT)
         except TimeoutError as exc:
             self._logger.error("Service instantiation %s timed out", self.service_instance_name)
-            raise onap_test_exceptions.ServiceCleanupException from exc
+            raise onap_test_exceptions.ServiceInstantiateException(
+                "Timeout on instatiation") from exc
         if service_instantiation.failed:
             self._logger.error("Service instantiation %s failed", self.service_instance_name)
-            raise onap_test_exceptions.ServiceInstantiateException
+            raise onap_test_exceptions.ServiceInstantiateException(
+                service_instantiation.status_message)
         self._load_customer_and_subscription(reload=True)
         self._load_service_instance()
 
@@ -153,12 +155,14 @@ class YamlTemplateServiceAlaCarteInstantiateStep(YamlTemplateBaseStep):
                 service_deletion.wait_for_finish(settings.ORCHESTRATION_REQUEST_TIMEOUT)
             except TimeoutError as exc:
                 self._logger.error("Service deletion %s timed out", self._service_instance_name)
-                raise onap_test_exceptions.ServiceCleanupException from exc
+                raise onap_test_exceptions.ServiceCleanupException(
+                    "Timeout on cleanup") from exc
             if service_deletion.finished:
                 self._logger.info("Service %s deleted", self._service_instance_name)
             else:
                 self._logger.error("Service deletion %s failed", self._service_instance_name)
-                raise onap_test_exceptions.ServiceCleanupException
+                raise onap_test_exceptions.ServiceCleanupException(
+                    service_deletion.status_message)
 
     @YamlTemplateBaseStep.store_state(cleanup=True)
     def cleanup(self) -> None:
index 047acb8..0bfab64 100644 (file)
@@ -1,6 +1,9 @@
+
+import time
 from typing import List
 from uuid import uuid4
 
+from jinja2 import Environment, PackageLoader, select_autoescape
 from onapsdk.aai.business.owning_entity import OwningEntity
 from onapsdk.aai.cloud_infrastructure.cloud_region import CloudRegion
 from onapsdk.aai.cloud_infrastructure.tenant import Tenant
@@ -10,14 +13,16 @@ from onapsdk.sdc.service import Service
 from onapsdk.so.instantiation import (InstantiationParameter,
                                       ServiceInstantiation, SoService,
                                       VfmoduleParameters, VnfParameters)
+from onapsdk.ves.ves import Ves
 from yaml import SafeLoader, load
 
 import onaptests.utils.exceptions as onap_test_exceptions
-from onaptests.steps.base import YamlTemplateBaseStep
+from onaptests.steps.base import BaseStep, YamlTemplateBaseStep
 from onaptests.steps.cloud.connect_service_subscription_to_cloud_region import \
     ConnectServiceSubToCloudRegionStep
 from onaptests.steps.cloud.customer_service_subscription_create import \
     CustomerServiceSubscriptionCreateStep
+from onaptests.steps.cloud.onap_operator_cr_check import CheckOnapVnfCr
 from onaptests.steps.instantiate.sdnc_service import TestSdncStep
 from onaptests.steps.onboard.service import (VerifyServiceDistributionStep,
                                              YamlTemplateServiceOnboardStep)
@@ -144,6 +149,7 @@ class YamlTemplateServiceMacroInstantiateBaseStep(YamlTemplateBaseStep):
                 cloud_region_id=settings.CLOUD_REGION_ID,
             )
             tenant: Tenant = cloud_region.get_tenant(settings.TENANT_ID)
+            self._logger.info("inside if vnfs/networks ")
         else:
             #  Only PNF is going to be instantiated so
             #  neither cloud_region nor tenant are needed
@@ -158,12 +164,15 @@ class YamlTemplateServiceMacroInstantiateBaseStep(YamlTemplateBaseStep):
         so_service = None
         vnf_params_list: List[VnfParameters] = []
         if settings.MODEL_YAML_TEMPLATE:
+            self._logger.info("inside if settings.MODEL_YAML_TEMPLATE ")
             so_data = self.yaml_template[self.service_name]
             so_service = SoService(vnfs=so_data.get("vnfs", []),
                                    subscription_service_type=so_data.get(
                                        'subscription_service_type'))
         else:
+            self._logger.info("inside else settings.MODEL_YAML_TEMPLATE ")
             for vnf_data in self.yaml_template[self.service_name].get("vnfs", []):
+                self._logger.info("getting vnf data ")
                 vnf_params_list.append(VnfParameters(
                     vnf_data["vnf_name"],
                     [InstantiationParameter(name=parameter["name"],
@@ -187,6 +196,24 @@ class YamlTemplateServiceMacroInstantiateBaseStep(YamlTemplateBaseStep):
             tenant, owning_entity, so_service, skip_pnf_registration_event, vnf_params_list)
 
 
+class YamlTemplateServiceOperatorInstantiateStep(YamlTemplateServiceMacroInstantiateBaseStep):
+    """Instantiate SO service with Operator."""
+
+    def __init__(self):
+        """Init YamlTemplateServiceOperatorInstantiateStep."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self.add_step(CheckOnapVnfCr(service_macro_base=self))
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Instantiate SO service with Operator"
+
+    @YamlTemplateBaseStep.store_state
+    def execute(self):
+        super().execute()
+
+
 class YamlTemplateServiceMacroInstantiateStep(YamlTemplateServiceMacroInstantiateBaseStep):
     """Instantiate SO service."""
 
@@ -203,7 +230,7 @@ class YamlTemplateServiceMacroInstantiateStep(YamlTemplateServiceMacroInstantiat
     def execute(self):
         super().execute()
         (service, _, _, cloud_region, tenant, owning_entity, so_service,
-            _, vnf_params_list) = self.base_execute()
+            skip_pnf_registration_event, vnf_params_list) = self.base_execute()
         # remove leftover
         self._cleanup_logic()
 
@@ -220,16 +247,27 @@ class YamlTemplateServiceMacroInstantiateStep(YamlTemplateServiceMacroInstantiat
             service_instance_name=self.service_instance_name,
             vnf_parameters=vnf_params_list,
             enable_multicloud=settings.USE_MULTICLOUD,
-            so_service=so_service
+            so_service=so_service,
+            skip_pnf_registration_event=skip_pnf_registration_event
         )
+        try:
+            if settings.PNF_WITH_VES:
+                time.sleep(settings.SERVICE_DISTRIBUTION_SLEEP_TIME)
+                Ves.send_event(version="v7", json_event=self.get_ves_payload_from_file(),
+                               basic_auth={'username': 'sample1', 'password': 'sample1'})
+        except SDKException:
+            self._logger.info("Not required to wait for VES event")
+
         try:
             service_instantiation.wait_for_finish(timeout=settings.ORCHESTRATION_REQUEST_TIMEOUT)
         except TimeoutError as exc:
             self._logger.error("Service instantiation %s timed out", self.service_instance_name)
-            raise onap_test_exceptions.ServiceInstantiateException from exc
+            raise onap_test_exceptions.ServiceInstantiateException(
+                "Timeout on instatiation") from exc
         if service_instantiation.failed:
             self._logger.error("Service instantiation %s failed", self.service_instance_name)
-            raise onap_test_exceptions.ServiceInstantiateException
+            raise onap_test_exceptions.ServiceInstantiateException(
+                service_instantiation.status_message)
 
         self._load_customer_and_subscription(reload=True)
         self._load_service_instance()
@@ -241,12 +279,14 @@ class YamlTemplateServiceMacroInstantiateStep(YamlTemplateServiceMacroInstantiat
                 service_deletion.wait_for_finish(timeout=settings.ORCHESTRATION_REQUEST_TIMEOUT)
             except TimeoutError as exc:
                 self._logger.error("Service deletion %s timed out", self._service_instance_name)
-                raise onap_test_exceptions.ServiceCleanupException from exc
+                raise onap_test_exceptions.ServiceCleanupException(
+                    "Timeout on cleanup") from exc
             if service_deletion.finished:
                 self._logger.info("Service %s deleted", self._service_instance_name)
             else:
                 self._logger.error("Service deletion %s failed", self._service_instance_name)
-                raise onap_test_exceptions.ServiceCleanupException
+                raise onap_test_exceptions.ServiceCleanupException(
+                    service_deletion.status_message)
 
     @YamlTemplateBaseStep.store_state(cleanup=True)
     def cleanup(self) -> None:
@@ -260,3 +300,15 @@ class YamlTemplateServiceMacroInstantiateStep(YamlTemplateServiceMacroInstantiat
         self._load_service_instance()
         self._cleanup_logic()
         super().cleanup()
+
+    def get_ves_payload_from_file(self) -> str:
+        """Get ves payload from file."""
+
+        jinja_env = Environment(autoescape=select_autoescape(['json.j2']),
+                                loader=PackageLoader('onaptests.templates',
+                                'artifacts'))
+        template = jinja_env.get_template("pnf_instantiation_ves_event.json.j2")
+        ves_event_json = template.render(
+            source_name=self.service_instance_name)
+
+        return ves_event_json
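
Taken in isolation, the optional VES part of the flow above renders the instantiation event and pushes it to the VES collector; a reduced sketch using the template, credentials and call already present in this step (the instance name is illustrative):

    from jinja2 import Environment, PackageLoader, select_autoescape
    from onapsdk.ves.ves import Ves

    jinja_env = Environment(autoescape=select_autoescape(['json.j2']),
                            loader=PackageLoader('onaptests.templates', 'artifacts'))
    event = jinja_env.get_template("pnf_instantiation_ves_event.json.j2").render(
        source_name="my-service-instance")  # illustrative instance name

    Ves.send_event(version="v7", json_event=event,
                   basic_auth={'username': 'sample1', 'password': 'sample1'})
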
diff --git a/src/onaptests/steps/instantiate/so/add_cnf_in_service.py b/src/onaptests/steps/instantiate/so/add_cnf_in_service.py
new file mode 100644 (file)
index 0000000..8c8265e
--- /dev/null
@@ -0,0 +1,89 @@
+from onapsdk.aai.business.service import ServiceInstance
+from onapsdk.aai.cloud_infrastructure import CloudRegion
+from onapsdk.configuration import settings
+from onapsdk.sdc.service import Service
+from onapsdk.so.instantiation import VnfInstantiation
+
+import onaptests.utils.exceptions as onap_test_exceptions
+from onaptests.steps.base import BaseStep
+from onaptests.steps.instantiate.so.add_delete_cnf_base_step import \
+    AddDeleteCnfInService
+from onaptests.steps.instantiate.so.delete_cnf_in_service import \
+    DeleteCnfMacroFromService
+
+
+class AddCnfInService(AddDeleteCnfInService):
+    """Add CNF in running service using YAML template."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__()
+        self._cnf_instantiation: VnfInstantiation = None
+        self.add_step(DeleteCnfMacroFromService())
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Add CNF in running service using SO macro method."
+
+    @BaseStep.store_state
+    def execute(self):
+
+        """Instantiate CNF.
+        Use settings values:
+         - GLOBAL_CUSTOMER_ID,
+         - SERVICE_NAME,
+         - LINE_OF_BUSINESS,
+         - PLATFORM,
+         - CLOUD_REGION_ID,
+         - SERVICE_INSTANCE_NAME,
+         - TENANT_ID.
+
+        Raises:
+            Exception: Service instantiation failed
+
+        """
+        # global service
+        super().execute()
+        service: Service = Service(self.service_name)
+        self._load_customer_and_subscription()
+        self._load_service_instance()
+        cloud_region = (CloudRegion.
+                        get_by_id(settings.CLOUD_REGION_CLOUD_OWNER, settings.CLOUD_REGION_ID))
+
+        tenant = cloud_region.get_tenant(settings.TENANT_ID)
+
+        # using existing VNF related functions for getting and adding CNF,
+        # as the processing in SO is the same for both
+        cnf = next(service.vnfs)
+
+        self._cnf_instantiation = ServiceInstance.add_vnf(
+            self=self._service_instance,
+            vnf=cnf,
+            line_of_business=settings.LINE_OF_BUSINESS,
+            platform=settings.PLATFORM,
+            cloud_region=cloud_region,
+            tenant=tenant,
+            vnf_instance_name=settings.CNF_INSTANCE_NAME,
+            a_la_carte=False
+
+        )
+
+        try:
+            self._cnf_instantiation.wait_for_finish(timeout=settings.ORCHESTRATION_REQUEST_TIMEOUT)
+        except TimeoutError as exc:
+            self._logger.error("CNF instantiation %s timed out", self._cnf_instantiation.name)
+            raise onap_test_exceptions.VnfInstantiateException(
+                "Timeout on instantiation") from exc
+        if self._cnf_instantiation.failed:
+            self._logger.error("CNF instantiation %s failed", self._cnf_instantiation.name)
+            raise onap_test_exceptions.VnfInstantiateException(
+                self._cnf_instantiation.status_message)
+
+        cnf_inst = next(self._service_instance.vnf_instances)
+        if cnf_inst.vnf_name == settings.CNF_INSTANCE_NAME:
+            self._logger.debug("CNF added successfully")
+        else:
+            self._logger.debug("CNF not added successfully")
+            raise onap_test_exceptions.VnfInstantiateException(
+                self._cnf_instantiation.status_message)
diff --git a/src/onaptests/steps/instantiate/so/add_delete_cnf_base_step.py b/src/onaptests/steps/instantiate/so/add_delete_cnf_base_step.py
new file mode 100644 (file)
index 0000000..870e64d
--- /dev/null
@@ -0,0 +1,62 @@
+from uuid import uuid4
+
+import yaml
+from onapsdk.configuration import settings
+
+from onaptests.steps.base import BaseStep, YamlTemplateBaseStep
+
+
+class AddDeleteCnfInService(YamlTemplateBaseStep):
+    """Add CNF in running service using YAML template."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self._yaml_template = None
+        self._service_instance_name: str = None
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Add and Delete CNF in running service using SO macro method."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "SO"
+
+    @property
+    def model_yaml_template(self) -> dict:
+        return {}
+
+    @property
+    def service_instance_name(self) -> str:
+        """Service instance name.
+
+        Generate using `service_name` and `uuid4()` function if it's a root step,
+            get from parent otherwise.
+
+        Returns:
+            str: Service instance name
+
+        """
+        if self.is_root:
+            if not self._service_instance_name:
+                self._service_instance_name: str = f"{self.service_name}-{str(uuid4())}"
+            return self._service_instance_name
+        return self.parent.service_instance_name
+
+    @property
+    def yaml_template(self) -> dict:
+        """YAML template abstract property.
+
+        Every YAML template step needs to implement this property.
+
+        Returns:
+            dict: YAML template
+
+        """
+        if not self._yaml_template:
+            with open(settings.SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+                self._yaml_template: dict = yaml.safe_load(yaml_template)
+        return self._yaml_template
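
The yaml_template property only loads SERVICE_YAML_TEMPLATE from disk; a minimal sketch of what such a template could contain for this scenario, structure inferred from how the template is consumed in this change (names and parameters are illustrative):

    # Illustrative SERVICE_YAML_TEMPLATE content, shown as the dict yaml.safe_load returns.
    template = {
        "basic_cnf_macro": {
            "vnfs": [
                {"vnf_name": "basic_cnf",
                 "parameters": [{"name": "k8s-rb-profile-name", "value": "default"}]},
            ],
        }
    }

    service_name = next(iter(template))        # "basic_cnf_macro"
    # service_instance_name is then built as f"{service_name}-{uuid4()}"
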
diff --git a/src/onaptests/steps/instantiate/so/add_pnf_in_service.py b/src/onaptests/steps/instantiate/so/add_pnf_in_service.py
new file mode 100644 (file)
index 0000000..36225dd
--- /dev/null
@@ -0,0 +1,125 @@
+import time
+
+import yaml
+from onapsdk.aai.business import PnfInstance
+from onapsdk.aai.business.customer import Customer, ServiceSubscription
+from onapsdk.aai.business.service import ServiceInstance
+from onapsdk.configuration import settings
+from onapsdk.sdc.service import Pnf
+from onapsdk.so.instantiation import (PnfInstantiation,
+                                      PnfRegistrationParameters, SoServicePnf)
+
+from onaptests.steps.base import BaseStep, YamlTemplateBaseStep
+
+
+class AddPnfInService(YamlTemplateBaseStep):
+    """Add PNF in running service using YAML template."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+        self._yaml_template = None
+        self._pnf_instantiation = None
+        self._service_instance_id: str = None
+        self._service_instance: ServiceInstance = None
+        self._pnf_instance: PnfInstance = None
+        self._pnf_id: str = None
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Add PNF in running service using SO macro method."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "SO"
+
+    @property
+    def model_yaml_template(self) -> dict:
+        return {}
+
+    @property
+    def yaml_template(self) -> dict:
+        """YAML template abstract property.
+
+        Every YAML template step needs to implement this property.
+
+        Returns:
+            dict: YAML template
+
+        """
+        if not self._yaml_template:
+            with open(settings.SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+                self._yaml_template: dict = yaml.safe_load(yaml_template)
+        return self._yaml_template
+
+    @BaseStep.store_state
+    def execute(self):
+        """Instantiate PNF.
+
+        Use settings values:
+         - GLOBAL_CUSTOMER_ID,
+         - CLOUD_REGION_CLOUD_OWNER,
+         - OWNING_ENTITY,
+         - PROJECT.
+
+        Raises:
+            Exception: Service instantiation failed
+
+        """
+        # global service
+        super().execute()
+        self._logger.info("Adding pnf from running service")
+        customer: Customer = Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
+        self._logger.info("got customer")
+
+        service_subscription: ServiceSubscription = \
+            customer.get_service_subscription_by_service_type(self.service_type)
+
+        self._logger.info("got service_subscription")
+
+        self._service_instance = \
+            service_subscription.get_service_instance_by_name(settings.SERVICE_INSTANCE_NAME)
+
+        self._logger.info("service instance name " + self._service_instance.instance_name)
+        service = self._service_instance.sdc_service
+        self._logger.info("sdc service " + service.name)
+
+        pnf: Pnf = next(service.pnfs)
+
+        so_pnf = SoServicePnf(
+            model_name="test_so_service_pnf_model_name_1",
+            instance_name="test_so_service_pnf_instance_name_1",
+            registration_parameters=PnfRegistrationParameters(
+                model_number="test_model_number",
+                oam_v4_ip_address="test_ip",
+                oam_v6_ip_address="test_mac",
+                serial_number="test_serial_number",
+                software_version="test_software_version",
+                unit_type="test_unit_type",
+                vendor_name="test_vendor"
+            )
+        )
+
+        self._logger.info("Got pnf  and calling add...." + pnf.name)
+
+        self._pnf_instantiation = PnfInstantiation.instantiate_macro(
+            aai_service_instance=self._service_instance,
+            pnf_object=pnf,
+            line_of_business=settings.LINE_OF_BUSINESS,
+            platform=settings.PLATFORM,
+            sdc_service=service,
+            so_pnf=so_pnf
+        )
+
+        self._logger.info("after calling add")
+        if self._pnf_instantiation.status == self._pnf_instantiation.StatusEnum.IN_PROGRESS:
+            time.sleep(settings.PNF_WAIT_TIME)
+
+        if self._pnf_instantiation.status == self._pnf_instantiation.StatusEnum.COMPLETED:
+            self._logger.error("Status Completed ")
+
+        if self._pnf_instantiation.status == self._pnf_instantiation.StatusEnum.FAILED:
+            self._logger.error("Status Failed ")
diff --git a/src/onaptests/steps/instantiate/so/delete_cnf_in_service.py b/src/onaptests/steps/instantiate/so/delete_cnf_in_service.py
new file mode 100644 (file)
index 0000000..7fc5f9d
--- /dev/null
@@ -0,0 +1,77 @@
+from onapsdk.aai.business import VnfInstance
+from onapsdk.configuration import settings
+
+import onaptests.utils.exceptions as onap_test_exceptions
+from onaptests.scenario.basic_cnf_macro import BasicCnfMacroStep
+from onaptests.steps.base import BaseStep
+from onaptests.steps.instantiate.so.add_delete_cnf_base_step import \
+    AddDeleteCnfInService
+
+
+class DeleteCnfMacroFromService(AddDeleteCnfInService):
+    """Delete CNF in service."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__()
+        self._cnf_instance: VnfInstance = None
+        self.add_step(BasicCnfMacroStep())
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Delete CNF in running service using SO macro method."
+
+    @BaseStep.store_state
+    def execute(self):
+        """Delete CNF in running service.
+
+        Use settings values:
+         - GLOBAL_CUSTOMER_ID,
+         - SERVICE_NAME,
+         - SERVICE_INSTANCE_NAME.
+
+        Raises:
+            Exception: Service deletion failed
+
+        """
+        # global service
+        super().execute()
+        self._load_customer_and_subscription()
+        self._load_service_instance()
+
+        # using existing VNF related functions for getting and deleting CNF,
+        # as the processing in SO is the same for both
+        self._cnf_instance = next(self._service_instance.vnf_instances)
+
+        cnf_deletion = self._cnf_instance.delete(a_la_carte=False)
+
+        try:
+            cnf_deletion.wait_for_finish(timeout=settings.ORCHESTRATION_REQUEST_TIMEOUT)
+        except TimeoutError as exc:
+            self._logger.error("CNF deletion %s timed out", cnf_deletion.name)
+            raise onap_test_exceptions.VnfCleanupException(
+                "Timeout on cleanup") from exc
+        if cnf_deletion.finished:
+            self._logger.info("CNF %s deleted", cnf_deletion.name)
+        else:
+            self._logger.error("CNF deletion %s failed", cnf_deletion.name)
+            raise onap_test_exceptions.VnfCleanupException(
+                cnf_deletion.status_message)
+
+        try:
+            next(self._service_instance.vnf_instances)
+            #  if control reaches here, throw exception to fail the step as no CNF
+            #  is expected to be present in the service_instance
+            raise onap_test_exceptions.VnfCleanupException(
+                cnf_deletion.status_message)
+        except StopIteration:
+            # this exception is expected as no CNF should be present in service_instance
+            self._logger.debug("CNF deleted successfully")
diff --git a/src/onaptests/steps/instantiate/so/delete_pnf_in_service.py b/src/onaptests/steps/instantiate/so/delete_pnf_in_service.py
new file mode 100644 (file)
index 0000000..ddc92ab
--- /dev/null
@@ -0,0 +1,107 @@
+import time
+
+import yaml
+from onapsdk.aai.business import PnfInstance
+from onapsdk.aai.business.customer import Customer, ServiceSubscription
+from onapsdk.aai.business.service import ServiceInstance
+from onapsdk.configuration import settings
+from onapsdk.so.instantiation import ServiceInstantiation
+
+from onaptests.steps.base import BaseStep, YamlTemplateBaseStep
+
+
+class DeletePnfMacroInService(YamlTemplateBaseStep):
+    """Delete pnf in service."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+        self._yaml_template = None
+        self._service_instance_id: str = None
+        self._service_instance_name = None
+        self._service_instance: ServiceInstance = None
+        self._pnf_instance: PnfInstance = None
+        self._pnf_id: str = None
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Delete pnf in running service using SO macro method."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "SO"
+
+    @property
+    def model_yaml_template(self) -> dict:
+        return {}
+
+    @property
+    def yaml_template(self) -> dict:
+        """YAML template abstract property.
+
+        Every YAML template step needs to implement this property.
+
+        Returns:
+            dict: YAML template
+
+        """
+        if not self._yaml_template:
+            with open(settings.SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+                self._yaml_template: dict = yaml.safe_load(yaml_template)
+        return self._yaml_template
+
+    @BaseStep.store_state
+    def execute(self):
+        """Delete PNF in running service.
+
+        Use settings values:
+         - GLOBAL_CUSTOMER_ID,
+         - SERVICE_SUBSCRIPTION,
+         - OWNING_ENTITY,
+         - PROJECT.
+
+        Raises:
+            Exception: Service deletion failed
+
+        """
+        # global service
+        super().execute()
+        self._logger.info("Deleting pnf from running service")
+        customer_pnf: Customer = Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
+        service_sub_pnf: ServiceSubscription = \
+            customer_pnf.get_service_subscription_by_service_type(self.service_type)
+
+        self._logger.info("got customer and sub")
+        service_instance_pnf = service_sub_pnf.get_service_instance_by_name(
+            settings.SERVICE_INSTANCE_NAME)
+
+        self._logger.info("got pnf instance " + service_instance_pnf.instance_name)
+
+        self._pnf_instance = service_instance_pnf.pnfs.__next__()
+
+        self._logger.info("pnf instance present" + self._pnf_instance.name)
+
+        pnf_deletion = self._pnf_instance.delete(a_la_carte=False)
+
+        self._logger.info("After pnf_deletion")
+
+        if pnf_deletion.status == ServiceInstantiation.StatusEnum.IN_PROGRESS:
+            time.sleep(settings.PNF_WAIT_TIME)
+
+        if pnf_deletion.status == ServiceInstantiation.StatusEnum.COMPLETED:
+            self._logger.error("Status Completed ")
+
+        if pnf_deletion.status == ServiceInstantiation.StatusEnum.FAILED:
+            self._logger.error("Status Failed ")
+
+        self._logger.info("pnf deleted successfully")
diff --git a/src/onaptests/steps/instantiate/so/generic_network_step.py b/src/onaptests/steps/instantiate/so/generic_network_step.py
new file mode 100644 (file)
index 0000000..0111017
--- /dev/null
@@ -0,0 +1,156 @@
+import yaml
+from onapsdk.aai.business import VnfInstance
+from onapsdk.aai.business.customer import Customer, ServiceSubscription
+from onapsdk.aai.business.service import ServiceInstance
+from onapsdk.aai.cloud_infrastructure import CloudRegion, Tenant
+from onapsdk.configuration import settings
+from onapsdk.sdc.service import Network, Vnf
+from onapsdk.so.instantiation import NetworkDetails, NetworkDetailsElement
+
+import onaptests.utils.exceptions as onap_test_exceptions
+from onaptests.steps.base import BaseStep, YamlTemplateBaseStep
+
+
+class GenericNetworkStep(YamlTemplateBaseStep):
+    """Add generic network in existing VNF using YAML template."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+        self._service_instance_name = None
+        self._yaml_template = None
+        self._service_instance_id: str = None
+        self._service_instance: ServiceInstance = None
+        self.network_instantiation = None
+        self.vnf_id: str = None
+        self._vnf_instance: VnfInstance = None
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Add generic networks in existing VNF."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "SO"
+
+    @property
+    def model_yaml_template(self) -> dict:
+        return {}
+
+    @property
+    def yaml_template(self) -> dict:
+        """YAML template abstract property.
+
+        Every YAML template step needs to implement this property.
+
+        Returns:
+            dict: YAML template
+
+        """
+        if not self._yaml_template:
+            with open(settings.SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+                self._yaml_template: dict = yaml.safe_load(yaml_template)
+        return self._yaml_template
+
+    @YamlTemplateBaseStep.store_state
+    def execute(self):
+        """Add a generic network to an existing VNF.
+
+        Use settings values:
+         - GLOBAL_CUSTOMER_ID,
+         - CLOUD_REGION_CLOUD_OWNER,
+         - OWNING_ENTITY,
+         - PROJECT.
+
+        Raises:
+            Exception: Service instantiation failed
+
+        """
+        # global service
+        super().execute()
+        self._logger.info("Generic Network Step")
+        customer: Customer = Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
+        self._logger.info("got customer")
+        service_subscription: ServiceSubscription = \
+            customer.get_service_subscription_by_service_type(self.service_type)
+        self._logger.info("got service_subscription")
+        self._service_instance = \
+            service_subscription.get_service_instance_by_name(settings.SERVICE_INSTANCE_NAME)
+        self._vnf_instance = next(self._service_instance.vnf_instances)
+        self._logger.info("service instance name " + self._service_instance.instance_name)
+        service = self._service_instance.sdc_service
+        self._logger.info("sdc service " + service.name)
+        vnf: Vnf = next(service.vnfs)
+        vnf_id = self._vnf_instance.vnf_id
+        # get network details object
+        network_details: NetworkDetails = self.get_network_details(vnf_id)
+        self._logger.info("Built network details for VNF %s", network_details.vnf_id)
+        network: Network = next(service.networks)
+        self._logger.info("Got VNF with name %s", vnf.name)
+        cloud_region: CloudRegion = CloudRegion.get_by_id(
+            cloud_owner=settings.CLOUD_REGION_CLOUD_OWNER,
+            cloud_region_id=settings.CLOUD_REGION_ID,
+        )
+        tenant: Tenant = cloud_region.get_tenant(settings.TENANT_ID)
+        self.network_instantiation = ServiceInstance.add_network(
+            self._service_instance,
+            network=network,
+            line_of_business=settings.LINE_OF_BUSINESS,
+            platform=settings.PLATFORM,
+            cloud_region=cloud_region,
+            tenant=tenant,
+            a_la_carte=False,
+            network_details=network_details
+        )
+        self._logger.info("after calling add")
+        try:
+            self.network_instantiation. \
+                wait_for_finish(timeout=settings.ORCHESTRATION_REQUEST_TIMEOUT)
+        except TimeoutError as exc:
+            self._logger.error("Service instantiation %s timed out", self._service_instance_name)
+            raise onap_test_exceptions.ServiceInstantiateException(
+                "Timeout on instatiation") from exc
+
+        if self.network_instantiation.status == self.network_instantiation.StatusEnum.FAILED:
+            self._logger.error("Service instantiation %s failed", self._service_instance_name)
+            raise onap_test_exceptions.ServiceInstantiateException(
+                self.network_instantiation.status_message)
+
+    def get_network_details(self, vnf_id: str = None) -> NetworkDetails:
+        """Get generic network details.
+
+        Args:
+            vnf_id(str): vnf id in which we need to add network resources.
+
+        Returns:
+            NetworkDetails: NetworkDetails object
+
+        """
+        return NetworkDetails(
+            network_type="generic-vnf",
+            vnf_id=vnf_id,
+            child_resources=[
+                NetworkDetailsElement(network_details_element_type="l-interface",
+                                      network_details_element_parameters={
+                                          "interface-name": "equinix-internal_test",
+                                          "interface-type": "internal",
+                                          "is-port-mirrored": "",
+                                          "in-maint": "",
+                                          "is-ip-unnumbered": "",
+                                          "l2-multicasting": ""
+                                      }),
+                NetworkDetailsElement(network_details_element_type="l-interface",
+                                      network_details_element_parameters={
+                                          "interface-name": "equinix-external_test",
+                                          "interface-type": "external",
+                                          "is-port-mirrored": "",
+                                          "in-maint": "",
+                                          "is-ip-unnumbered": "",
+                                          "l2-multicasting": ""
+                                      })],
+            related_to=[NetworkDetailsElement(network_details_element_type="pserver",
+                                              network_details_element_parameters={
+                                                  "hostname": "test-pserver"})])
diff --git a/src/onaptests/steps/instantiate/so/modify_pnf_in_service.py b/src/onaptests/steps/instantiate/so/modify_pnf_in_service.py
new file mode 100644 (file)
index 0000000..f57c82d
--- /dev/null
@@ -0,0 +1,116 @@
+import yaml
+from onapsdk.aai.business import PnfInstance
+from onapsdk.aai.business.customer import Customer, ServiceSubscription
+from onapsdk.aai.business.service import ServiceInstance
+from onapsdk.configuration import settings
+from onapsdk.so.modification import PnfModificationRequest
+
+import onaptests.utils.exceptions as onap_test_exceptions
+from onaptests.steps.base import BaseStep, YamlTemplateBaseStep
+
+
+class ModifyPnfInService(YamlTemplateBaseStep):
+    """Modify pnf in service."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self.pnf_modification = None
+        self._yaml_template = None
+        self._service_instance: ServiceInstance = None
+        self._pnf_instance: PnfInstance = None
+        self._pnf_id: str = None
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "modification of  pnf in running service using SO macro method."
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "SO"
+
+    @property
+    def model_yaml_template(self) -> dict:
+        return {}
+
+    @property
+    def yaml_template(self) -> dict:
+        """YAML template abstract property.
+
+        Every YAML template step needs to implement this property.
+
+        Returns:
+            dict: YAML template
+
+        """
+        if not self._yaml_template:
+            with open(settings.SERVICE_YAML_TEMPLATE,
+                      "r", encoding="utf-8") as yaml_template:
+                self._yaml_template: dict = yaml.safe_load(yaml_template)
+        return self._yaml_template
+
+    @BaseStep.store_state
+    def execute(self):
+        """Modify PNF in running service.
+
+        Use settings values:
+         - GLOBAL_CUSTOMER_ID,
+         - SERVICE_SUBSCRIPTION,
+         - OWNING_ENTITY,
+         - PROJECT.
+
+        Raises:
+            Exception: Service modification failed
+
+        """
+
+        super().execute()
+        self._logger.info("modifying pnf from running service")
+        customer_pnf: Customer = Customer.get_by_global_customer_id(settings.GLOBAL_CUSTOMER_ID)
+
+        service_sub_pnf: ServiceSubscription = \
+            customer_pnf.get_service_subscription_by_service_type(self.service_type)
+        self._logger.info("got service_subscription")
+
+        self._service_instance = \
+            service_sub_pnf.get_service_instance_by_name(settings.SERVICE_INSTANCE_NAME)
+        self._logger.info("service instance name " + self._service_instance.instance_name)
+
+        service = self._service_instance.sdc_service
+        self._logger.info("sdc service " + service.name)
+
+        self._logger.info("got pnf instance " + self._service_instance.instance_name)
+
+        self._pnf_instance = next(self._service_instance.pnfs)
+
+        self._logger.info("pnf id also found " + self._pnf_instance.pnf_id)
+
+        self.pnf_modification = PnfModificationRequest.send_request(
+            pnf_object=self._pnf_instance,
+            sdc_service=service,
+            aai_service_instance=self._service_instance,
+        )
+
+        try:
+            self.pnf_modification.wait_for_finish(timeout=settings.ORCHESTRATION_REQUEST_TIMEOUT)
+        except TimeoutError as exc:
+            self._logger.error("PNF Modification %s timed out", self._pnf_instance)
+            raise onap_test_exceptions.ServiceInstantiateException(
+                "Timeout on instatiation") from exc
+        if self.pnf_modification.failed:
+            self._logger.error("PNF Modification %s failed hence testcase is failed",
+                               self._pnf_instance)
+            raise onap_test_exceptions.ServiceInstantiateException(
+                self.pnf_modification.status_message)
+        if self.pnf_modification.completed:
+            self._logger.error("PNF Modification %s completed hence testcase is passed",
+                               self._pnf_instance)
index b9cc458..e91eb4a 100644 (file)
@@ -119,7 +119,9 @@ class CbaEnrichStep(CDSBaseStep):
         Delete enriched CBA file.
 
         """
-        Path(settings.CDS_CBA_ENRICHED).unlink()
+        path = Path(settings.CDS_CBA_ENRICHED)
+        if path.is_file():
+            path.unlink()
         super().cleanup()
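
On Python 3.8+ the same guard can also be expressed with missing_ok, which swallows the FileNotFoundError for an already-removed file; a possible equivalent:

    from pathlib import Path

    from onapsdk.configuration import settings

    Path(settings.CDS_CBA_ENRICHED).unlink(missing_ok=True)
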
 
 
index 280082b..fcac58b 100644 (file)
@@ -8,6 +8,7 @@ import pg8000
 from kubernetes import client, config
 from onapsdk.configuration import settings
 from onapsdk.cps import Anchor, Dataspace, SchemaSet
+from onapsdk.exceptions import APIError
 
 from onaptests.utils.exceptions import (EnvironmentPreparationException,
                                         OnapTestException)
@@ -296,17 +297,21 @@ class CheckPostgressDataBaseConnectionStep(CpsBaseStep):
 
         self.get_database_credentials()
         if self.login and self.password:
-            ctx = ssl.create_default_context()
-            ctx.check_hostname = False
-            ctx.verify_mode = ssl.CERT_NONE
+
             db_params = {
                 "user": self.login,
                 "password": self.password,
                 "host": settings.DB_PRIMARY_HOST,
                 "database": settings.DATABASE,
-                "port": settings.DB_PORT,
-                "ssl_context": ctx
+                "port": settings.DB_PORT
             }
+
+            if settings.DB_USE_SSL_CONTEXT:
+                ctx = ssl.create_default_context()
+                ctx.check_hostname = False
+                ctx.verify_mode = ssl.CERT_NONE
+                db_params["ssl_context"] = ctx
+
             try:
                 connection = pg8000.connect(**db_params)
                 cursor = connection.cursor()
@@ -336,3 +341,34 @@ class CheckPostgressDataBaseConnectionStep(CpsBaseStep):
          """
         super().execute()
         self.connect_to_postgress()
+
+
+class RanDoCpsCleanup(CpsBaseStep):
+    """Back CPS nodes to fresh state."""
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Back CPS nodes into base state"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Update all configured CPS nodes back to fresh state.
+
+        It iterates through all configured data files, read them
+            and update associated node data.
+        """
+        super().execute()
+        dataspace = Dataspace(settings.DATASPACE_NAME)
+        for path, anchor_name in settings.CPS_FILE_TO_ANCHOR_MAP.items():
+            with path.open("r") as data_file:
+                try:
+                    anchor: Anchor = dataspace.get_anchor(anchor_name)
+                except APIError:
+                    self._logger.error("Anchor %s does not exist, needs to be created", anchor_name)
+                    schema_set: SchemaSet = dataspace.get_schema_set(settings.SCHEMA_SET_NAME)
+                    anchor = dataspace.create_anchor(
+                        schema_set,
+                        anchor_name
+                    )
+                anchor.update_node("/", data_file.read())
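
A hedged example of the CPS_FILE_TO_ANCHOR_MAP setting this cleanup step iterates over; paths and anchor names are illustrative, the real values live in the scenario settings:

    from pathlib import Path

    # Illustrative mapping of CPS data files to the anchors they restore.
    CPS_FILE_TO_ANCHOR_MAP = {
        Path("templates/artifacts/cps/ran-network-data.json"): "ran-network-anchor",
        Path("templates/artifacts/cps/cell-data.json"): "cell-anchor",
    }
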
index 9243d97..a35b4da 100644 (file)
@@ -9,6 +9,7 @@ from onapsdk.exceptions import InvalidResponse, ResourceNotFound
 from onapsdk.sdc2.component_instance import (ComponentInstance,
                                              ComponentInstanceInput)
 from onapsdk.sdc2.pnf import Pnf
+from onapsdk.sdc2.sdc_category import ServiceCategory
 from onapsdk.sdc2.sdc_resource import LifecycleOperation, LifecycleState
 from onapsdk.sdc2.service import Service, ServiceInstantiationType
 from onapsdk.sdc2.vf import Vf
@@ -17,7 +18,6 @@ from onapsdk.so.catalog_db_adapter import CatalogDbAdapter
 from yaml import SafeLoader, load
 
 import onaptests.utils.exceptions as onap_test_exceptions
-from onaptests.scenario.scenario_base import BaseScenarioStep
 from onaptests.utils.kubernetes import KubernetesHelper
 
 from ..base import BaseStep, YamlTemplateBaseStep
@@ -37,6 +37,7 @@ class YamlTemplateServiceOnboardStep(YamlTemplateBaseStep):
         super().__init__(cleanup=settings.CLEANUP_FLAG)
         self._yaml_template: dict = None
         self._model_yaml_template: dict = None
+        self.category: str = None
         if "vnfs" in self.yaml_template[self.service_name]:
             self.add_step(YamlTemplateVfOnboardStep())
         if "pnfs" in self.yaml_template[self.service_name]:
@@ -99,6 +100,12 @@ class YamlTemplateServiceOnboardStep(YamlTemplateBaseStep):
     @YamlTemplateBaseStep.store_state
     def execute(self):
         """Onboard service."""
+        category: ServiceCategory = None
+        if "category" in self.yaml_template[self.service_name]:
+            category_type = self.yaml_template[self.service_name]["category"]
+            if category_type == "NSST":
+                category = ServiceCategory.get_by_name("NSST")
+
         super().execute()
         if "instantiation_type" in self.yaml_template[self.service_name]:
             instantiation_type: ServiceInstantiationType = ServiceInstantiationType(
@@ -110,12 +117,15 @@ class YamlTemplateServiceOnboardStep(YamlTemplateBaseStep):
             if service.distributed:
                 return
         except ResourceNotFound:
-            service = Service.create(name=self.service_name, instantiation_type=instantiation_type)
+            self._logger.info("before service create")
+            service = Service.create(name=self.service_name,
+                                     instantiation_type=instantiation_type,
+                                     category=category)
+            self._logger.info("after service create")
             self.declare_resources(service)
             self.assign_properties(service)
         if service.lifecycle_state != LifecycleState.CERTIFIED:
             service.lifecycle_operation(LifecycleOperation.CERTIFY)
-        service.distribute()
 
     def declare_resources(self, service: Service) -> None:
         """Declare resources.
@@ -193,36 +203,85 @@ class YamlTemplateServiceOnboardStep(YamlTemplateBaseStep):
         super().cleanup()
 
 
-class VerifyServiceDistributionStep(BaseScenarioStep):
-    """Service distribution check step."""
+class YamlTemplateServiceDistributionStep(YamlTemplateBaseStep):
+    """Step for distributing a service after creation."""
 
     def __init__(self):
-        """Initialize step."""
+        """Initialize distribution step."""
         super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
-        self.add_step(ServiceDistributionWaitStep())
-        for notified_module in settings.SDC_SERVICE_DISTRIBUTION_COMPONENTS:
-            self.add_step(VerifyServiceDistributionStatusStep(
-                notified_module=notified_module))
-        if settings.IN_CLUSTER:
-            self.add_step(VerifyServiceDistributionInSoStep())
-            self.add_step(VerifyServiceDistributionInSdncStep())
-        self.add_step(VerifyServiceDistributionInAaiStep())
+        self._yaml_template: dict = None
+        self._model_yaml_template: dict = None
+        self.add_step(YamlTemplateServiceOnboardStep())
+
+    @property
+    def yaml_template(self) -> dict:
+        """Step YAML template.
+
+        Load from file if it's a root step, get from parent otherwise.
+
+        Returns:
+            dict: Step YAML template
+
+        """
+        if settings.MODEL_YAML_TEMPLATE:
+            return self.model_yaml_template
+        if self.is_root:
+            if not self._yaml_template:
+                with open(settings.SERVICE_YAML_TEMPLATE, "r", encoding="utf-8") as yaml_template:
+                    self._yaml_template: dict = load(yaml_template, SafeLoader)
+            return self._yaml_template
+        return self.parent.yaml_template
+
+    @property
+    def model_yaml_template(self) -> dict:
+        """Step Model YAML template.
+
+        Load from file if it's a root step, get from parent otherwise.
+
+        Returns:
+            dict: Step YAML template
+
+        """
+        if self.is_root:
+            if not self._model_yaml_template:
+                with open(settings.MODEL_YAML_TEMPLATE, "r",
+                          encoding="utf-8") as model_yaml_template:
+                    self._model_yaml_template: dict = load(model_yaml_template, SafeLoader)
+            return self._model_yaml_template
+        return self.parent.model_yaml_template
 
     @property
     def description(self) -> str:
         """Step description."""
-        return "Verify complete status of distribution"
+        return "Distribute the service created in the onboard step."
 
     @property
     def component(self) -> str:
         """Component name."""
         return "SDC"
 
+    @YamlTemplateBaseStep.store_state
+    def execute(self):
+        """Distribute service."""
+        super().execute()
+        service: Service = Service.get_by_name(name=self.service_name)
+        if service:
+            if not service.distributed:
+                service.distribute()
+                self._logger.info(f"Service {self.service_name} distributed successfully.")
+            else:
+                self._logger.info(f"Service {self.service_name} is already distributed.")
+        else:
+            raise onap_test_exceptions.OnapTestException(f"Service {self.service_name} "
+                                                         f"not found for distribution.")
+
 
 class BaseServiceDistributionComponentCheckStep(BaseStep):
     """Service distribution check step."""
 
-    def __init__(self, component_name: str, break_on_error: bool = True):
+    service_model = None
+
+    def __init__(self, component_name: str, break_on_error: bool = True, load_model: bool = True):
         """Initialize step.
 
         Args:
@@ -233,6 +292,7 @@ class BaseServiceDistributionComponentCheckStep(BaseStep):
                          break_on_error=break_on_error)
         self.component_name = component_name
         self.service: Service = None
+        self.load_model = load_model
 
     @property
     def description(self) -> str:
@@ -244,81 +304,33 @@ class BaseServiceDistributionComponentCheckStep(BaseStep):
         """Component name."""
         return self.component_name
 
-    def execute(self):
-        """Check service distribution status."""
-        super().execute()
-        self.service = Service.get_by_name(name=settings.SERVICE_NAME)
-
-
-class ServiceDistributionWaitStep(BaseServiceDistributionComponentCheckStep):
-    """Service distribution wait step."""
-
-    def __init__(self):
-        """Initialize step."""
-        super().__init__(component_name="SDC", break_on_error=False)
-
-    @BaseStep.store_state
-    def execute(self):
-        """Wait for service distribution."""
-        super().execute()
-        # Before instantiating, be sure that the service has been distributed
-        self._logger.info("******** Check Service Distribution *******")
-        distribution_completed = False
-        nb_try = 0
-        while distribution_completed is False and \
-                nb_try < settings.SERVICE_DISTRIBUTION_NUMBER_OF_TRIES:
-            distribution_completed = self.service.distributed
-            if distribution_completed is True:
-                self._logger.info(
-                    "Service Distribution for %s is sucessfully finished",
-                    self.service.name)
-                break
-            self._logger.info(
-                "Service Distribution for %s ongoing, Wait for %d s",
-                self.service.name, settings.SERVICE_DISTRIBUTION_SLEEP_TIME)
-            time.sleep(settings.SERVICE_DISTRIBUTION_SLEEP_TIME)
-            nb_try += 1
-
-        if distribution_completed is False:
-            msg = f"Service Distribution for {self.service.name} failed after timeout!!"
-            self._logger.error(msg)
-            raise onap_test_exceptions.ServiceDistributionException(msg)
-
+    def check_preconditions(self, cleanup=False) -> bool:
+        """Check preconditions.
 
-class VerifyServiceDistributionStatusStep(BaseServiceDistributionComponentCheckStep):
-    """Check service distribution in SO step."""
+        Check if step preconditions are satisfied. If they are not, the step is
+        skipped without further consequences; if they are, execution proceeds.
 
-    def __init__(self, notified_module: str):
-        """Initialize step.
+        Returns:
+            bool: True if preconditions are satisfied, False otherwise
 
-        Args:
-            notified_module (str): Name of notified module
         """
+        if cleanup:
+            return True
+        return bool(self.load_model or
+                    BaseServiceDistributionComponentCheckStep.service_model)
 
-        component_name = notified_module.split("-")[0].upper()
-        super().__init__(component_name=component_name)
-        self.component_id = notified_module
-
-    @property
-    def description(self) -> str:
-        """Step description."""
-        return f"Check service distribution in {self.component_name} \
-{self.component_id}."
-
-    @BaseStep.store_state
     def execute(self):
         """Check service distribution status."""
         super().execute()
-        if not self.service.distributed:
-            latest_distribution = self.service.latest_distribution
-            for status in latest_distribution.distribution_status_list:
-                if status.component_id == self.component_id and status.failed:
-                    msg = f"Service {self.service.name} is not \
-distributed into [{self.component_id}]: {status.error_reason}"
-                    self._logger.error(msg)
-                    raise onap_test_exceptions.ServiceDistributionException(msg)
-        msg = f"Service {self.service.name} is distributed in SO and {self.component_id}."
-        self._logger.info(msg)
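+        # Cache the SDC service model on the class so all component checks share one lookup.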
+        if not BaseServiceDistributionComponentCheckStep.service_model:
+            BaseServiceDistributionComponentCheckStep.service_model = Service.get_by_name(
+                name=settings.SERVICE_NAME)
+        self.service = BaseServiceDistributionComponentCheckStep.service_model
+
+    def _raise_reason(self, reason, exc=None):
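+        """Log the failure reason and raise a ServiceDistributionException."""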
+        self._logger.error(reason)
+        if exc:
+            raise onap_test_exceptions.ServiceDistributionException(reason) from exc
+        raise onap_test_exceptions.ServiceDistributionException(reason)
 
 
 class VerifyServiceDistributionInSoStep(BaseServiceDistributionComponentCheckStep):
@@ -326,22 +338,23 @@ class VerifyServiceDistributionInSoStep(BaseServiceDistributionComponentCheckSte
 
     def __init__(self):
         """Initialize step."""
-        super().__init__(component_name="SO")
+        super().__init__(component_name="SO", load_model=False)
 
     @BaseStep.store_state
     def execute(self):
         """Check service distribution status."""
         super().execute()
-        try:
-            CatalogDbAdapter.get_service_info(self.service.uuid)
-        except ResourceNotFound as e:
-            msg = f"Service {self.service.name} is missing in SO."
-            self._logger.error(msg)
-            raise onap_test_exceptions.ServiceDistributionException(msg) from e
-        except InvalidResponse:
-            # looks like json returned by SO catalog DB adapter returns wrong json
-            # but we don't care here. It is important to just know if service is there
-            pass
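+        # The SO catalog DB adapter is only reachable from inside the cluster.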
+        if settings.IN_CLUSTER:
+            try:
+                CatalogDbAdapter.get_service_info(self.service.uuid)
+            except ResourceNotFound as e:
+                msg = "Service model is missing in SO."
+                self._logger.error(msg)
+                raise onap_test_exceptions.ServiceDistributionException(msg) from e
+            except InvalidResponse:
+                # The SO catalog DB adapter may return malformed JSON; that is fine
+                # here, we only need to know that the service model exists.
+                pass
 
 
 class VerifyServiceDistributionInAaiStep(BaseServiceDistributionComponentCheckStep):
@@ -380,7 +393,7 @@ class VerifyServiceDistributionInAaiStep(BaseServiceDistributionComponentCheckSt
     def __init__(self):
         """Initialize step."""
         BaseServiceDistributionComponentCheckStep.__init__(
-            self, component_name="AAI")
+            self, component_name="AAI", load_model=False)
 
     @BaseStep.store_state
     def execute(self):
@@ -393,7 +406,7 @@ class VerifyServiceDistributionInAaiStep(BaseServiceDistributionComponentCheckSt
                 self._logger.info(
                     f"Resolved {aai_service.invariant_id} aai service")
         except ResourceNotFound as e:
-            msg = f"Service {self.service.name} is missing in AAI."
+            msg = "Service model is missing in AAI."
             self._logger.error(msg)
             raise onap_test_exceptions.ServiceDistributionException(msg) from e
 
@@ -408,38 +421,184 @@ class VerifyServiceDistributionInSdncStep(BaseServiceDistributionComponentCheckS
     def __init__(self):
         """Initialize step."""
         BaseServiceDistributionComponentCheckStep.__init__(
-            self, component_name="SDNC")
+            self, component_name="SDNC", load_model=False)
 
     @BaseStep.store_state
     def execute(self):
         """Check service distribution status."""
         super().execute()
-        login, password = KubernetesHelper.get_credentials_from_secret(
-            settings.SDNC_SECRET_NAME, self.SDNC_DB_LOGIN, self.SDNC_DB_PASSWORD)
-        conn = None
-        try:
-            conn = mysql.connect(
-                database=self.SDNC_DATABASE,
-                host=settings.SDNC_DB_PRIMARY_HOST,
-                port=settings.SDNC_DB_PORT,
-                user=login,
-                password=password)
-            cursor = conn.cursor()
-            cursor.execute(
-                f"SELECT * FROM service_model WHERE service_uuid = '{self.service.uuid}';")
-            cursor.fetchall()
-            if cursor.rowcount <= 0:
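+        # The SDNC DB is only reachable from inside the cluster.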
+        if settings.IN_CLUSTER:
+            login, password = KubernetesHelper.get_credentials_from_secret(
+                settings.SDNC_SECRET_NAME, self.SDNC_DB_LOGIN, self.SDNC_DB_PASSWORD)
+            conn = None
+            try:
+                conn = mysql.connect(
+                    database=self.SDNC_DATABASE,
+                    host=settings.SDNC_DB_PRIMARY_HOST,
+                    port=settings.SDNC_DB_PORT,
+                    user=login,
+                    password=password)
+                cursor = conn.cursor()
+                cursor.execute(
+                    "SELECT * FROM service_model WHERE service_uuid = %s;",
+                    (self.service.uuid,))
+                cursor.fetchall()
+                if cursor.rowcount <= 0:
+                    msg = "Service model is missing in SDNC."
+                    self._logger.error(msg)
+                    raise onap_test_exceptions.ServiceDistributionException(msg)
+                self._logger.info("Service found in SDNC")
+                cursor.close()
+            except Exception as e:
                 msg = "Service model is missing in SDNC."
-                self._logger.error(msg)
-                raise onap_test_exceptions.ServiceDistributionException(msg)
-            self._logger.info("Service found in SDNC")
-            cursor.close()
-        except Exception as e:
-            msg = f"Service {self.service.name} is missing in SDNC."
-            raise onap_test_exceptions.ServiceDistributionException(msg) from e
-        finally:
-            if conn:
-                try:
-                    conn.close()
-                except Exception:
-                    pass
+                raise onap_test_exceptions.ServiceDistributionException(msg) from e
+            finally:
+                if conn:
+                    try:
+                        conn.close()
+                    except Exception:
+                        pass
+
+
+class VerifyServiceDistributionStep(BaseStep):
+    """Service distribution check step."""
+
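+    # Maps the component prefix of a notified module to its dedicated verification step.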
+    COMPONENTS_DISTRIBUTION_VERIFICATION_MAP = {
+        "AAI": VerifyServiceDistributionInAaiStep,
+        "SDNC": VerifyServiceDistributionInSdncStep,
+        "SO": VerifyServiceDistributionInSoStep
+    }
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self.add_step(ServiceDistributionWaitStep())
+        modules = sorted(settings.SDC_SERVICE_DISTRIBUTION_COMPONENTS,
+                         reverse=True)
+        for notified_module in modules:
+            component_name = notified_module.split("-")[0].upper()
+            self.add_step(VerifyServiceDistributionStatusStep(
+                notified_module=notified_module, component_name=component_name))
+            try:
+                self.add_step(self.COMPONENTS_DISTRIBUTION_VERIFICATION_MAP[component_name]())
+            except KeyError:
+                pass
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Verify complete status of distribution"
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "TEST"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Check service distribution status."""
+        super().execute()
+        # We get here only if the previous steps have not failed
+        # or do not break on error.
+        for step in self._steps:
+            # A failed step that does not break on error is reported under its
+            # dedicated step; here we only flag the overall distribution failure.
+            if not step.is_executed:
+                raise onap_test_exceptions.ServiceDistributionException(
+                    "Service distribution has failed")
+
+
+class ServiceDistributionWaitStep(BaseServiceDistributionComponentCheckStep):
+    """Service distribution wait step."""
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(component_name="SDC", break_on_error=False)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Wait for service distribution result"
+
+    @BaseStep.store_state
+    def execute(self):
+        """Wait for service distribution."""
+        super().execute()
+        # Before instantiating, be sure that the service has been distributed
+        self._logger.info("******** Check Service Distribution *******")
+        distribution_completed = False
+        nb_try = 0
+        while distribution_completed is False and \
+                nb_try < settings.SERVICE_DISTRIBUTION_NUMBER_OF_TRIES:
+            distribution_completed = self.service.distributed
+            if distribution_completed is True:
+                self._logger.info(
+                    "Service Distribution for %s is sucessfully finished",
+                    self.service.name)
+                break
+            self._logger.info(
+                "Service Distribution for %s ongoing, Wait for %d s",
+                self.service.name, settings.SERVICE_DISTRIBUTION_SLEEP_TIME)
+            time.sleep(settings.SERVICE_DISTRIBUTION_SLEEP_TIME)
+            nb_try += 1
+
+        if distribution_completed is False:
+            msg = f"Service Distribution for {self.service.name} failed after timeout!!"
+            self._logger.error(msg)
+            # We do not raise here; the failure is reported per dedicated component
+            # in VerifyServiceDistributionStatusStep.
+            # raise onap_test_exceptions.ServiceDistributionException(
+            #     "Service distribution failed after timeout!!")
+
+
+class VerifyServiceDistributionStatusStep(BaseServiceDistributionComponentCheckStep):
+    """Check service distribution in SO step."""
+
+    def __init__(self, notified_module: str, component_name: str):
+        """Initialize step.
+
+        Args:
+            notified_module (str): Name of notified module
+            component_name (str): Name of the module's component
+        """
+
+        super().__init__(
+            component_name=component_name, break_on_error=False, load_model=False)
+        self.component_id = notified_module
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return f"Check service distribution in {self.component_name} \
+{self.component_id}."
+
+    @BaseStep.store_state
+    def execute(self):
+        """Check service distribution status."""
+        super().execute()
+        if not self.service.distributed:
+            latest_distribution = self.service.latest_distribution
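+            # Walk the statuses of the latest distribution for this component:
+            # a success entry marks it as distributed, a failure entry overrides that.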
+            present = False
+            msg = ""
+            distributed = False
+            for status in latest_distribution.distribution_status_list:
+                if status.component_id == self.component_id:
+                    present = True
+                    if status.distributed:
+                        distributed = True
+                    if status.failed:
+                        distributed = False
+                        msg = f"Service model distribution to [{self.component_id}] \
+failed: {status.error_reason}"
+                        break
+            if not distributed:
+                if not msg:
+                    if present:
+                        msg = f"Service model distribution to {self.component_id} \
+was not completed"
+                    else:
+                        msg = f"Service model was not distributed to {self.component_id}"
+                self._raise_reason(msg)
+        msg = f"Service {self.service.name} is distributed in {self.component_name} \
+and {self.component_id}."
+        self._logger.info(msg)
diff --git a/src/onaptests/steps/onboard/verify_cba.py b/src/onaptests/steps/onboard/verify_cba.py
new file mode 100644 (file)
index 0000000..7345fe8
--- /dev/null
@@ -0,0 +1,287 @@
+#!/usr/bin/env python
+"""CBA Verification test case."""
+import difflib
+import json
+import os
+import shutil
+from io import BytesIO
+
+from onapsdk.cds.blueprint import Blueprint
+from onapsdk.cds.cds_element import CdsElement
+from onapsdk.configuration import settings
+from onapsdk.exceptions import ResourceNotFound
+
+from onaptests.steps.base import BaseStep
+from onaptests.utils.exceptions import CbaVerificationException
+from onaptests.utils.gitlab import GitLabClient
+
+
+class TestCbaBaseStep(BaseStep):
+    """Test CBA Base Step"""
+
+    def __init__(self, cleanup=BaseStep.HAS_NO_CLEANUP, break_on_error: bool = True):
+        """TestCbaBaseStep."""
+        super().__init__(cleanup=cleanup, break_on_error=break_on_error)
+
+    @property
+    def component(self) -> str:
+        """Component name.
+
+        Name of component which step is related with.
+            Most is the name of ONAP component.
+
+        Returns:
+            str: Component name
+
+        """
+        return "CBA"
+
+
+class DownloadCbaStep(TestCbaBaseStep, CdsElement):
+    """Step to download CBA content and convert it to zip."""
+
+    def __init__(self, cba_data):
+        """Download CBA step."""
+
+        super().__init__(cleanup=settings.CLEANUP_FLAG, break_on_error=True)
+        self.cba_data = cba_data
+        self.cba = None
+
+    def get_details_by_name_and_version(self, name: str, version: str):
+        """Get CBA details from its name and version."""
+        cba_details = self.send_message(
+            action="Download CBA Details",
+            method="GET",
+            url=f"{self._url}/api/v1/blueprint-model/by-name/{name}/version/{version}",
+            auth=self.auth
+        )
+        return cba_details
+
+    def get_by_name_and_version(self, name: str, version: str) -> Blueprint:
+        """Get CBA blueprint from its name and version."""
+        cba_data = self.send_message(
+            action="Download CBA Content",
+            method="GET",
+            url=f"{self._url}/api/v1/blueprint-model/download/by-name/{name}/version/{version}",
+            auth=self.auth
+        )
+        return Blueprint(BytesIO(cba_data.content).read())
+
+    @property
+    def description(self) -> str:
+        """Step description.
+
+        Used for reports
+
+        Returns:
+            str: Step description
+
+        """
+        name = self.cba_data["name"]
+        return f"Downloading CBA {name} from CDS"
+
+    def zip_cba_from_cds(self):
+        """Check CBA zip downloaded from CDS."""
+        name = self.cba_data["name"]
+        version = self.cba_data["version"]
+        zip_path = f"{settings.LOCAL_PATH}/{name}/{version}"
+        os.makedirs(zip_path, exist_ok=True)
+        zip_file = f"{zip_path}/cba.zip"
+        try:
+            self.get_details_by_name_and_version(name, version)
+            blueprint = self.cba = self.get_by_name_and_version(name, version)
+            blueprint.save(zip_file)
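+            # Unpack the archive so the content can be compared file by file with GitLab.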
+            shutil.unpack_archive(zip_file, zip_path, 'zip')
+        except ResourceNotFound as exc:
+            self._logger.error(f"CBA {name}-{version} Not Found")
+            raise CbaVerificationException("CBA Not Found in CDS") from exc
+
+    def delete_cba_directory(self):
+        """Delete local CBA content."""
+        name = self.cba_data["name"]
+        delete_path = f"{settings.LOCAL_PATH}/{name}"
+        if os.path.exists(delete_path):
+            try:
+                shutil.rmtree(delete_path)
+                self._logger.info(f"The directory '{delete_path}' has been successfully deleted.")
+            except OSError as e:
+                error_message = f"Error while deleting directory '{delete_path}': {e}"
+                self._logger.error(error_message)
+                raise CbaVerificationException(error_message) from e
+        else:
+            self._logger.info(f"The directory '{delete_path}' does not exist.")
+
+    @BaseStep.store_state
+    def execute(self) -> None:
+        """Download CBA content and convert it to zip."""
+        super().execute()
+        self.zip_cba_from_cds()
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Delete downloaded cba content directory."""
+        self.delete_cba_directory()
+        super().cleanup()
+
+
+class TestCbaStep(TestCbaBaseStep):
+    """Step to check if workflow exists and compare CBA from Gitlab with CBA downloaded from CDS."""
+
+    def __init__(self, cba_data):
+        """Initialize step.
+
+        Substeps:
+            - DownloadCbaStep
+        """
+        super().__init__(break_on_error=False)
+        self.cba_data = cba_data
+        self.download_cba_step = DownloadCbaStep(cba_data)
+        self.add_step(self.download_cba_step)
+
+    @property
+    def description(self) -> str:
+        """Step description.
+
+        Used for reports
+
+        Returns:
+            str: Step description
+
+        """
+        name = self.cba_data["name"]
+        return f"Compare CBA {name} Content with Gitlab"
+
+    def show_differences(self, git_file_content, local_file_content, entry_path):
+        """Show CBA file difference: git and local."""
+        diff = difflib.unified_diff(
+            git_file_content.splitlines(),
+            local_file_content.splitlines(),
+            fromfile=entry_path + ' (git)',
+            tofile=entry_path + ' (local)'
+        )
+        diff_text = "\n".join(diff)
+        return f"\n\n{diff_text}\n"
+
+    def load_json(self, git_file_content, local_file_path):
+        """Load CBA json file with formatting."""
+        try:
+            git_file_content = json.loads(git_file_content)
+            if os.path.exists(local_file_path):
+                with open(local_file_path, 'r', encoding="utf-8") as local_file:
+                    local_file_content = local_file.read()
+                local_file_content = json.loads(local_file_content)
+                git_file_content = dict(sorted(git_file_content.items()))
+                local_file_content = dict(sorted(local_file_content.items()))
+            return (json.dumps(git_file_content, indent=4, sort_keys=True),
+                    json.dumps(local_file_content, indent=4, sort_keys=True))
+        except json.JSONDecodeError as e:
+            self._logger.error(f"Error decoding JSON: {e}")
+            return None, None
+
+    def check_if_path_in_pattern_list(self, path, patterns):
+        """Check if file path is in the pattern."""
+        for pattern in patterns:
+            if pattern in path:
+                return True
+        return False
+
+    def compare_directories_recursive( # noqa: C901
+            self, branch, git_directory_path, local_directory_path, gitlab_id):
+        """Compare local and gitlab CBA directories."""
+        enrichment = self.cba_data["enrichment"]
+        try:
+            git_directory_entries = GitLabClient.get_directory_entries(
+                branch, git_directory_path, gitlab_id)
+            if len(git_directory_entries) == 0:
+                self._logger.error(f"Folder '{git_directory_path}' on gitlab "
+                                   f"with projct ID: {gitlab_id} does not exist")
+                raise CbaVerificationException("Cannot locate repo folder in gitlab")
+        except TypeError as exc:
+            self._logger.error(f"Branch '{branch}' on gitlab "
+                               f"with projct ID: {gitlab_id} does not exist")
+            raise CbaVerificationException("Cannot locate branch in gitlab") from exc
+        ident_files = []
+        diff_files = []
+        git_only_files = []
+        differences = {}
+        for entry in git_directory_entries:
+            if self.check_if_path_in_pattern_list(entry.path, settings.IGNORE_FILES):
+                # Check if the path is in ignore_files set
+                continue
+            binary_check = False
+            if entry.type == "tree":
+                ident, diff, git, show = self.compare_directories_recursive(
+                    branch, entry.path, local_directory_path + '/' + entry.name, gitlab_id)
+                ident_files.extend(ident)
+                diff_files.extend(diff)
+                git_only_files.extend(git)
+                differences.update(show)
+            else:  # It's a file
+                git_file_content = GitLabClient.get_text_file_content(branch, entry.path, gitlab_id)
+                git_file_content = git_file_content.replace("\r\n", "\n").strip()
+                entry_name = os.path.basename(entry.path)
+                local_file_path = os.path.join(local_directory_path, entry_name)
+                if os.path.exists(local_file_path):
+                    try:
+                        with open(local_file_path, 'r', encoding='utf-8') as local_file:
+                            local_file_content = local_file.read().replace("\r\n", "\n").strip()
+                    except UnicodeDecodeError:
+                        binary_check = True
+                    if binary_check is False:
+                        if 'Definitions/' in entry.path:
+                            git_file_content, local_file_content = self.load_json(
+                                git_file_content, local_file_path)
+                        if git_file_content == local_file_content:
+                            ident_files.append(entry.path)
+                        else:
+                            if (not enrichment or
+                                not self.check_if_path_in_pattern_list(entry.path,
+                                                                       settings.ENRICHMENT_FILES)):
+                                diff_files.append(entry.path)
+                                diff_text = self.show_differences(
+                                    git_file_content, local_file_content, entry.path
+                                )
+                                differences[entry.path] = diff_text
+                else:
+                    git_only_files.append(entry.path)
+        return ident_files, diff_files, git_only_files, differences
+
+    def summarize_comparison(self, local_path, gitlab_id, branch):
+        """Summarize CBA comparison."""
+        gitlab_repo_cba = self.cba_data["gitlab_repo_cba"]
+        ident_files, diff_files, git_only_files, differences = self.compare_directories_recursive(
+            branch, gitlab_repo_cba,
+            local_path, gitlab_id)
+        error = False
+        if ident_files:
+            self._logger.info(f"Identical files: {ident_files}")
+            self._logger.info(f"There are {len(ident_files)} identical files")
+        if diff_files:
+            dif_error_message = f"Different files: {diff_files}"
+            self._logger.error(dif_error_message)
+            self._logger.error(f"There are {len(diff_files)} different files")
+            error = True
+        if git_only_files:
+            git_error_message = f"Files that exists only on Gitlab: {git_only_files}"
+            self._logger.error(git_error_message)
+            self._logger.error(f"There are {len(git_only_files)} files that exist only on Gitlab")
+            error = True
+        if differences:
+            for file_path, diff_text in differences.items():
+                self._logger.info(f"Differences in file: {file_path}")
+                self._logger.info(diff_text)
+        if error:
+            raise CbaVerificationException(
+                "CBA content differencies between Gitlab and CDS")
+
+    @BaseStep.store_state
+    def execute(self) -> None:
+        """Check if workflow exists and compare CBA from Gitlab with CBA downloaded from CDS."""
+        super().execute()
+        gitlab_id = str(self.cba_data["gitlab_project_id"])
+        branch = self.cba_data["gitlab_branch"]
+        name = self.cba_data["name"]
+        version = self.cba_data["version"]
+        local_path = f"{settings.LOCAL_PATH}/{name}/{version}"
+        self.summarize_comparison(local_path, gitlab_id, branch)
index 75c6c5e..54a3f19 100644 (file)
@@ -1,8 +1,10 @@
 import onapsdk.constants as const
 from onapsdk.configuration import settings
+from onapsdk.exceptions import APIError
 from onapsdk.sdc.vendor import Vendor
 from onapsdk.sdc.vsp import Vsp
 
+from onaptests.utils.exceptions import OnapTestException
 from onaptests.utils.resources import get_resource_location
 
 from ..base import BaseStep, YamlTemplateBaseStep
@@ -161,3 +163,51 @@ class YamlTemplateVspOnboardStep(YamlTemplateBaseStep):
             for pnf in self.yaml_template["pnfs"]:
                 self._cleanup_vsp(f"{pnf['pnf_name']}_VSP")
         super().cleanup()
+
+
+class GetVspLoadStatusStep(BaseStep):
+    """Getting VSP status step.
+
+    The step checks the status of a VSP (vendor software product) in SDC:
+    DRAFT --> UPLOADED --> VALIDATED --> COMMITTED --> CERTIFIED.
+    """
+
+    def __init__(self):
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Get vsp status by ID from SDC."
+
+    @property
+    def component(self) -> str:
+        """Component name.
+
+        Name of component which step is related with.
+            Most is the name of ONAP component.
+
+        Returns:
+            str: Component name
+
+        """
+        return "SDC"
+
+    @BaseStep.store_state
+    def execute(self) -> None:
+        """Get the VSP status from SDC and verify it is certified."""
+        super().execute()
+        self._logger.info("Get VSP status from SDC")
+        try:
+            vendor: Vendor = Vendor(name=settings.SDBH_VENDOR_NAME)
+            vsp: Vsp = Vsp(name=settings.SDBH_VSP_NAME, vendor=vendor)
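+            # Refresh the VSP status from SDC before evaluating it.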
+            vsp.load_status()
+            if vsp.status is not None:
+                self._logger.info("Getting VSP status is completed.")
+                if vsp.status == 'Certified':
+                    self._logger.info("VSP status is: " + vsp.status)
+                    return vsp.status
+                raise OnapTestException("Unexpected VSP status %s" % vsp.status)
+            raise OnapTestException("vsp.status is NoneType")
+        except APIError as exc:
+            raise OnapTestException("Error while accessing SDC") from exc
diff --git a/src/onaptests/steps/policy/policy_operations.py b/src/onaptests/steps/policy/policy_operations.py
new file mode 100644 (file)
index 0000000..b821904
--- /dev/null
@@ -0,0 +1,141 @@
+from abc import ABC
+
+from onapsdk.configuration import settings
+from onapsdk.policy.policy import Policy
+
+from onaptests.steps.base import BaseStep
+from onaptests.utils.exceptions import OnapTestException
+
+
+class PolicyBaseStep(BaseStep, ABC):
+    """Abstract Policy base step."""
+
+    @property
+    def component(self) -> str:
+        """Component name."""
+        return "Policy"
+
+
+class StorePolicyStep(PolicyBaseStep):
+    """Storage of policy."""
+
+    def __init__(self) -> None:
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Storage of policy."
+
+    @BaseStep.store_state
+    def execute(self) -> None:
+        """Storage of policy."""
+        super().execute()
+        try:
+            response = Policy.store(settings.STORE_POLICY)
+            self._logger.info(response.content)
+        except Exception as exc:
+            raise OnapTestException(f"An error occurred while storing policy: {exc}") from exc
+
+
+class DeployPolicyStep(PolicyBaseStep):
+    """Deployment of policy."""
+
+    def __init__(self) -> None:
+        """Initialize step.
+
+        Substeps:
+            - StorePolicyStep.
+        """
+        super().__init__(cleanup=settings.CLEANUP_FLAG)
+        self.add_step(StorePolicyStep())
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Deployment of policy."
+
+    @BaseStep.store_state
+    def execute(self) -> None:
+        """Deployment of policy."""
+        super().execute()
+        try:
+            response = Policy.deploy(settings.DEPLOY_POLICY)
+            self._logger.info(response.content)
+        except Exception as exc:
+            raise OnapTestException(f"An error occurred while deploying policy: {exc}") from exc
+
+    @BaseStep.store_state(cleanup=True)
+    def cleanup(self) -> None:
+        """Cleanup of deployed policy."""
+        self._logger.info("cleanup of deployed policy")
+        try:
+            get_response = Policy.get(settings.POLICY_ID, settings.POLICY_VERSION)
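+            # Undeploy and delete only if the policy actually exists.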
+            if get_response.status_code == 200:
+                Policy.undeploy(settings.POLICY_ID)
+                Policy.delete(settings.POLICY_ID, settings.POLICY_VERSION)
+                self._logger.info("Policy deleted successfully.")
+            else:
+                self._logger.error("Policy does not exist.")
+        except Exception as exc:
+            self._logger.error(f"Error occurred while deleting policy: {exc}")
+            raise OnapTestException(exc) from exc
+        super().cleanup()
+
+
+class GetPolicyStep(PolicyBaseStep):
+    """Get the policy.
+
+    Substeps:
+        - DeployPolicyStep.
+    """
+
+    def __init__(self) -> None:
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self.add_step(DeployPolicyStep())
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Get the policy."
+
+    @BaseStep.store_state
+    def execute(self) -> None:
+        """Get the policy."""
+        super().execute()
+        try:
+            response = Policy.get(settings.POLICY_ID, settings.POLICY_VERSION)
+            self._logger.info(response.content)
+        except Exception as exc:
+            raise OnapTestException(f"An error occurred while getting the policy: {exc}") from exc
+
+
+class GetPolicyDecisionStep(PolicyBaseStep):
+    """Get the decision.
+
+    Substeps:
+        - GetPolicyStep.
+    """
+
+    def __init__(self) -> None:
+        """Initialize step."""
+        super().__init__(cleanup=BaseStep.HAS_NO_CLEANUP)
+        self.add_step(GetPolicyStep())
+
+    @property
+    def description(self) -> str:
+        """Step description."""
+        return "Get the decision."
+
+    @BaseStep.store_state
+    def execute(self) -> None:
+        """Get the decision."""
+        super().execute()
+        try:
+            response = Policy.decision(settings.DECISION_REQUEST)
+            self._logger.info(response.content)
+        except Exception as exc:
+            raise OnapTestException(f"An error occurred while getting the policy"
+                                    f" decision: {exc}") from exc
index be5c4da..3df16f3 100644 (file)
Binary files a/src/onaptests/templates/artifacts/basic_cnf_cba_enriched.zip and b/src/onaptests/templates/artifacts/basic_cnf_cba_enriched.zip differ
diff --git a/src/onaptests/templates/artifacts/cba_enriched_new.zip b/src/onaptests/templates/artifacts/cba_enriched_new.zip
new file mode 100644 (file)
index 0000000..c28ecdd
Binary files /dev/null and b/src/onaptests/templates/artifacts/cba_enriched_new.zip differ
index ead7f86..cad6bd0 100644 (file)
@@ -73,7 +73,7 @@
         "type": "string"
       },
       "sources": {
-        "sdnc": {
+        "rest": {
           "type": "source-rest",
           "properties": {
             "type": "string",
         "type": "string"
       },
       "sources": {
-        "sdnc": {
+        "rest": {
           "type": "source-rest",
           "properties": {
             "type": "string",
         "type": "string"
       },
       "sources": {
-        "sdnc": {
+        "rest": {
           "type": "source-rest",
           "properties": {
             "type": "string",
         "type": "string"
       },
       "sources": {
-        "sdnc": {
+        "rest": {
           "type": "source-rest",
           "properties": {
             "type": "string",
         "type": "string"
       },
       "sources": {
-        "sdnc": {
+        "rest": {
           "type": "source-rest",
           "properties": {
             "type": "string",
         "type": "string"
       },
       "sources": {
-        "sdnc": {
+        "rest": {
           "type": "source-rest",
           "properties": {
             "type": "string",
index f42d4ea..7ae504e 100644 (file)
Binary files a/src/onaptests/templates/artifacts/cds-resource-resolution/resource-resolution.zip and b/src/onaptests/templates/artifacts/cds-resource-resolution/resource-resolution.zip differ
diff --git a/src/onaptests/templates/artifacts/create_kafka_topic_template.json.j2 b/src/onaptests/templates/artifacts/create_kafka_topic_template.json.j2
new file mode 100644 (file)
index 0000000..2c9e2f5
--- /dev/null
@@ -0,0 +1,16 @@
+{
+    "apiVersion": "kafka.strimzi.io/v1beta2",
+    "kind": "KafkaTopic",
+    "metadata": {
+        "name": "{{ topicName }}",
+        "namespace": "{{ namespace }}",
+        "labels": {
+            "strimzi.io/cluster": "onap-strimzi"
+        }
+    },
+    "spec": {
+        "partitions": 3,
+        "replicas": 1,
+        "name": "{{ topicName }}"
+    }
+}
\ No newline at end of file
diff --git a/src/onaptests/templates/artifacts/ntp_checker_daemon.yml.j2 b/src/onaptests/templates/artifacts/ntp_checker_daemon.yml.j2
new file mode 100644 (file)
index 0000000..73d5060
--- /dev/null
@@ -0,0 +1,42 @@
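+# DaemonSet that runs the mounted checker.py script on every node.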
+apiVersion: apps/v1
+kind: DaemonSet
+metadata:
+  name: {{daemon_name}}
+  labels:
+    k8s-app: {{daemon_name}}
+spec:
+  selector:
+    matchLabels:
+      name: {{daemon_name}}
+  template:
+    metadata:
+      labels:
+        name: {{daemon_name}}
+    spec:
+      containers:
+      - name: {{daemon_name}}
+        image: dockerhub.devops.telekom.de/python:3.11-slim
+        command: ["python", "/app/checker.py"]
+        resources:
+          limits:
+            memory: 200Mi
+          requests:
+            cpu: 100m
+            memory: 200Mi
+        volumeMounts:
+        - name: varlog
+          mountPath: /var/log
+        - name: {{daemon_name}}-script
+          mountPath: /app/checker.py
+          subPath: checker.py
+        ports:
+        - containerPort: 8000
+      terminationGracePeriodSeconds: 30
+      volumes:
+      - name: {{daemon_name}}-script
+        configMap:
+          name: {{daemon_name}}-script
+          defaultMode: 0777
+      - name: varlog
+        hostPath:
+          path: /var/log
diff --git a/src/onaptests/templates/artifacts/pm_message_file.json b/src/onaptests/templates/artifacts/pm_message_file.json
new file mode 100644 (file)
index 0000000..395f0cb
--- /dev/null
@@ -0,0 +1,228 @@
+{
+  "event": {
+    "commonEventHeader": {
+      "domain": "perf3gpp",
+      "eventId": "9efa1210-f285-455f-9c6a-3a659b1f1882",
+      "sequence": 0,
+      "eventName": "perf3gpp_gnb-Ericsson_pmMeasResult",
+      "sourceName": "du-1",
+      "reportingEntityName": "",
+      "priority": "Normal",
+      "startEpochMicrosec": 951912000000,
+      "lastEpochMicrosec": 951912900000,
+      "version": "4.0",
+      "vesEventListenerVersion": "7.1",
+      "timeZoneOffset": "+00:00"
+    },
+    "perf3gppFields": {
+      "perf3gppFieldsVersion": "1.0",
+      "measDataCollection": {
+        "granularityPeriod": 900,
+        "measuredEntityUserName": "RNC Telecomville",
+        "measuredEntityDn": "SubNetwork=CountryNN,MeContext=MEC-Gbg-1,ManagedElement=RNC-Gbg-1",
+        "measuredEntitySoftwareVersion": "",
+        "measInfoList": [
+          {
+            "measInfoId": {
+              "sMeasInfoId": ""
+            },
+            "measTypes": {
+              "sMeasTypesList": [
+                "attTCHSeizures",
+                "succTCHSeizures",
+                "Cell_Unavailable_Fault",
+                "Cell_Unavailable_Manual_Intervention"
+              ]
+            },
+            "measValuesList": [
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-997",
+                "suspectFlag": "false",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "813"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "913"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "1013"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "1113"
+                  }
+                ]
+              },
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-998",
+                "suspectFlag": "false",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "890"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "901"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "123"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "234"
+                  }
+                ]
+              },
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-999",
+                "suspectFlag": "true",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "456"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "567"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "678"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "789"
+                  }
+                ]
+              }
+            ]
+          },
+          {
+            "measInfoId": {
+              "sMeasInfoId": "ENodeBFunction"
+            },
+            "measTypes": {
+              "sMeasTypesList": [
+                "attTCHSeizures",
+                "succTCHSeizures2",
+                "attImmediateAssignProcs3",
+                "succImmediateAssignProcs4"
+              ]
+            },
+            "measValuesList": [
+              {
+                "measObjInstId": "ManagedElement=RNC-Gbg-1,ENodeBFunction=1",
+                "suspectFlag": "false",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "4"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "86,87,2,6,77,96,75,33,24"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "40"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "90"
+                  }
+                ]
+              }
+            ]
+          },
+          {
+            "measInfoId": {
+              "sMeasInfoId": ""
+            },
+            "measTypes": {
+              "sMeasTypesList": [
+                "test123",
+                "succTCHSeizures6",
+                "attImmediateAssignProcs7",
+                "succImmediateAssignProcs8"
+              ]
+            },
+            "measValuesList": [
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-997",
+                "suspectFlag": "false",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "238"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "344"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "563"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "787"
+                  }
+                ]
+              },
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-998",
+                "suspectFlag": "false",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "898"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "905"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "127"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "238"
+                  }
+                ]
+              },
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-999",
+                "suspectFlag": "true",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "454"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "569"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "672"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "785"
+                  }
+                ]
+              }
+            ]
+          }
+        ]
+      }
+    }
+  }
+}
diff --git a/src/onaptests/templates/artifacts/pm_message_negative_file.json b/src/onaptests/templates/artifacts/pm_message_negative_file.json
new file mode 100644 (file)
index 0000000..b454a6b
--- /dev/null
@@ -0,0 +1,228 @@
+{
+  "event": {
+    "commonEventHeader": {
+      "domain": "perf3gpp",
+      "eventId": "9efa1210-f285-455f-9c6a-3a659b1f1882",
+      "sequence": 0,
+      "eventName": "perf3gpp_gnb-Ericsson_pmMeasResult",
+      "sourceName": "empty",
+      "reportingEntityName": "",
+      "priority": "Normal",
+      "startEpochMicrosec": 951912000000,
+      "lastEpochMicrosec": 951912900000,
+      "version": "4.0",
+      "vesEventListenerVersion": "7.1",
+      "timeZoneOffset": "+00:00"
+    },
+    "perf3gppFields": {
+      "perf3gppFieldsVersion": "1.0",
+      "measDataCollection": {
+        "granularityPeriod": 900,
+        "measuredEntityUserName": "RNC Telecomville",
+        "measuredEntityDn": "SubNetwork=CountryNN,MeContext=MEC-Gbg-1,ManagedElement=RNC-Gbg-1",
+        "measuredEntitySoftwareVersion": "",
+        "measInfoList": [
+          {
+            "measInfoId": {
+              "sMeasInfoId": ""
+            },
+            "measTypes": {
+              "sMeasTypesList": [
+                "attTCHSeizures",
+                "succTCHSeizures",
+                "Cell_Unavailable_Fault",
+                "Cell_Unavailable_Manual_Intervention"
+              ]
+            },
+            "measValuesList": [
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-997",
+                "suspectFlag": "false",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "813"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "913"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "1013"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "1113"
+                  }
+                ]
+              },
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-998",
+                "suspectFlag": "false",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "890"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "901"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "123"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "234"
+                  }
+                ]
+              },
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-999",
+                "suspectFlag": "true",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "456"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "567"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "678"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "789"
+                  }
+                ]
+              }
+            ]
+          },
+          {
+            "measInfoId": {
+              "sMeasInfoId": "ENodeBFunction"
+            },
+            "measTypes": {
+              "sMeasTypesList": [
+                "attTCHSeizures",
+                "succTCHSeizures2",
+                "attImmediateAssignProcs3",
+                "succImmediateAssignProcs4"
+              ]
+            },
+            "measValuesList": [
+              {
+                "measObjInstId": "ManagedElement=RNC-Gbg-1,ENodeBFunction=1",
+                "suspectFlag": "false",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "4"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "86,87,2,6,77,96,75,33,24"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "40"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "90"
+                  }
+                ]
+              }
+            ]
+          },
+          {
+            "measInfoId": {
+              "sMeasInfoId": ""
+            },
+            "measTypes": {
+              "sMeasTypesList": [
+                "test123",
+                "succTCHSeizures6",
+                "attImmediateAssignProcs7",
+                "succImmediateAssignProcs8"
+              ]
+            },
+            "measValuesList": [
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-997",
+                "suspectFlag": "false",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "238"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "344"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "563"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "787"
+                  }
+                ]
+              },
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-998",
+                "suspectFlag": "false",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "898"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "905"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "127"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "238"
+                  }
+                ]
+              },
+              {
+                "measObjInstId": "RncFunction=RF-1,UtranCell=Gbg-999",
+                "suspectFlag": "true",
+                "measResults": [
+                  {
+                    "p": 1,
+                    "sValue": "454"
+                  },
+                  {
+                    "p": 2,
+                    "sValue": "569"
+                  },
+                  {
+                    "p": 3,
+                    "sValue": "672"
+                  },
+                  {
+                    "p": 4,
+                    "sValue": "785"
+                  }
+                ]
+              }
+            ]
+          }
+        ]
+      }
+    }
+  }
+}
diff --git a/src/onaptests/templates/artifacts/pnf_instantiation_ves_event.json.j2 b/src/onaptests/templates/artifacts/pnf_instantiation_ves_event.json.j2
new file mode 100644 (file)
index 0000000..892b966
--- /dev/null
@@ -0,0 +1,50 @@
+{
+  "event": {
+    "commonEventHeader": {
+      "domain": "pnfRegistration",
+      "eventId": "ORAN_SIM_400600927_2020-04-02T17:20:22.2Z",
+      "eventName": "pnfRegistration_EventType5G",
+      "eventType": "EventType5G",
+      "sequence": 0,
+      "priority": "Low",
+      "reportingEntityId": "",
+      "reportingEntityName": "ORAN_SIM_400600927",
+      "sourceId": "{{ source_name }}",
+      "sourceName": "{{ source_name }}",
+      "startEpochMicrosec": 94262132085746,
+      "lastEpochMicrosec": 94262132085746,
+      "nfNamingCode": "sdn controller",
+      "nfVendorName": "sdn",
+      "timeZoneOffset": "+00:00",
+      "version": "4.0.1",
+      "vesEventListenerVersion": "7.0.1"
+    },
+    "pnfRegistrationFields": {
+      "pnfRegistrationFieldsVersion": "2.0",
+      "lastServiceDate": "2019-08-16",
+      "macAddress": "D7:64:C8:CC:E9:32",
+      "manufactureDate": "2019-08-16",
+      "modelNumber": "Simulated Device Melacon",
+      "oamV4IpAddress": "10.42.6.245",
+      "oamV6IpAddress": "0:0:0:0:0:ffff:a0a:011",
+      "serialNumber": "ORAN_SIM-172.30.1.6-400600927-Simulated Device Melacon",
+      "softwareVersion": "2.3.5",
+      "unitFamily": "Simulated Device",
+      "unitType": "ntsim_oran",
+      "vendorName": "Melacon",
+      "additionalFields": {
+        "oamPort": "830",
+        "protocol": "SSH",
+        "username": "netconf",
+        "password": "netconf",
+        "reconnectOnChangedSchema": "false",
+        "sleep-factor": "1.5",
+        "tcpOnly": "false",
+        "connectionTimeout": "20000",
+        "maxConnectionAttempts": "100",
+        "betweenAttemptsTimeout": "2000",
+        "keepaliveDelay": "120"
+      }
+    }
+  }
+}
diff --git a/src/onaptests/templates/artifacts/pnf_registration_dmaap_event_template.json.j2 b/src/onaptests/templates/artifacts/pnf_registration_dmaap_event_template.json.j2
new file mode 100644 (file)
index 0000000..8d79bf4
--- /dev/null
@@ -0,0 +1,55 @@
+[
+   {
+    "event":{
+       "commonEventHeader":{
+          "sourceId":"",
+          "startEpochMicrosec":94262132085746,
+          "eventId":"ORAN_SIM_400600927_2020-04-02T17:20:22.2Z",
+          "timeZoneOffset":"+00:00",
+          "reportingEntityId":"",
+          "internalHeaderFields":{
+             "collectorTimeStamp":"Mon, 06 19 2023 05:59:16 GMT"
+          },
+          "eventType":"EventType5G",
+          "priority":"Low",
+          "version":"4.0.1",
+          "nfVendorName":"sdn",
+          "reportingEntityName":"ORAN_SIM_400600927",
+          "sequence":0,
+          "domain":"pnfRegistration",
+          "lastEpochMicrosec":94262132085746,
+          "eventName":"pnfRegistration_EventType5G",
+          "vesEventListenerVersion":"7.0.1",
+          "sourceName":"{{ sourceName }}",
+          "nfNamingCode":"sdn controller"
+       },
+       "pnfRegistrationFields":{
+          "serialNumber":"{{ serialNumber}}",
+          "additionalFields":{
+             "protocol":"SSH",
+             "password":"netconf",
+             "oamPort":"830",
+             "betweenAttemptsTimeout":"2000",
+             "keepaliveDelay":"120",
+             "sleep-factor":"1.5",
+             "reconnectOnChangedSchema":"false",
+             "connectionTimeout":"20000",
+             "maxConnectionAttempts":"100",
+             "username":"netconf",
+             "tcpOnly":"false"
+          },
+          "lastServiceDate":"2019-08-16",
+          "unitFamily":"Simulated Device",
+          "vendorName":"Melacon",
+          "oamV6IpAddress":"{{ oamV6IpAddress }}",
+          "unitType":"ntsim_oran",
+          "macAddress":"D7:64:C8:CC:E9:32",
+          "pnfRegistrationFieldsVersion":"2.0",
+          "manufactureDate":"2019-08-16",
+          "modelNumber":"Simulated Device Melacon",
+          "oamV4IpAddress":"{{ oamV4IpAddress }}",
+          "softwareVersion":"2.3.5"
+       }
+    }
+   }
+]
\ No newline at end of file
diff --git a/src/onaptests/templates/artifacts/pnf_registration_ves_event.json b/src/onaptests/templates/artifacts/pnf_registration_ves_event.json
new file mode 100644 (file)
index 0000000..784e50a
--- /dev/null
@@ -0,0 +1,50 @@
+{
+  "event": {
+    "commonEventHeader": {
+      "domain": "pnfRegistration",
+      "eventId": "ORAN_SIM_400600927_2020-04-02T17:20:22.2Z",
+      "eventName": "pnfRegistration_EventType5G",
+      "eventType": "EventType5G",
+      "sequence": 0,
+      "priority": "Low",
+      "reportingEntityId": "",
+      "reportingEntityName": "ORAN_SIM_400600927",
+      "sourceId": "",
+      "sourceName": "dummy-ru-vesCollectorTest",
+      "startEpochMicrosec": 94262132085746,
+      "lastEpochMicrosec": 94262132085746,
+      "nfNamingCode": "sdn controller",
+      "nfVendorName": "sdn",
+      "timeZoneOffset": "+00:00",
+      "version": "4.0.1",
+      "vesEventListenerVersion": "7.0.1"
+    },
+    "pnfRegistrationFields": {
+      "pnfRegistrationFieldsVersion": "2.0",
+      "lastServiceDate": "2019-08-16",
+      "macAddress": "D7:64:C8:CC:E9:32",
+      "manufactureDate": "2019-08-16",
+      "modelNumber": "Simulated Device Melacon",
+      "oamV4IpAddress": "10.42.6.245",
+      "oamV6IpAddress": "0:0:0:0:0:ffff:a0a:011",
+      "serialNumber": "ORAN_SIM-172.30.1.6-400600927-Simulated Device Melacon",
+      "softwareVersion": "2.3.5",
+      "unitFamily": "Simulated Device",
+      "unitType": "ntsim_oran",
+      "vendorName": "Melacon",
+      "additionalFields": {
+        "oamPort": "830",
+        "protocol": "SSH",
+        "username": "netconf",
+        "password": "netconf",
+        "reconnectOnChangedSchema": "false",
+        "sleep-factor": "1.5",
+        "tcpOnly": "false",
+        "connectionTimeout": "20000",
+        "maxConnectionAttempts": "100",
+        "betweenAttemptsTimeout": "2000",
+        "keepaliveDelay": "120"
+      }
+    }
+  }
+}
diff --git a/src/onaptests/templates/artifacts/ves_message_file.json b/src/onaptests/templates/artifacts/ves_message_file.json
new file mode 100644 (file)
index 0000000..9b7f65c
--- /dev/null
@@ -0,0 +1,40 @@
+{
+  "event": {
+    "commonEventHeader": {
+      "domain": "notification",
+      "eventId": "5ed35a0328d2be7ee400345405f7539a",
+      "eventName": "Notification_notifyMOIChanges",
+      "eventType": "Cell",
+      "reportingEntityName": "NonRTRIC-CMS",
+      "lastEpochMicrosec": 1606197734860,
+      "priority": "Normal",
+      "sequence": 0,
+      "sourceName": "du-1",
+      "startEpochMicrosec": 1606197734860,
+      "version": "4.1",
+      "vesEventListenerVersion": "7.2"
+    },
+    "notificationFields": {
+      "additionalFields": {
+        "nfType": "du"
+      },
+      "arrayOfNamedHashMap": [
+        {
+          "name": "value",
+          "hashMap": {
+            "nrPCI": "873"
+          }
+        },
+        {
+          "name": "oldValue",
+          "hashMap": {
+            "nrPCI": "871"
+          }
+        }
+      ],
+      "changeIdentifier": "479e1247-77e2-308d-9dd5-47fef2a06676",
+      "changeType": "Notification_NRCellDUCellStateChanged",
+      "notificationFieldsVersion": "2.0"
+    }
+  }
+}
diff --git a/src/onaptests/templates/artifacts/ves_message_file_negative.json b/src/onaptests/templates/artifacts/ves_message_file_negative.json
new file mode 100644 (file)
index 0000000..5ddca50
--- /dev/null
@@ -0,0 +1,40 @@
+{
+  "event": {
+    "commonEventHeader": {
+      "domain": "notification",
+      "eventId": "5ed35a0328d2be7ee400345405f7539a",
+      "eventName": "Notification_notifyMOIChanges",
+      "eventType": "Cell",
+      "reportingEntityName": "NonRTRIC-CMS",
+      "lastEpochMicrosec": 1606197734860,
+      "priority": "Normal",
+      "sequence": 0,
+      "sourceName": "empty",
+      "startEpochMicrosec": 1606197734860,
+      "version": "4.1",
+      "vesEventListenerVersion": "7.2"
+    },
+    "notificationFields": {
+      "additionalFields": {
+        "nfType": "du"
+      },
+      "arrayOfNamedHashMap": [
+        {
+          "name": "value",
+          "hashMap": {
+            "nrPCI": "873"
+          }
+        },
+        {
+          "name": "oldValue",
+          "hashMap": {
+            "nrPCI": "871"
+          }
+        }
+      ],
+      "changeIdentifier": "479e1247-77e2-308d-9dd5-47fef2a06676",
+      "changeType": "Notification_NRCellDUCellStateChanged",
+      "notificationFieldsVersion": "2.0"
+    }
+  }
+}
diff --git a/src/onaptests/templates/slack-notifications/notifications.jinja b/src/onaptests/templates/slack-notifications/notifications.jinja
new file mode 100644 (file)
index 0000000..3ac4ee1
--- /dev/null
@@ -0,0 +1,124 @@
+[
+    {
+        "type": "section",
+        "text": {
+            "type": "mrkdwn",
+            "text": ":fire: *{{env}} <{{ test_suite_link }}|{{ test_suite_name}}> FAILURES*"
+        }
+    },
+    {
+        "type": "context",
+        "elements": [
+            {
+                "type": "mrkdwn",
+                {%- if components_failing == 1 %}
+                "text": ":slam:   *1 CATEGORY IS FAILING*"
+                {%- elif components_failing == "UNDEFINED" %}
+                "text": ":slam:   *DISCOVERED TEST ISSUES*"
+                {%- else %}
+                "text": ":slam:   *{{ components_failing }} CATEGORIES ARE FAILING*"
+                {%- endif %}
+            },
+            {
+                "type": "mrkdwn",
+                "text": "`{%- for component, failures in result_dict.items() %}{{component}}{%- if not loop.last %}, {% endif %}{%- endfor %}`"
+            }
+        ]
+    },
+    {
+        "type": "context",
+        "elements": [
+            {
+                "type": "mrkdwn",
+                "text": "Check tests' artifacts in Testkube for more details"
+            }
+        ]
+    },
+    {%- if has_test_error %}
+    {
+        "type": "context",
+        "elements": [
+            {
+                "type": "mrkdwn",
+                "text": "[?] Result requires a verification in test's logs"
+            }
+        ]
+    },
+    {%- endif %}
+    {
+        "type": "divider"
+    },
+    {%- for component, failures in result_dict.items() %}
+    {
+        "type": "section",
+        "text": {
+            "type": "mrkdwn",
+            {%- if failures['since'] > 1 %}
+            "text": "[{{ component }}] - since {{failures['since']}} days"
+            {%- else %}
+            "text": "[{{ component }}]"
+            {%- endif %}
+        }
+    },
+    {%- for test_failure in failures['results'] if not (test_failure['cleanup'] and ignore_cleanup_steps) %}
+    {%- if test_failure['foldable'] %}
+    {
+        "type": "context",
+        "elements": [
+            {
+                "type": "mrkdwn",
+                {%- if test_failure['link'] != "" %}
+                "text": ":white_small_square: The same in {%- if test_failure['questionable'] %} [?]{%- endif %} <{{ test_failure['link']|safe }}|{{ test_failure['name'] }}>: {{ test_failure['description'] }}"
+                {%- else %}
+                "text": ":white_small_square: The same in {%- if test_failure['questionable'] %} [?]{%- endif %} {{ test_failure['name'] }}: {{ test_failure['description'] }}"
+                {%- endif %}
+            }
+        ]
+    },
+    {%- else %}
+    {
+        "type": "section",
+        "text": {
+            "type": "mrkdwn",
+            {%- if test_failure['link'] != "" %}
+            "text": ":black_small_square: {%- if test_failure['questionable'] %} [?]{%- endif %} <{{ test_failure['link']|safe }}|{{ test_failure['name'] }}>: {{ test_failure['description'] }}"
+            {%- else %}
+            "text": ":black_small_square: {%- if test_failure['questionable'] %} [?]{%- endif %} {{ test_failure['name'] }}: {{ test_failure['description'] }}"
+            {%- endif %}
+        }
+    },
+    {%- endif %}
+    {%- if (test_failure['reasons'] | length) > 0 and not test_failure['foldable'] %}
+    {
+        "type": "context",
+        "elements": [
+                {%- for reason in test_failure['reasons'] %}
+                {%- if loop.index < 10 %}
+                {
+                    "type": "mrkdwn",
+                    "text": "`{{ reason[:75] | replace("\n", " ") }}{%- if (reason | length) > 75 %}(...){%- endif %}`"
+                }{%- if not loop.last and loop.index < 10 %},{%- endif %}
+                {%- endif %}
+                {%- if loop.index == 10 %}
+                {
+                    "type": "mrkdwn",
+                    "text": "`There is more resources failing. Check basic-status status-details.json`"
+                }
+                {%- endif %}
+                {%- endfor %}
+            ]
+    },
+    {%- endif %}
+    {%- endfor %}
+    {
+        "type": "divider"
+    },
+    {%- endfor %}
+    {
+        "type": "section",
+        "text": {
+            "type": "mrkdwn",
+            "text": ":tim: watches You. Good luck!"
+        }
+    }
+]
index b58e9f0..2551660 100644 (file)
@@ -3,6 +3,7 @@
     tosca_file_from_SDC: service-basic_cnf_macro-template
     version: "1.0"
     subscription_type: "basic_cnf_macro"
+    instantiation_type: "Macro"
     vnfs:
         - vnf_name: {{ service_name }}
           properties:
diff --git a/src/onaptests/templates/vnf-services/generic_network-service.yaml b/src/onaptests/templates/vnf-services/generic_network-service.yaml
new file mode 100644 (file)
index 0000000..2093c6b
--- /dev/null
@@ -0,0 +1,50 @@
+---
+SDBH-Network-svc:
+    tosca_file_from_SDC: service-basic_cnf_macro-template
+    version: "1.0"
+    subscription_type: "basic_cnf_macro"
+    instantiation_type: "Macro"
+    vnfs:
+        - vnf_name: sdbh-vnf
+          properties:
+              controller_actor: "CDS"
+              skip_post_instantiation_configuration: False
+              sdnc_artifact_name: "vnf"
+              sdnc_model_version: "1.0.0"
+              sdnc_model_name: "CBA_GNB_SIM_TEST"
+          heat_files_to_upload: templates/heat-files/basic_cnf_macro/basic_cnf_macro.zip
+          vnf_parameters: [
+              {
+                  "name": "k8s-rb-profile-namespace",
+                  "value": "onap-tests"
+              },
+              {
+                  "name": "k8s-rb-profile-k8s-version",
+                  "value": "1.19.0"
+              }
+          ]
+          vf_module_parameters:
+              - vf_module_name: helm_cuup
+                parameters: [
+                    {
+                        "name": "vf_module_label",
+                        "value": "helm_apache"
+                    },
+                    {
+                        "name": "k8s-rb-profile-name",
+                        "value": "node-port-profile"
+                    }
+                ]
+    networks:
+        - network_name: net_internal
+          vl_name: "Network"
+          subnets: [
+              {
+                  "subnet-name": "net_internal-subnet",
+                  "start-address": "10.200.0.0",
+                  "cidr-mask": "24",
+                  "ip-version": "4",
+                  "dhcp-enabled": False,
+                  "gateway-address": "10.200.0.1",
+              }
+          ]
diff --git a/src/onaptests/templates/vnf-services/instantiate_service_without_resource.yaml b/src/onaptests/templates/vnf-services/instantiate_service_without_resource.yaml
new file mode 100644 (file)
index 0000000..4ec70c4
--- /dev/null
@@ -0,0 +1,3 @@
+---
+service_without_resources:
+    instantiation_type: "Macro"
diff --git a/src/onaptests/templates/vnf-services/modify-service-pnf.yaml b/src/onaptests/templates/vnf-services/modify-service-pnf.yaml
new file mode 100644 (file)
index 0000000..ad26b09
--- /dev/null
@@ -0,0 +1,9 @@
+---
+testing-1209:
+      tosca_file_from_SDC: service-Testing1209-template
+      version: "1.0"
+      subscription_type: "net"
+      instantiation_type: "Macro"
+      pnfs:
+            - pnf_name: "ru-1309-new"
+              heat_files_to_upload: templates/artifacts/pNF.csar
index 0c13f0a..62d2698 100644 (file)
@@ -7,7 +7,3 @@
       pnfs:
             - pnf_name: "{{ service_name }}"
               heat_files_to_upload: templates/artifacts/pNF.csar
-              pnf_artifact_type: "CONTROLLER_BLUEPRINT_ARCHIVE"
-              pnf_artifact_name: "CBA_enriched.zip"
-              pnf_artifact_label: "cbapnf"
-              pnf_artifact_file_path: "/tmp/PNF_DEMO_enriched.zip"
index 0d7ecee..b28c02a 100644 (file)
@@ -4,6 +4,7 @@
 # are made available under the terms of the Apache License, Version 2.0
 # which accompanies this distribution, and is available at
 # http://www.apache.org/licenses/LICENSE-2.0
+
 """Module to define pythonsdk-test exceptions."""
 
 __author__ = "Morgan Richomme <morgan.richomme@orange.com>"
@@ -170,7 +171,19 @@ class DcaeException(OnapTestException):
         super().__init__(__message)
 
 
+class CbaVerificationException(OnapTestException):
+    """CBA verification Exception."""
+    def __init__(self, __message="CBA verification has not succeeded"):
+        super().__init__(__message)
+
+
 class StatusCheckException(OnapTestException):
     """Status Check exception."""
     def __init__(self, __message="Namespace status check has failed"):
         super().__init__(__message)
+
+
+class DataInconsistentException(OnapTestException):
+    """Data consistency check exception."""
+    def __init__(self, __message="Data inconsistency in AAI and CPS"):
+        super().__init__(__message)
diff --git a/src/onaptests/utils/gitlab.py b/src/onaptests/utils/gitlab.py
new file mode 100644 (file)
index 0000000..c053c65
--- /dev/null
@@ -0,0 +1,115 @@
+"""Helpers for reading data from GitLab."""
+
+from dataclasses import dataclass
+from typing import Any, Dict, List
+
+import requests
+from dacite import from_dict  # type: ignore
+from onapsdk.configuration import settings
+
+
+@dataclass
+class GitLabDirectoryEntry:
+    """Dataclass for holding directory items from GitLab."""
+
+    id: str
+    name: str
+    type: str
+    path: str
+    mode: str
+
+    @classmethod
+    def load(cls, data: Dict[str, Any]) -> "GitLabDirectoryEntry":
+        """Create a GitLabDirectoryEntry object from the dict.
+
+        Returns:
+            GitLabDirectoryEntry: GitLabDirectoryEntry object created from the dictionary
+        """
+        return from_dict(data_class=cls, data=data)  # type: ignore
+
+
+class GitLabClient:
+    """Class that provides methods to read directory entries and files from GitLab."""
+
+    TIMEOUT = 60
+
+    @classmethod
+    def _get_headers(cls):
+        """Generates headers for rest request"""
+        return {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+            "PRIVATE-TOKEN": settings.GITLAB_ACCESS_TKN,
+        }
+
+    @classmethod
+    def get_directory_entries(
+        cls, commit_ref: str, directory_path: str, gitlab_id: str,
+    ) -> List[GitLabDirectoryEntry]:
+        """Get a list of all directory entries from a GitLab project directory.
+
+        Args:
+            commit_ref (str): The branch name or commit id in GitLab to be read.
+            directory_path (str): The path to the directory in the GitLab project.
+            gitlab_id (str): ID of Gitlab project to be read.
+        Returns:
+             List[GitLabDirectoryEntry]: A list of directory entries.
+        """
+        gitlab_url = f"{settings.GITLAB_BASE_URL}/projects/{gitlab_id}/repository/tree"
+        params = {"path": directory_path, "ref": commit_ref}
+        dir_list = requests.get(
+            gitlab_url, params=params, headers=cls._get_headers(), timeout=cls.TIMEOUT
+        ).json()
+        return [GitLabDirectoryEntry.load(dir_entry) for dir_entry in dir_list]
+
+    @classmethod
+    def get_text_file_content(cls, commit_ref: str, file_path: str, gitlab_id: str,) -> str:
+        """Get the content of a text file from a GitLab project directory.
+
+        Args:
+            commit_ref (str): The branch name or commit id in GitLab to be read.
+            file_path (str): Path to the file, including file name, in the GitLab project.
+            gitlab_id (str): ID of Gitlab project to be read.
+        Returns:
+             str: The content of the file.
+        """
+        path = file_path.replace("/", "%2F")
+        gitlab_url = f"{settings.GITLAB_BASE_URL}/projects/{gitlab_id}/repository/files/{path}/raw"
+        params = {"ref": commit_ref}
+        return requests.get(
+            gitlab_url, params=params, headers=cls._get_headers(), timeout=cls.TIMEOUT
+        ).text
+
+    @classmethod
+    def download_repo_archive(
+        cls,
+        commit_ref: str,
+        git_directory_path: str,
+        directory_path: str,
+        zip_name: str,
+        gitlab_id: str,
+    ) -> str:
+        """Get the content GitLab project directory as zip-archive.
+
+        Args:
+            commit_ref (str): The branch name or commit id in GitLab to be read.
+            git_directory_path (str): The path to the directory in the Git repository to be downloaded.
+            directory_path (str): The path where the zip file will be stored.
+            zip_name (str): Name of the zip archive file.
+            gitlab_id (str): ID of Gitlab project to be read.
+        Returns:
+             str: Path to the downloaded zip archive.
+        """
+        gitlab_url = (
+            f"{settings.GITLAB_BASE_URL}/projects/{gitlab_id}/repository/archive.zip"
+        )
+        params = {"sha": commit_ref, "path": git_directory_path}
+        request = requests.get(
+            url=gitlab_url, params=params, headers=cls._get_headers(), timeout=cls.TIMEOUT
+        )
+
+        path = f"{directory_path}/{zip_name}"
+        with open(path, "wb") as zip_ref:
+            zip_ref.write(request.content)
+
+        return path
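+
+# Minimal usage sketch (illustration only): assumes GITLAB_BASE_URL and GITLAB_ACCESS_TKN
+# are configured in the onapsdk settings and "12345" is a placeholder project id.
+#
+#   entries = GitLabClient.get_directory_entries("master", "tests", "12345")
+#   files = [e.path for e in entries if e.type == "blob"]
+#   content = GitLabClient.get_text_file_content("master", files[0], "12345")
+#   archive = GitLabClient.download_repo_archive("master", "tests", "/tmp", "tests.zip", "12345")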
index b86326d..927dfef 100644 (file)
@@ -37,11 +37,21 @@ class KubernetesHelper:
         try:
             secret = api_instance.read_namespaced_secret(secret_name, namespace)
             if secret.data:
-                if (login_key in secret.data and password_key in secret.data):
-                    login_base64 = secret.data[login_key]
-                    login = base64.b64decode(login_base64).decode("utf-8")
+                error = False
+                login = None
+                password = None
+                if login_key:
+                    if login_key in secret.data:
+                        login_base64 = secret.data[login_key]
+                        login = base64.b64decode(login_base64).decode("utf-8")
+                    else:
+                        error = True
+                if password_key in secret.data:
                     password_base64 = secret.data[password_key]
                     password = base64.b64decode(password_base64).decode("utf-8")
+                else:
+                    error = True
+                if not error:
                     return login, password
                 raise EnvironmentPreparationException(
                     "Login key or password key not found in secret")
diff --git a/src/onaptests/utils/kubernetes_kafka.py b/src/onaptests/utils/kubernetes_kafka.py
new file mode 100644 (file)
index 0000000..0168c36
--- /dev/null
@@ -0,0 +1,117 @@
+import base64
+import json
+import logging
+from pathlib import Path
+
+from jinja2 import Environment, FileSystemLoader
+from kubernetes import client, config
+from kubernetes.client.exceptions import ApiException
+from onapsdk.configuration import settings
+
+from onaptests.utils.exceptions import OnapTestException
+
+logger = logging.getLogger(__name__)
+
+
+def create_topic(topic_name):
+    """
+    Creates a Kafka topic in the Kubernetes cluster.
+
+    Args:
+        topic_name (str): The name of the Kafka topic to create.
+    """
+    config.load_incluster_config()
+    api_instance = client.CustomObjectsApi()
+
+    environment = Environment(loader=FileSystemLoader(Path(__file__).parent.parent
+                                                      .joinpath("templates/artifacts/")))
+    template = environment.get_template("create_kafka_topic_template.json.j2")
+
+    create_topic_json_parameter = {
+        "namespace": settings.KUBERNETES_NAMESPACE,
+        "topicName": settings.TOPIC_NAME,
+    }
+
+    create_topic_json = template.render(create_topic_json_parameter)
+    body = json.loads(create_topic_json)
+
+    try:
+        api_response = api_instance.create_namespaced_custom_object(
+            group=settings.KUBERNETES_API_GROUP,
+            version=settings.KUBERNETES_API_VERSION,
+            namespace=settings.KUBERNETES_NAMESPACE,
+            plural=settings.KUBERNETES_API_PLURAL,
+            body=body
+        )
+        logger.info("Kafka topic created successfully: %s", api_response)
+    except Exception as ce:
+        logger.error("Failed to create topic on Kafka: %s", {str(ce)})
+        raise OnapTestException(ce) from ce
+
+
+def delete_topic(topic_name):
+    """
+    Deletes a Kafka topic from the Kubernetes cluster.
+
+    Args:
+        topic_name (str): The name of the Kafka topic to delete.
+    """
+    config.load_incluster_config()
+    api_instance = client.CustomObjectsApi()
+
+    try:
+        api_instance.delete_namespaced_custom_object(
+            group=settings.KUBERNETES_API_GROUP,
+            version=settings.KUBERNETES_API_VERSION,
+            namespace=settings.KUBERNETES_NAMESPACE,
+            plural=settings.KUBERNETES_API_PLURAL,
+            name=topic_name,
+            body=client.V1DeleteOptions(),
+        )
+        logger.info("Kafka topic deleted successfully: %s", topic_name)
+    except ApiException as ce:
+        logger.error("Failed to delete topic on Kafka: %s", {str(ce)})
+        raise OnapTestException(ce) from ce
+
+
+class KubernetesKafka:
+    """
+    Handles operations related to Kafka on Kubernetes.
+    """
+
+    def __init__(self):
+        self.namespace = settings.KUBERNETES_NAMESPACE
+        self.secret_name = settings.KAFKA_USER
+        self.secret = None
+
+    def read_kafka_admin_secret(self):
+        """
+        Reads the Kafka admin secret from Kubernetes.
+
+        Returns:
+            dict: The secret data.
+        """
+        try:
+            config.load_incluster_config()
+            v1 = client.CoreV1Api()
+            self.secret = v1.read_namespaced_secret(name=self.secret_name, namespace=self.namespace)
+            return self.secret
+        except ApiException as e:
+            logger.error("Exception when calling CoreV1Api->read_namespaced_secret: %s", e)
+            return None
+
+    def get_kafka_admin_password(self):
+        """
+        Retrieves the Kafka admin password from the secret data.
+
+        Returns:
+            str: The decoded Kafka admin password.
+        """
+        if self.secret:
+            secret_data = self.secret.data
+            password_key = "password"
+            if password_key in secret_data:
+                encoded_password = secret_data[password_key]
+                decoded_password = base64.b64decode(encoded_password).decode('utf-8')
+                return decoded_password
+        return None
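+
+# Minimal usage sketch (illustration only, "test-topic" is a placeholder; assumes the
+# KUBERNETES_API_GROUP/VERSION/PLURAL and KAFKA_USER settings point at the cluster's
+# Kafka topic CRD and admin user secret):
+#
+#   create_topic("test-topic")
+#   kafka = KubernetesKafka()
+#   kafka.read_kafka_admin_secret()
+#   admin_password = kafka.get_kafka_admin_password()
+#   delete_topic("test-topic")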
diff --git a/src/onaptests/utils/ntp_checker.py b/src/onaptests/utils/ntp_checker.py
new file mode 100644 (file)
index 0000000..5154b56
--- /dev/null
@@ -0,0 +1,35 @@
+import http.server
+import json
+import time
+
+
+class NtpCheckerHandler(http.server.BaseHTTPRequestHandler):
+    """Basic HTTP server that can provide time difference information"""
+
+    def do_POST(self):  # noqa
+        """"Exposes /local-time-status that returns
+            time difference with reference to other
+            time provided as a request payload
+        """
+        if self.path == "/local-time-status":
+            arrival_time = int(time.time() * 1000)
+            time_diff = None
+            if self.headers['Content-Length']:
+                data_string = self.rfile.read(int(self.headers['Content-Length']))
+                data = json.loads(data_string)
+                if data["time"]:
+                    reference_time = int(data["time"])
+                    time_diff = int(arrival_time - reference_time)
+                    print(f"Time Diff: {time_diff}")  # noqa
+            self.send_response(200)
+            self.send_header("Content-type", "application/json")
+            self.end_headers()
+            response = {"time": time_diff, "arrival": int(arrival_time * 1000)}
+            self.wfile.write(json.dumps(response).encode())
+        else:
+            self.send_error(404, "Not Found")
+
+
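+# Example client call (sketch, "localhost" stands in for the pod address): the payload
+# carries the caller's epoch time in milliseconds and the response reports the
+# difference measured on this side.
+#   import requests, time
+#   requests.post("http://localhost:8000/local-time-status",
+#                 json={"time": int(time.time() * 1000)}, timeout=10)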
+server = http.server.HTTPServer(("", 8000), NttpCheckerHandler)
+print("Starting server on port 8000")  # noqa
+server.serve_forever()
diff --git a/src/onaptests/utils/slack.py b/src/onaptests/utils/slack.py
new file mode 100644 (file)
index 0000000..d600d08
--- /dev/null
@@ -0,0 +1,43 @@
+import time
+
+from slack_sdk import WebClient
+
+
+class SlackHelper:
+    """"Helper methods to send slack messages"""
+
+    @classmethod
+    def send_slack_message(cls, channel: str, token: str, text: str, blocks=None, retry=3):
+        """"Send slack message with retry"""
+        slack_client = WebClient(token=token)
+        while True:
+            try:
+                slack_client.chat_postMessage(
+                    channel=channel,
+                    text=text,
+                    blocks=blocks
+                )
+                return
+            except Exception as e:
+                retry -= 1
+                if retry <= 0:
+                    raise e
+                time.sleep(5)
+
+    @classmethod
+    def send_slack_file(cls, channel: str, token: str, text: str, file_path: str, retry=3):
+        """"Send slack message with retry"""
+        slack_client = WebClient(token=token)
+        while True:
+            try:
+                slack_client.files_upload(
+                    channels=channel,
+                    initial_comment=text,
+                    file=file_path
+                )
+                return
+            except Exception as e:
+                retry -= 1
+                if retry <= 0:
+                    raise e
+                time.sleep(5)
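+
+# Minimal usage sketch (illustration only, channel name and token are hypothetical):
+#
+#   SlackHelper.send_slack_message("#onap-tests", "xoxb-token", "Daily results are ready")
+#   SlackHelper.send_slack_file("#onap-tests", "xoxb-token", "Full report",
+#                               "/tmp/status-details.json")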
diff --git a/tox.ini b/tox.ini
index 6c1a015..2292c42 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -1,11 +1,11 @@
 [tox]
 minversion = 3.2.0
-envlist = json,yaml,md,pylama,validate
+envlist = json,yaml,md,pylama,isort-check,validate
 skipsdist = true
 requires = pip >= 8
 
 [testenv]
-basepython = python3.8
+basepython = python3.9
 allowlist_externals =
   git
   /bin/sh
@@ -40,14 +40,6 @@ commands_pre =
 commands =
     /bin/bash -c "coala --non-interactive --disable-caching --no-autoapply-warn py --files $(</tmp/.coalist_py) \ "
 
-[testenv:rst]
-commands_pre =
-    /bin/sh -c "git --no-pager diff HEAD HEAD^ --name-only '*.rst' > /tmp/.coalist_rst"
-deps = -r ./docs/requirements-docs.txt
-commands =
-  /bin/sh -c "sphinx-build -n -W -b html docs docs/build/html $(</tmp/.coalist_rst)"
-  /bin/sh -c "sphinx-build -n -W -b linkcheck docs docs/build/linkcheck $(</tmp/.coalist_rst)"
-
 [testenv:md]
 commands_pre =
     nodeenv -p --verbose