genericparser seed code 48/85748/1
author dyh <dengyuanhong@chinamobile.com>
Fri, 19 Apr 2019 05:57:46 +0000 (13:57 +0800)
committer dyh <dengyuanhong@chinamobile.com>
Fri, 19 Apr 2019 06:01:10 +0000 (14:01 +0800)
Change-Id: Id15ac689c1d560619bf6c699fb0786e7381d3def
Issue-ID: MODELING-153
Signed-off-by: dyh <dengyuanhong@chinamobile.com>
167 files changed:
.gitignore [new file with mode: 0644]
.gitreview [new file with mode: 0644]
INFO.yaml [new file with mode: 0644]
LICENSE [new file with mode: 0644]
README.md [new file with mode: 0644]
assembly.xml [new file with mode: 0644]
docker/Dockerfile [new file with mode: 0644]
docker/LICENSE [new file with mode: 0644]
docker/build_image.sh [new file with mode: 0644]
docker/docker-entrypoint.sh [new file with mode: 0644]
docker/docker-env-conf.sh [new file with mode: 0644]
docker/instance_config.sh [new file with mode: 0644]
docker/instance_init.sh [new file with mode: 0644]
docker/instance_run.sh [new file with mode: 0644]
genericparser/__init__.py [new file with mode: 0644]
genericparser/jobs/__init__.py [new file with mode: 0644]
genericparser/jobs/job_get.py [new file with mode: 0644]
genericparser/jobs/tests/__init__.py [new file with mode: 0644]
genericparser/jobs/tests/tests.py [new file with mode: 0644]
genericparser/jobs/urls.py [new file with mode: 0644]
genericparser/jobs/views.py [new file with mode: 0644]
genericparser/log.yml [new file with mode: 0644]
genericparser/middleware.py [new file with mode: 0644]
genericparser/packages/__init__.py [new file with mode: 0644]
genericparser/packages/biz/__init__.py [new file with mode: 0644]
genericparser/packages/biz/common.py [new file with mode: 0644]
genericparser/packages/biz/ns_descriptor.py [new file with mode: 0644]
genericparser/packages/biz/nsdm_subscription.py [new file with mode: 0644]
genericparser/packages/biz/pnf_descriptor.py [new file with mode: 0644]
genericparser/packages/biz/sdc_ns_package.py [new file with mode: 0644]
genericparser/packages/biz/sdc_service_package.py [new file with mode: 0644]
genericparser/packages/biz/sdc_vnf_package.py [new file with mode: 0644]
genericparser/packages/biz/service_descriptor.py [new file with mode: 0644]
genericparser/packages/biz/vnf_package.py [new file with mode: 0644]
genericparser/packages/biz/vnf_pkg_artifacts.py [new file with mode: 0644]
genericparser/packages/biz/vnf_pkg_subscription.py [new file with mode: 0644]
genericparser/packages/const.py [new file with mode: 0644]
genericparser/packages/serializers/__init__.py [new file with mode: 0644]
genericparser/packages/serializers/checksum.py [new file with mode: 0644]
genericparser/packages/serializers/create_nsd_info_request.py [new file with mode: 0644]
genericparser/packages/serializers/create_pnfd_info_request.py [new file with mode: 0644]
genericparser/packages/serializers/create_vnf_pkg_info_req.py [new file with mode: 0644]
genericparser/packages/serializers/genericparser_serializers.py [new file with mode: 0644]
genericparser/packages/serializers/link.py [new file with mode: 0644]
genericparser/packages/serializers/nsd_info.py [new file with mode: 0644]
genericparser/packages/serializers/nsd_infos.py [new file with mode: 0644]
genericparser/packages/serializers/nsdm_filter_data.py [new file with mode: 0644]
genericparser/packages/serializers/nsdm_subscription.py [new file with mode: 0644]
genericparser/packages/serializers/pnfd_info.py [new file with mode: 0644]
genericparser/packages/serializers/pnfd_infos.py [new file with mode: 0644]
genericparser/packages/serializers/problem_details.py [new file with mode: 0644]
genericparser/packages/serializers/response.py [new file with mode: 0644]
genericparser/packages/serializers/subscription_auth_data.py [new file with mode: 0644]
genericparser/packages/serializers/upload_vnf_pkg_from_uri_req.py [new file with mode: 0644]
genericparser/packages/serializers/vnf_pkg_artifact_info.py [new file with mode: 0644]
genericparser/packages/serializers/vnf_pkg_info.py [new file with mode: 0644]
genericparser/packages/serializers/vnf_pkg_infos.py [new file with mode: 0644]
genericparser/packages/serializers/vnf_pkg_notifications.py [new file with mode: 0644]
genericparser/packages/serializers/vnf_pkg_software_image_info.py [new file with mode: 0644]
genericparser/packages/serializers/vnf_pkg_subscription.py [new file with mode: 0644]
genericparser/packages/tests/__init__.py [new file with mode: 0644]
genericparser/packages/tests/const.py [new file with mode: 0644]
genericparser/packages/tests/test_health_check.py [new file with mode: 0644]
genericparser/packages/tests/test_ns_descriptor.py [new file with mode: 0644]
genericparser/packages/tests/test_nsdm_subscription.py [new file with mode: 0644]
genericparser/packages/tests/test_nspackage.py [new file with mode: 0644]
genericparser/packages/tests/test_pnf_descriptor.py [new file with mode: 0644]
genericparser/packages/tests/test_service_descriptor.py [new file with mode: 0644]
genericparser/packages/tests/test_servicepackage.py [new file with mode: 0644]
genericparser/packages/tests/test_vnf_package.py [new file with mode: 0644]
genericparser/packages/tests/test_vnf_pkg_subscription.py [new file with mode: 0644]
genericparser/packages/tests/test_vnfpackage.py [new file with mode: 0644]
genericparser/packages/urls.py [new file with mode: 0644]
genericparser/packages/views/__init__.py [new file with mode: 0644]
genericparser/packages/views/catalog_views.py [new file with mode: 0644]
genericparser/packages/views/common.py [new file with mode: 0644]
genericparser/packages/views/health_check_views.py [new file with mode: 0644]
genericparser/packages/views/ns_descriptor_views.py [new file with mode: 0644]
genericparser/packages/views/nsdm_subscription_views.py [new file with mode: 0644]
genericparser/packages/views/pnf_descriptor_views.py [new file with mode: 0644]
genericparser/packages/views/vnf_package_artifact_views.py [new file with mode: 0644]
genericparser/packages/views/vnf_package_subscription_views.py [new file with mode: 0644]
genericparser/packages/views/vnf_package_views.py [new file with mode: 0644]
genericparser/pub/__init__.py [new file with mode: 0644]
genericparser/pub/config/__init__.py [new file with mode: 0644]
genericparser/pub/config/config.py [new file with mode: 0644]
genericparser/pub/database/__init__.py [new file with mode: 0644]
genericparser/pub/database/migrations/0001_initial.py [new file with mode: 0644]
genericparser/pub/database/migrations/__init__.py [new file with mode: 0644]
genericparser/pub/database/models.py [new file with mode: 0644]
genericparser/pub/exceptions.py [new file with mode: 0644]
genericparser/pub/msapi/__init__.py [new file with mode: 0644]
genericparser/pub/msapi/extsys.py [new file with mode: 0644]
genericparser/pub/msapi/sdc.py [new file with mode: 0644]
genericparser/pub/ssl/cert/foobar.crt [new file with mode: 0644]
genericparser/pub/ssl/cert/foobar.csr [new file with mode: 0644]
genericparser/pub/ssl/cert/foobar.key [new file with mode: 0644]
genericparser/pub/utils/__init__.py [new file with mode: 0644]
genericparser/pub/utils/fileutil.py [new file with mode: 0644]
genericparser/pub/utils/idutil.py [new file with mode: 0644]
genericparser/pub/utils/jobutil.py [new file with mode: 0644]
genericparser/pub/utils/restcall.py [new file with mode: 0644]
genericparser/pub/utils/syscomm.py [new file with mode: 0644]
genericparser/pub/utils/tests.py [new file with mode: 0644]
genericparser/pub/utils/timeutil.py [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/__init__.py [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/basemodel.py [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/const.py [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/dataentityext.py [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/graph.py [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/nsdmodel.py [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/pnfmodel.py [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/sdmodel.py [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/servicemodel.py [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/ns/ran.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/ns/service-vIMS.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/pnf/ran-du.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/vnf/vSBC.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/infra.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbng.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbrgemu.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgmux.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgw.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/infra.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbng.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbrgemu.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgmux.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgw.csar [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/tests.py [new file with mode: 0644]
genericparser/pub/utils/toscaparsers/vnfdmodel.py [new file with mode: 0644]
genericparser/pub/utils/values.py [new file with mode: 0644]
genericparser/samples/__init__.py [new file with mode: 0644]
genericparser/samples/tests.py [new file with mode: 0644]
genericparser/samples/urls.py [new file with mode: 0644]
genericparser/samples/views.py [new file with mode: 0644]
genericparser/settings.py [new file with mode: 0644]
genericparser/swagger/__init__.py [new file with mode: 0644]
genericparser/swagger/management/__init__.py [new file with mode: 0644]
genericparser/swagger/management/commands/__init__.py [new file with mode: 0644]
genericparser/swagger/management/commands/export_swagger.py [new file with mode: 0644]
genericparser/swagger/tests.py [new file with mode: 0644]
genericparser/swagger/urls.py [new file with mode: 0644]
genericparser/swagger/vfc.catalog.swagger.json [new file with mode: 0644]
genericparser/swagger/views.py [new file with mode: 0644]
genericparser/urls.py [new file with mode: 0644]
genericparser/wsgi.py [new file with mode: 0644]
initialize.sh [new file with mode: 0644]
logs/empty.txt [new file with mode: 0644]
manage.py [new file with mode: 0644]
mvn-phase-script.sh [new file with mode: 0644]
pom.xml [new file with mode: 0644]
requirements.txt [new file with mode: 0644]
resources/bin/initDB.sh [new file with mode: 0644]
resources/dbscripts/mysql/modeling-gengricparser-createdb.sql [new file with mode: 0644]
resources/resource-TestFyx-template.yml [new file with mode: 0644]
resources/service-TestServiceFyx-template.yml [new file with mode: 0644]
run.sh [new file with mode: 0644]
static/genericparser/22/nsd_content.txt [new file with mode: 0644]
static/genericparser/22/pnfd_content.txt [new file with mode: 0644]
static/genericparser/222/empty.txt [new file with mode: 0644]
static/genericparser/222/hss.csar [new file with mode: 0644]
static/genericparser/222/resource_test.csar [new file with mode: 0644]
static/genericparser/empty.txt [new file with mode: 0644]
static/genericparser/resource_test.csar [new file with mode: 0644]
stop.sh [new file with mode: 0644]
tox.ini [new file with mode: 0644]
version.properties [new file with mode: 0644]

diff --git a/.gitignore b/.gitignore
new file mode 100644 (file)
index 0000000..e6ae545
--- /dev/null
@@ -0,0 +1,8 @@
+logs/*.log
+*.pyc
+.idea
+.tox
+target
+htmlcov
+.coverage
+test-reports/*
diff --git a/.gitreview b/.gitreview
new file mode 100644 (file)
index 0000000..f95f78a
--- /dev/null
@@ -0,0 +1,4 @@
+[gerrit]
+host=gerrit.onap.org
+port=29418
+project=vfc/nfvo/catalog
\ No newline at end of file
diff --git a/INFO.yaml b/INFO.yaml
new file mode 100644 (file)
index 0000000..64e4f42
--- /dev/null
+++ b/INFO.yaml
@@ -0,0 +1,154 @@
+---
+project: 'vfc-nfvo-genericparser'
+project_creation_date: '2016-08-08'
+lifecycle_state: 'Incubation'
+project_lead: &onap_releng_ptl
+    name: 'Yan Yang'
+    email: 'yangyanyj@chinamobile.com'
+    id: 'yangyan'
+    company: 'China Mobile'
+    timezone: 'Asia/Shanghai'
+project_category: ''
+primary_contact: *onap_releng_ptl
+issue_tracking:
+    type: 'jira'
+    url: 'https://jira.onap.org/projects/VFC'
+    key: 'VFC'
+mailing_list:
+    type: 'groups.io'
+    url: 'lists.onap.org'
+    tag: '<[sub-project_name]>'
+realtime_discussion: ''
+meetings:
+    - type: 'zoom'
+      agenda: 'https://wiki.onap.org/pages/viewpage.action?pageId=6590138'
+      url: 'https://wiki.onap.org/display/DW/Virtual+Function+Controller+Project'
+      server: 'n/a'
+      channel: 'n/a'
+      repeats: 'weekly'
+      time: '09:00 UTC'
+repositories:
+    - 'vfc-gvnfm-vnflcm'
+    - 'vfc-gvnfm-vnfmgr'
+    - 'vfc-gvnfm-vnfres'
+    - 'vfc-nfvo-genericparser'
+    - 'vfc-nfvo-db'
+    - 'vfc-nfvo-driver-ems'
+    - 'vfc-nfvo-driver-sfc'
+    - 'vfc-nfvo-driver-vnfm-gvnfm'
+    - 'vfc-nfvo-driver-vnfm-svnfm'
+    - 'vfc-nfvo-lcm'
+    - 'vfc-nfvo-multivimproxy'
+    - 'vfc-nfvo-resmanagement'
+    - 'vfc-nfvo-wfengine'
+    - 'vfc-oom'
+committers:
+    - <<: *onap_releng_ptl
+    - name: 'Anatoly Andrianov'
+      email: 'anatoly.andrianov@nokia.com'
+      company: 'Nokia'
+      id: 'caa028'
+      timezone: 'America/Chicago'
+    - name: 'Fu Jinhua'
+      email: 'fu.jinhua@zte.com.cn'
+      company: 'ZTE'
+      id: 'fujinhua'
+      timezone: 'Asia/Shanghai'
+    - name: 'Victor Gao'
+      email: 'victor.gao@huawei.com'
+      company: 'Huawei'
+      id: 'g310497'
+      timezone: 'Asia/Shanghai'
+    - name: 'han yanan'
+      email: 'hanyanan@raisecom.com'
+      company: 'Raisecom'
+      id: 'hanyanan'
+      timezone: 'Asia/Shanghai'
+    - name: 'lu xin'
+      email: 'luxin7@huawei.com'
+      company: 'Huawei'
+      id: 'l345485'
+      timezone: 'Asia/Shanghai'
+    - name: 'maopeng zhang'
+      email: 'zhang.maopeng1@zte.com.cn'
+      company: 'ZTE'
+      id: 'maopengzhang'
+      timezone: 'Asia/Shanghai'
+    - name: 'Kanagaraj Manickam'
+      email: 'kanagaraj.manickam@huawei.com'
+      company: 'Huawei'
+      id: 'mkr1481'
+      timezone: 'Asia/Kolkata'
+    - name: 'yunlong ying'
+      email: 'ying.yunlong@zte.com.cn'
+      company: 'ZTE'
+      id: 'ying.yunlong'
+      timezone: 'Asia/Shanghai'
+    - name: 'Yog Vashishth'
+      email: 'yog.vashishth@ril.com'
+      company: 'Ril'
+      id: 'yogvashishth'
+      timezone: 'Asia/Kolkata'
+    - name: 'Lingli Deng'
+      email: 'denglingli@chinamobile.com'
+      company: 'China Mobile'
+      id: 'denglingli'
+      timezone: 'Asia/Shanghai'
+    - name: 'Nagesha Subramanya'
+      email: 'nagesha.subramanya@nokia.com'
+      company: 'Nokia'
+      id: 'hsnagesh'
+      timezone: 'Asia/Kolkata'
+    - name: 'Xinhui Li'
+      email: 'lxinhui@vmware.com'
+      company: 'VMWare'
+      id: 'xinhuili'
+      timezone: 'Asia/Shanghai'
+    - name: 'Guirong Wang'
+      email: 'wangguirong@boco.com.cn'
+      company: 'Boco'
+      id: 'Wang_Guirong'
+      timezone: 'Asia/Shanghai'
+    - name: 'Adityakar Jha'
+      email: 'Adityakar.Jha@ril.com'
+      company: 'Ril'
+      id: 'adityakar.jha'
+      timezone: 'Asia/Kolkata'
+    - name: 'Hu Dong'
+      email: 'donghu@raisecom.com'
+      company: 'Raisecom'
+      id: 'donghu1102'
+      timezone: 'Asia/Shanghai'
+    - name: 'Denes Nemeth'
+      email: 'denes.nemeth@nokia.com'
+      company: 'Nokia'
+      id: 'thelittlemouse'
+      timezone: 'Europe/Budapest'
+    - name: 'Haibin Huang'
+      email: 'haibin.huang@intel.com'
+      company: 'Intel'
+      id: 'haibin'
+      timezone: 'Asia/Shanghai'
+    - name: 'LiLai'
+      email: 'lai.li@zte.com.cn'
+      company: 'ZTE'
+      id: 'laili'
+      timezone: 'Asia/Shanghai'
+    - name: 'Bharath Thiruveedula'
+      email: 'bharath.thiruveedula@verizon.com'
+      company: 'Verizon'
+      id: 'bharaththiruveedula'
+      timezone: 'Asia/Kolkata'
+    - name: 'Ruoyu Ying'
+      email: 'ruoyu.ying@intel.com'
+      company: 'Intel'
+      id: 'Ruoyu'
+      timezone: 'Asia/Shanghai'
+
+tsc:
+    approval: 'https://lists.onap.org/pipermail/onap-tsc'
+    changes:
+        - type: 'Addition'
+          name: 'Denes Nemeth'
+          link: 'https://lists.onap.org/g/ONAP-TSC/message/3269'
+        - type: 'Addition'
+          name: 'Haibin Huang'
+          link: 'https://lists.onap.org/g/ONAP-TSC/message/3269'
diff --git a/LICENSE b/LICENSE
new file mode 100644 (file)
index 0000000..1586716
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,15 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Micro service of nfvo catalog.
diff --git a/README.md b/README.md
new file mode 100644 (file)
index 0000000..81db212
--- /dev/null
+++ b/README.md
@@ -0,0 +1,34 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Micro service of nfvo genericparser.
+
+1. Code structure guide
+   ./         project files
+   ./docker   docker related scripts
+   ./logs     log file
+   ./genericparser  genericparser management
+       ./packages      package life cycle API& logic
+             ./               API url definition
+             ./views          API related views, each operation is a view
+             ./serializers    API related request and response parameters.
+                              Suggest related to sol003/sol005, each datatype is a file.
+                              Common datatypes are put into the common file
+             ./biz            Package management business logic files
+             ./tests          All the test cases. At least each API should have a test case
+       ./jobs      Related job
+       ./pub       Common class, including database, external micro service API, utils, and config parameters.
+       ./samples   genericparser micro service health check
+       ./swagger   Auto-generate genericparser swagger json or yaml files
+   ./static/genericparser  package storage
diff --git a/assembly.xml b/assembly.xml
new file mode 100644 (file)
index 0000000..5913dab
--- /dev/null
@@ -0,0 +1,80 @@
+<!--
+    Copyright 2017 ZTE Corporation.
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+            http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0" 
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
+    <id>catalog</id>
+    <formats>
+        <format>zip</format>
+    </formats>
+    <fileSets>
+        <fileSet>
+            <directory>catalog</directory>
+            <outputDirectory>/catalog</outputDirectory>
+            <includes>
+                <include>**/*.py</include>
+                <include>**/*.json</include>
+                <include>**/*.xml</include>
+                <include>**/*.yml</include>
+                <include>**/*.sh</include>
+                <include>**/*.crt</include>
+                <include>**/*.key</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>logs</directory>
+            <outputDirectory>/logs</outputDirectory>
+            <includes>
+                <include>*.txt</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>resources</directory>
+            <outputDirectory>/resources</outputDirectory>
+            <includes>
+                <include>**/*.sh</include>
+                <include>**/*.sql</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>docker</directory>
+            <outputDirectory>/docker</outputDirectory>
+            <includes>
+                <include>*.sh</include>
+                <include>Dockerfile</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>static</directory>
+            <outputDirectory>/static</outputDirectory>
+            <includes>
+                <include>**/*.txt</include>
+            </includes>
+        </fileSet>
+        <fileSet>
+            <directory>.</directory>
+            <outputDirectory>/</outputDirectory>
+            <includes>
+                <include>*.py</include>
+                <include>*.txt</include>
+                <include>*.sh</include>
+                <include>*.ini</include>
+                <include>*.md</include>
+            </includes>
+        </fileSet>
+    </fileSets>
+    <baseDirectory>vfc/nfvo/catalog</baseDirectory>
+</assembly>
diff --git a/docker/Dockerfile b/docker/Dockerfile
new file mode 100644 (file)
index 0000000..8cdea82
--- /dev/null
@@ -0,0 +1,17 @@
+FROM python:2-alpine
+
+ARG HTTP_PROXY=${HTTP_PROXY}
+ARG HTTPS_PROXY=${HTTPS_PROXY}
+ENV http_proxy $HTTP_PROXY
+ENV https_proxy $HTTPS_PROXY
+ADD . /service
+WORKDIR /service
+
+RUN sh /service/docker-env-conf.sh
+
+EXPOSE 8806
+USER onap
+WORKDIR /service
+
+#ENTRYPOINT vfc/nfvo/genericparser/docker/docker-entrypoint.sh
+ENTRYPOINT modeling/genericparser/docker/docker-entrypoint.sh
diff --git a/docker/LICENSE b/docker/LICENSE
new file mode 100644 (file)
index 0000000..5c767c3
--- /dev/null
@@ -0,0 +1,473 @@
+THIS LICENSE FILE CONTAINS THE LICENSE APPLICABLE DEPENDING ON THE TYPE OF CONTRIBUTIONS.
+
+APACHE LICENSE 2 IS APPLICABLE FOR SOURCE CODE, CREATIVE COMMONS ATTRIBUTION 4.0 INTERNATIONAL FOR DOCUMENTATION
+
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+
+Apache License
+Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License.
+
+Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License.
+
+Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution.
+
+You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
+You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+
+5. Submission of Contributions.
+
+Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+
+6. Trademarks.
+
+This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty.
+
+Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability.
+
+In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability.
+
+While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+Attribution 4.0 International
+
+https://creativecommons.org/licenses/by/4.0/legalcode
+
+---------------------------------------------------------------------------------------
+
+Creative Commons Corporation ("Creative Commons") is not a law firm and
+does not provide legal services or legal advice. Distribution of
+Creative Commons public licenses does not create a lawyer-client or
+other relationship. Creative Commons makes its licenses and related
+information available on an "as-is" basis. Creative Commons gives no
+warranties regarding its licenses, any material licensed under their
+terms and conditions, or any related information. Creative Commons
+disclaims all liability for damages resulting from their use to the
+fullest extent possible.
+
+Using Creative Commons Public Licenses
+
+Creative Commons public licenses provide a standard set of terms and
+conditions that creators and other rights holders may use to share
+original works of authorship and other material subject to copyright
+and certain other rights specified in the public license below. The
+following considerations are for informational purposes only, are not
+exhaustive, and do not form part of our licenses.
+
+     Considerations for licensors: Our public licenses are
+     intended for use by those authorized to give the public
+     permission to use material in ways otherwise restricted by
+     copyright and certain other rights. Our licenses are
+     irrevocable. Licensors should read and understand the terms
+     and conditions of the license they choose before applying it.
+     Licensors should also secure all rights necessary before
+     applying our licenses so that the public can reuse the
+     material as expected. Licensors should clearly mark any
+     material not subject to the license. This includes other CC-
+     licensed material, or material used under an exception or
+     limitation to copyright. More considerations for licensors:
+     wiki.creativecommons.org/Considerations_for_licensors
+
+     Considerations for the public: By using one of our public
+     licenses, a licensor grants the public permission to use the
+     licensed material under specified terms and conditions. If
+     the licensor's permission is not necessary for any reason--for
+     example, because of any applicable exception or limitation to
+     copyright--then that use is not regulated by the license. Our
+     licenses grant only permissions under copyright and certain
+     other rights that a licensor has authority to grant. Use of
+     the licensed material may still be restricted for other
+     reasons, including because others have copyright or other
+     rights in the material. A licensor may make special requests,
+     such as asking that all changes be marked or described.
+     Although not required by our licenses, you are encouraged to
+     respect those requests where reasonable. More_considerations
+     for the public:
+     wiki.creativecommons.org/Considerations_for_licensees
+
+---------------------------------------------------------------------------------------
+
+Creative Commons Attribution 4.0 International Public License
+
+By exercising the Licensed Rights (defined below), You accept and agree
+to be bound by the terms and conditions of this Creative Commons
+Attribution 4.0 International Public License ("Public License"). To the
+extent this Public License may be interpreted as a contract, You are
+granted the Licensed Rights in consideration of Your acceptance of
+these terms and conditions, and the Licensor grants You such rights in
+consideration of benefits the Licensor receives from making the
+Licensed Material available under these terms and conditions.
+
+
+Section 1 -- Definitions.
+
+  a. Adapted Material means material subject to Copyright and Similar
+     Rights that is derived from or based upon the Licensed Material
+     and in which the Licensed Material is translated, altered,
+     arranged, transformed, or otherwise modified in a manner requiring
+     permission under the Copyright and Similar Rights held by the
+     Licensor. For purposes of this Public License, where the Licensed
+     Material is a musical work, performance, or sound recording,
+     Adapted Material is always produced where the Licensed Material is
+     synched in timed relation with a moving image.
+
+  b. Adapter's License means the license You apply to Your Copyright
+     and Similar Rights in Your contributions to Adapted Material in
+     accordance with the terms and conditions of this Public License.
+
+  c. Copyright and Similar Rights means copyright and/or similar rights
+     closely related to copyright including, without limitation,
+     performance, broadcast, sound recording, and Sui Generis Database
+     Rights, without regard to how the rights are labeled or
+     categorized. For purposes of this Public License, the rights
+     specified in Section 2(b)(1)-(2) are not Copyright and Similar
+     Rights.
+
+  d. Effective Technological Measures means those measures that, in the
+     absence of proper authority, may not be circumvented under laws
+     fulfilling obligations under Article 11 of the WIPO Copyright
+     Treaty adopted on December 20, 1996, and/or similar international
+     agreements.
+
+  e. Exceptions and Limitations means fair use, fair dealing, and/or
+     any other exception or limitation to Copyright and Similar Rights
+     that applies to Your use of the Licensed Material.
+
+  f. Licensed Material means the artistic or literary work, database,
+     or other material to which the Licensor applied this Public
+     License.
+
+  g. Licensed Rights means the rights granted to You subject to the
+     terms and conditions of this Public License, which are limited to
+     all Copyright and Similar Rights that apply to Your use of the
+     Licensed Material and that the Licensor has authority to license.
+
+  h. Licensor means the individual(s) or entity(ies) granting rights
+     under this Public License.
+
+  i. Share means to provide material to the public by any means or
+     process that requires permission under the Licensed Rights, such
+     as reproduction, public display, public performance, distribution,
+     dissemination, communication, or importation, and to make material
+     available to the public including in ways that members of the
+     public may access the material from a place and at a time
+     individually chosen by them.
+
+  j. Sui Generis Database Rights means rights other than copyright
+     resulting from Directive 96/9/EC of the European Parliament and of
+     the Council of 11 March 1996 on the legal protection of databases,
+     as amended and/or succeeded, as well as other essentially
+     equivalent rights anywhere in the world.
+
+  k. You means the individual or entity exercising the Licensed Rights
+     under this Public License. Your has a corresponding meaning.
+
+
+Section 2 -- Scope.
+
+  a. License grant.
+
+       1. Subject to the terms and conditions of this Public License,
+          the Licensor hereby grants You a worldwide, royalty-free,
+          non-sublicensable, non-exclusive, irrevocable license to
+          exercise the Licensed Rights in the Licensed Material to:
+
+            a. reproduce and Share the Licensed Material, in whole or
+               in part; and
+
+            b. produce, reproduce, and Share Adapted Material.
+
+       2. Exceptions and Limitations. For the avoidance of doubt, where
+          Exceptions and Limitations apply to Your use, this Public
+          License does not apply, and You do not need to comply with
+          its terms and conditions.
+
+       3. Term. The term of this Public License is specified in Section
+          6(a).
+
+       4. Media and formats; technical modifications allowed. The
+          Licensor authorizes You to exercise the Licensed Rights in
+          all media and formats whether now known or hereafter created,
+          and to make technical modifications necessary to do so. The
+          Licensor waives and/or agrees not to assert any right or
+          authority to forbid You from making technical modifications
+          necessary to exercise the Licensed Rights, including
+          technical modifications necessary to circumvent Effective
+          Technological Measures. For purposes of this Public License,
+          simply making modifications authorized by this Section 2(a)
+          (4) never produces Adapted Material.
+
+       5. Downstream recipients.
+
+            a. Offer from the Licensor -- Licensed Material. Every
+               recipient of the Licensed Material automatically
+               receives an offer from the Licensor to exercise the
+               Licensed Rights under the terms and conditions of this
+               Public License.
+
+            b. No downstream restrictions. You may not offer or impose
+               any additional or different terms or conditions on, or
+               apply any Effective Technological Measures to, the
+               Licensed Material if doing so restricts exercise of the
+               Licensed Rights by any recipient of the Licensed
+               Material.
+
+       6. No endorsement. Nothing in this Public License constitutes or
+          may be construed as permission to assert or imply that You
+          are, or that Your use of the Licensed Material is, connected
+          with, or sponsored, endorsed, or granted official status by,
+          the Licensor or others designated to receive attribution as
+          provided in Section 3(a)(1)(A)(i).
+
+  b. Other rights.
+
+       1. Moral rights, such as the right of integrity, are not
+          licensed under this Public License, nor are publicity,
+          privacy, and/or other similar personality rights; however, to
+          the extent possible, the Licensor waives and/or agrees not to
+          assert any such rights held by the Licensor to the limited
+          extent necessary to allow You to exercise the Licensed
+          Rights, but not otherwise.
+
+       2. Patent and trademark rights are not licensed under this
+          Public License.
+
+       3. To the extent possible, the Licensor waives any right to
+          collect royalties from You for the exercise of the Licensed
+          Rights, whether directly or through a collecting society
+          under any voluntary or waivable statutory or compulsory
+          licensing scheme. In all other cases the Licensor expressly
+          reserves any right to collect such royalties.
+
+
+Section 3 -- License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the
+following conditions.
+
+  a. Attribution.
+
+       1. If You Share the Licensed Material (including in modified
+          form), You must:
+
+            a. retain the following if it is supplied by the Licensor
+               with the Licensed Material:
+
+                 i. identification of the creator(s) of the Licensed
+                    Material and any others designated to receive
+                    attribution, in any reasonable manner requested by
+                    the Licensor (including by pseudonym if
+                    designated);
+
+                ii. a copyright notice;
+
+               iii. a notice that refers to this Public License;
+
+                iv. a notice that refers to the disclaimer of
+                    warranties;
+
+                 v. a URI or hyperlink to the Licensed Material to the
+                    extent reasonably practicable;
+
+            b. indicate if You modified the Licensed Material and
+               retain an indication of any previous modifications; and
+
+            c. indicate the Licensed Material is licensed under this
+               Public License, and include the text of, or the URI or
+               hyperlink to, this Public License.
+
+       2. You may satisfy the conditions in Section 3(a)(1) in any
+          reasonable manner based on the medium, means, and context in
+          which You Share the Licensed Material. For example, it may be
+          reasonable to satisfy the conditions by providing a URI or
+          hyperlink to a resource that includes the required
+          information.
+
+       3. If requested by the Licensor, You must remove any of the
+          information required by Section 3(a)(1)(A) to the extent
+          reasonably practicable.
+
+       4. If You Share Adapted Material You produce, the Adapter's
+          License You apply must not prevent recipients of the Adapted
+          Material from complying with this Public License.
+
+
+Section 4 -- Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that
+apply to Your use of the Licensed Material:
+
+  a. for the avoidance of doubt, Section 2(a)(1) grants You the right
+     to extract, reuse, reproduce, and Share all or a substantial
+     portion of the contents of the database;
+
+  b. if You include all or a substantial portion of the database
+     contents in a database in which You have Sui Generis Database
+     Rights, then the database in which You have Sui Generis Database
+     Rights (but not its individual contents) is Adapted Material; and
+
+  c. You must comply with the conditions in Section 3(a) if You Share
+     all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not
+replace Your obligations under this Public License where the Licensed
+Rights include other Copyright and Similar Rights.
+
+
+Section 5 -- Disclaimer of Warranties and Limitation of Liability.
+
+  a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE
+     EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS
+     AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
+     ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,
+     IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,
+     WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
+     PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,
+     ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT
+     KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT
+     ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.
+
+  b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE
+     TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,
+     NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,
+     INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,
+     COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR
+     USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN
+     ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR
+     DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR
+     IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.
+
+  c. The disclaimer of warranties and limitation of liability provided
+     above shall be interpreted in a manner that, to the extent
+     possible, most closely approximates an absolute disclaimer and
+     waiver of all liability.
+
+
+Section 6 -- Term and Termination.
+
+  a. This Public License applies for the term of the Copyright and
+     Similar Rights licensed here. However, if You fail to comply with
+     this Public License, then Your rights under this Public License
+     terminate automatically.
+
+  b. Where Your right to use the Licensed Material has terminated under
+     Section 6(a), it reinstates:
+
+       1. automatically as of the date the violation is cured, provided
+          it is cured within 30 days of Your discovery of the
+          violation; or
+
+       2. upon express reinstatement by the Licensor.
+
+     For the avoidance of doubt, this Section 6(b) does not affect any
+     right the Licensor may have to seek remedies for Your violations
+     of this Public License.
+
+  c. For the avoidance of doubt, the Licensor may also offer the
+     Licensed Material under separate terms or conditions or stop
+     distributing the Licensed Material at any time; however, doing so
+     will not terminate this Public License.
+
+  d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
+     License.
+
+
+Section 7 -- Other Terms and Conditions.
+
+  a. The Licensor shall not be bound by any additional or different
+     terms or conditions communicated by You unless expressly agreed.
+
+  b. Any arrangements, understandings, or agreements regarding the
+     Licensed Material not stated herein are separate from and
+     independent of the terms and conditions of this Public License.
+
+
+Section 8 -- Interpretation.
+
+  a. For the avoidance of doubt, this Public License does not, and
+     shall not be interpreted to, reduce, limit, restrict, or impose
+     conditions on any use of the Licensed Material that could lawfully
+     be made without permission under this Public License.
+
+  b. To the extent possible, if any provision of this Public License is
+     deemed unenforceable, it shall be automatically reformed to the
+     minimum extent necessary to make it enforceable. If the provision
+     cannot be reformed, it shall be severed from this Public License
+     without affecting the enforceability of the remaining terms and
+     conditions.
+
+  c. No term or condition of this Public License will be waived and no
+     failure to comply consented to unless expressly agreed to by the
+     Licensor.
+
+  d. Nothing in this Public License constitutes or may be interpreted
+     as a limitation upon, or waiver of, any privileges and immunities
+     that apply to the Licensor or You, including from the legal
+     processes of any jurisdiction or authority.
+
+---------------------------------------------------------------------------------------
+
+Creative Commons is not a party to its public
+licenses. Notwithstanding, Creative Commons may elect to apply one of
+its public licenses to material it publishes and in those instances
+will be considered the "Licensor." The text of the Creative Commons
+public licenses is dedicated to the public domain under the CC0 Public
+Domain Dedication. Except for the limited purpose of indicating that
+material is shared under a Creative Commons public license or as
+otherwise permitted by the Creative Commons policies published at
+creativecommons.org/policies, Creative Commons does not authorize the
+use of the trademark "Creative Commons" or any other trademark or logo
+of Creative Commons without its prior written consent including,
+without limitation, in connection with any unauthorized modifications
+to any of its public licenses or any other arrangements,
+understandings, or agreements concerning use of licensed material. For
+the avoidance of doubt, this paragraph does not form part of the
+public licenses.
+
+Creative Commons may be contacted at creativecommons.org.
\ No newline at end of file
diff --git a/docker/build_image.sh b/docker/build_image.sh
new file mode 100644 (file)
index 0000000..d8deaba
--- /dev/null
@@ -0,0 +1,46 @@
+#!/bin/bash
+DIRNAME=`dirname $0`
+DOCKER_BUILD_DIR=`cd $DIRNAME/; pwd`
+echo "DOCKER_BUILD_DIR=${DOCKER_BUILD_DIR}"
+cd ${DOCKER_BUILD_DIR}
+
+BUILD_ARGS="--no-cache"
+ORG="onap"
+VERSION="1.3.0"
+#PROJECT=" vfc"
+PROJECT="modeling"
+IMAGE="genericparser"
+DOCKER_REPOSITORY="nexus3.onap.org:10003"
+IMAGE_NAME="${DOCKER_REPOSITORY}/${ORG}/${PROJECT}/${IMAGE}"
+TIMESTAMP=$(date +"%Y%m%dT%H%M%S")
+
+if [ $HTTP_PROXY ]; then
+    BUILD_ARGS+=" --build-arg HTTP_PROXY=${HTTP_PROXY}"
+fi
+if [ $HTTPS_PROXY ]; then
+    BUILD_ARGS+=" --build-arg HTTPS_PROXY=${HTTPS_PROXY}"
+fi
+
+function build_image {
+    echo "Start build docker image: ${IMAGE_NAME}"
+    docker build ${BUILD_ARGS} -t ${IMAGE_NAME}:latest .
+}
+
+function push_image_tag {
+    TAG_NAME=$1
+    echo "Start push ${TAG_NAME}"
+    docker tag ${IMAGE_NAME}:latest ${TAG_NAME}
+    docker push ${TAG_NAME}
+}
+
+function push_image {
+    echo "Start push ${IMAGE_NAME}:latest"
+    docker push ${IMAGE_NAME}:latest
+    
+    push_image_tag ${IMAGE_NAME}:${VERSION}-SNAPSHOT-latest
+    push_image_tag ${IMAGE_NAME}:${VERSION}-STAGING-latest
+    push_image_tag ${IMAGE_NAME}:${VERSION}-STAGING-${TIMESTAMP}
+}
+
+build_image
+push_image
diff --git a/docker/docker-entrypoint.sh b/docker/docker-entrypoint.sh
new file mode 100644 (file)
index 0000000..debbf44
--- /dev/null
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+find  /service -name '*.sh'|xargs chmod a+x
+
+if [ -z "$SERVICE_IP" ]; then
+    export SERVICE_IP=`hostname -i`
+fi
+echo "SERVICE_IP=$SERVICE_IP"
+
+if [ -z "$MSB_ADDR" ]; then
+    echo "Missing required variable MSB_ADDR: Microservices Service Bus address <ip>:<port>"
+    exit 1
+fi
+echo "MSB_ADDR=$MSB_ADDR"
+
+if [ -z "$MYSQL_ADDR" ]; then
+    echo "Missing required variable MYSQL_ADDR: <ip>:<port>"
+    exit 1
+fi
+echo "MYSQL_ADDR=$MYSQL_ADDR"
+
+# Wait for MSB initialization
+echo "Wait for MSB initialization"
+for i in {1..5}; do
+    curl -sS -m 1 $MSB_ADDR > /dev/null && break
+    sleep $i
+done
+
+# Wait for DB initialization
+echo "Wait for DB initialization"
+for i in {1..5}; do
+    curl -sS -m 1 $MYSQL_ADDR > /dev/null && break
+    sleep $i
+done
+
+# Configure service based on docker environment variables
+#vfc/nfvo/genericparser/docker/instance_config.sh
+modeling/genericparser/docker/instance_config.sh
+
+# microservice-specific one-time initialization
+#vfc/nfvo/genericparser/docker/instance_init.sh
+modeling/genericparser/docker/instance_init.sh
+
+date > init.log
+
+# Start the microservice
+#vfc/nfvo/genericparser/docker/instance_run.sh
+modeling/genericparser/docker/instance_run.sh
diff --git a/docker/docker-env-conf.sh b/docker/docker-env-conf.sh
new file mode 100644 (file)
index 0000000..d94a195
--- /dev/null
@@ -0,0 +1,60 @@
+#!/bin/bash
+
# Install build/runtime dependencies and fetch the service binary zip from
# nexus.onap.org.  Requires network access to the alpine and pypi mirrors.
install_sf(){

    sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories
    apk --no-cache update
    apk --no-cache add bash curl gcc wget mysql-client openssl-dev
    apk --no-cache add python-dev libffi-dev musl-dev py2-virtualenv

    # get binary zip from nexus - vfc-nfvo-genericparser
    # BUGFIX: the original chain ended in a dangling '&& \' before blank and
    # comment lines, relying on bash implicitly skipping them; the chain is
    # now contiguous.  Duplicated commented-out commands were removed.
    wget -q -O vfc-nfvo-genericparser.zip 'https://nexus.onap.org/service/local/artifact/maven/redirect?r=snapshots&g=org.onap.vfc.nfvo.genericparser&a=vfc-nfvo-genericparser&v=LATEST&e=zip' && \
    unzip vfc-nfvo-genericparser.zip && \
    rm -rf vfc-nfvo-genericparser.zip && \
    pip install --upgrade setuptools pip -i https://mirrors.aliyun.com/pypi/simple/ && \
    pip install --no-cache-dir --pre -r  /service/modeling/genericparser/requirements.txt -i https://mirrors.aliyun.com/pypi/simple/
}
+
# Create an unprivileged 'onap' user/group (uid/gid 1000), grant it
# password-less sudo, and hand ownership of /service to it.
add_user(){

    apk --no-cache add sudo
    addgroup -g 1000 -S onap && \
    adduser onap -D -G onap -u 1000 && \
    chmod u+w /etc/sudoers && \
    sed -i '/User privilege/a\\onap    ALL=(ALL:ALL) NOPASSWD:ALL' /etc/sudoers && \
    # NOTE(review): 'u-x' looks like it was meant to be 'u-w' (undoing the
    # 'chmod u+w' above to re-lock sudoers) — confirm.
    chmod u-x /etc/sudoers && \
    sudo chown onap:onap -R /service
}
+
# Ensure the shared ONAP log directory exists, is owned by the service user,
# and is setgid so files created inside it inherit the group.
config_logdir(){

    # Create the directory on first run only.
    [ -d "/var/log/onap" ] || sudo mkdir /var/log/onap

    sudo chown onap:onap -R /var/log/onap
    chmod g+s /var/log/onap
}
+
# Drop apk and pip caches plus /tmp leftovers to keep the image small.
clean_sf_cache(){
    rm -rf /var/cache/apk/* /root/.cache/pip/* /tmp/*
}
+
install_sf
# NOTE(review): 'wait' only waits for background jobs and nothing above runs
# in the background, so this is a no-op — confirm it can be dropped.
wait
add_user
config_logdir
clean_sf_cache
+
+
+
diff --git a/docker/instance_config.sh b/docker/instance_config.sh
new file mode 100644 (file)
index 0000000..a03c41a
--- /dev/null
@@ -0,0 +1,42 @@
#!/bin/bash
# Rewrite the service configuration (pub/config/config.py) from the container
# environment: MSB endpoint, advertised service IP, MySQL and Redis hosts.

CONFIG_FILE=modeling/genericparser/genericparser/pub/config/config.py

MSB_IP=`echo $MSB_ADDR | cut -d: -f 1`
MSB_PORT=`echo $MSB_ADDR | cut -d: -f 2`

if [ $MSB_IP ]; then
    sed -i "s|MSB_SERVICE_IP.*|MSB_SERVICE_IP = '$MSB_IP'|" $CONFIG_FILE
fi

if [ $MSB_PORT ]; then
    sed -i "s|MSB_SERVICE_PORT.*|MSB_SERVICE_PORT = '$MSB_PORT'|" $CONFIG_FILE
fi

if [ $SERVICE_IP ]; then
    sed -i "s|\"ip\": \".*\"|\"ip\": \"$SERVICE_IP\"|" $CONFIG_FILE
fi

MYSQL_IP=`echo $MYSQL_ADDR | cut -d: -f 1`
MYSQL_PORT=`echo $MYSQL_ADDR | cut -d: -f 2`
echo "MYSQL_ADDR=$MYSQL_ADDR"

if [ $REDIS_ADDR ]; then
    REDIS_IP=`echo $REDIS_ADDR | cut -d: -f 1`
else
    # BUGFIX: fall back to the MySQL *host* only; the original used the full
    # $MYSQL_ADDR (host:port), which is not a valid host value for REDIS_HOST.
    REDIS_IP="$MYSQL_IP"
fi

sed -i "s|DB_IP.*|DB_IP = '$MYSQL_IP'|" $CONFIG_FILE
sed -i "s|DB_PORT.*|DB_PORT = $MYSQL_PORT|" $CONFIG_FILE
# BUGFIX: the original was missing the space before the file name, fusing it
# into the sed expression, so REDIS_HOST was never rewritten.
sed -i "s|REDIS_HOST.*|REDIS_HOST = '$REDIS_IP'|" $CONFIG_FILE

cat $CONFIG_FILE
diff --git a/docker/instance_init.sh b/docker/instance_init.sh
new file mode 100644 (file)
index 0000000..949f51d
--- /dev/null
@@ -0,0 +1,29 @@
#!/bin/bash
# One-time instance initialization: create the service database, then run the
# Django schema migrations against it.

MYSQL_IP=`echo $MYSQL_ADDR | cut -d: -f 1`
MYSQL_PORT=`echo $MYSQL_ADDR | cut -d: -f 2`

# MYSQL_AUTH is an optional "user:password"; default to root/root.
if [ $MYSQL_AUTH ]; then
    MYSQL_ROOT_USER=`echo $MYSQL_AUTH | cut -d: -f 1`
    MYSQL_ROOT_PASSWORD=`echo $MYSQL_AUTH | cut -d: -f 2`
else
    MYSQL_ROOT_USER="root"
    MYSQL_ROOT_PASSWORD="root"
fi

# Create the database/schema via the packaged initDB.sh.
# NOTE(review): argument order (user password port ip) must match what
# initDB.sh expects — confirm against resources/bin/initDB.sh.
function create_database {

#    cd /service/vfc/nfvo/genericparser/resources/bin
    cd /service/modeling/genericparser/resources/bin
    bash initDB.sh $MYSQL_ROOT_USER $MYSQL_ROOT_PASSWORD $MYSQL_PORT $MYSQL_IP

 }

# Apply Django migrations from the service root.
function migrate_database {
#    cd /service/vfc/nfvo/genericparser
    cd /service/modeling/genericparser
    python manage.py migrate
}

create_database
migrate_database
diff --git a/docker/instance_run.sh b/docker/instance_run.sh
new file mode 100644 (file)
index 0000000..aa3e21c
--- /dev/null
@@ -0,0 +1,11 @@
#!/bin/bash
# Start the microservice and stream its runtime log to stdout so the container
# stays in the foreground.

#cd /service/vfc/nfvo/genericparser
cd /service/modeling/genericparser

./run.sh

# Block until the service creates its log file, then follow it.
# NOTE(review): genericparser/log.yml configures logs/runtime_gengricparser.log,
# not logs/runtime_catalog.log — if run.sh does not override the log config,
# this loop never terminates.  Confirm which file run.sh actually writes.
while [ ! -f logs/runtime_catalog.log ]; do
    sleep 1
done
tail -F logs/runtime_catalog.log
diff --git a/genericparser/__init__.py b/genericparser/__init__.py
new file mode 100644 (file)
index 0000000..68cf954
--- /dev/null
@@ -0,0 +1,16 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
import pymysql

# Let PyMySQL act as a drop-in replacement for MySQLdb so Django's MySQL
# backend works without the native C client library.
pymysql.install_as_MySQLdb()
diff --git a/genericparser/jobs/__init__.py b/genericparser/jobs/__init__.py
new file mode 100644 (file)
index 0000000..c7b6818
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/jobs/job_get.py b/genericparser/jobs/job_get.py
new file mode 100644 (file)
index 0000000..56e20b6
--- /dev/null
@@ -0,0 +1,46 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from genericparser.pub.utils.jobutil import JobUtil
+
+logger = logging.getLogger(__name__)
+
+
class GetJobInfoService(object):
    """Assemble the status-report payload for a single job."""

    def __init__(self, job_id, response_id=0):
        self.job_id = job_id
        # Normalize a falsy response_id (None, '', 0) to 0.
        self.response_id = response_id or 0

    def do_biz(self):
        """Return the job-status dict; just {"jobId": ...} when no records exist."""
        logger.debug("[getjob]job_id=%s, response_id=%s", self.job_id, self.response_id)
        records = JobUtil.query_job_status(self.job_id, self.response_id)
        if not records:
            return {"jobId": self.job_id}

        def as_entry(rec):
            # Shape shared by the latest record and each history entry.
            return {
                "status": rec.status,
                "progress": rec.progress,
                "statusDescription": rec.descp,
                "errorCode": rec.errcode,
                "responseId": rec.indexid,
            }

        descriptor = as_entry(records[0])
        descriptor["responseHistoryList"] = [as_entry(rec) for rec in records[1:]]
        return {"jobId": self.job_id, "responseDescriptor": descriptor}
diff --git a/genericparser/jobs/tests/__init__.py b/genericparser/jobs/tests/__init__.py
new file mode 100644 (file)
index 0000000..c7b6818
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/jobs/tests/tests.py b/genericparser/jobs/tests/tests.py
new file mode 100644 (file)
index 0000000..42231bf
--- /dev/null
@@ -0,0 +1,40 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from django.test import TestCase, Client
+from rest_framework import status
+
+from genericparser.pub.database.models import JobModel, JobStatusModel
+
+
class JobsViewTest(TestCase):
    """Tests for the job-status endpoint /api/genericparser/v1/jobs/<job_id>."""

    def setUp(self):
        self.job_id = 'test_job_id'
        self.client = Client()

    def tearDown(self):
        # BUGFIX: also clean JobStatusModel — the original only deleted
        # JobModel rows and leaked status records between tests.
        JobModel.objects.all().delete()
        JobStatusModel.objects.all().delete()

    def test_job_normal(self):
        """An existing job answers 200."""
        JobModel(jobid=self.job_id, jobtype='VNF', jobaction='INST', resid='1').save()
        JobStatusModel(indexid=1, jobid=self.job_id, status='inst', errcode='0', progress=20, descp='inst').save()
        response = self.client.get("/api/genericparser/v1/jobs/%s" % self.job_id)
        # BUGFIX: failUnlessEqual is a long-deprecated alias removed in
        # Python 3.12; use assertEqual.
        self.assertEqual(status.HTTP_200_OK, response.status_code)

    def test_job_when_jobid_not_exist(self):
        """An unknown job id still answers with a bare jobId, no descriptor."""
        job_id = 'test_new_job_id'
        JobModel(jobid=self.job_id, jobtype='VNF', jobaction='INST', resid='1').save()
        JobStatusModel(indexid=1, jobid=self.job_id, status='inst', progress=20, descp='inst').save()
        response = self.client.get("/api/genericparser/v1/jobs/%s" % job_id)
        self.assertIn('jobId', response.data)
        self.assertNotIn('responseDescriptor', response.data)
diff --git a/genericparser/jobs/urls.py b/genericparser/jobs/urls.py
new file mode 100644 (file)
index 0000000..d484703
--- /dev/null
@@ -0,0 +1,20 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from django.conf.urls import url
+
+from genericparser.jobs.views import JobView
+
# Route job-status queries/updates to JobView; job_id accepts UUID-style and
# plain alphanumeric identifiers (letters, digits, '_' and '-').
urlpatterns = [
    url(r'^api/genericparser/v1/jobs/(?P<job_id>[0-9a-zA-Z_-]+)$', JobView.as_view()),
]
diff --git a/genericparser/jobs/views.py b/genericparser/jobs/views.py
new file mode 100644 (file)
index 0000000..b9c5121
--- /dev/null
@@ -0,0 +1,124 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from drf_yasg import openapi
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework import status
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+from genericparser.jobs.job_get import GetJobInfoService
+from genericparser.packages.serializers.genericparser_serializers import GetJobResponseSerializer
+from genericparser.packages.serializers.genericparser_serializers import PostJobRequestSerializer
+from genericparser.packages.serializers.genericparser_serializers import PostJobResponseResultSerializer
+from genericparser.pub.utils.jobutil import JobUtil
+from genericparser.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
class JobView(APIView):
    """Query (GET) and update (POST) the status of an asynchronous job."""

    input_job_id = openapi.Parameter(
        'job_id',
        openapi.IN_QUERY,
        description="job id",
        type=openapi.TYPE_STRING)
    input_response_id = openapi.Parameter(
        'responseId',
        openapi.IN_QUERY,
        description="response id",
        type=openapi.TYPE_STRING)

    @swagger_auto_schema(
        operation_description="Get job status",
        manual_parameters=[input_job_id, input_response_id],
        responses={
            status.HTTP_200_OK: GetJobResponseSerializer(),
            status.HTTP_500_INTERNAL_SERVER_ERROR: PostJobResponseResultSerializer()
        })
    def get(self, request, job_id):
        """Return the job's latest status plus its history of updates."""
        # NOTE(review): responseId is read from request.META; query parameters
        # normally live in request.query_params — confirm how callers send it.
        response_id = ignore_case_get(request.META, 'responseId')
        ret = GetJobInfoService(job_id, response_id).do_biz()
        response_serializer = GetJobResponseSerializer(data=ret)
        validation_error = self.handleValidatonError(
            response_serializer, False)
        if validation_error:
            return validation_error

        return Response(
            data=response_serializer.data,
            status=status.HTTP_200_OK)

    @swagger_auto_schema(
        request_body=PostJobRequestSerializer(),
        operation_description="Update job status",
        manual_parameters=[input_job_id],
        responses={
            status.HTTP_202_ACCEPTED: PostJobResponseResultSerializer(),
            status.HTTP_500_INTERNAL_SERVER_ERROR: PostJobResponseResultSerializer()
        }
    )
    def post(self, request, job_id):
        """Append a status record to the job; no-op once the job has failed."""
        job_result_ok = {'result': 'ok'}

        logger.debug("Enter JobView:post, %s, %s ", job_id, request.data)
        jobs = JobUtil.query_job_status(job_id)
        # A job whose latest record is errcode 255 (failed) is not updated again.
        if len(jobs) > 0 and jobs[-1].errcode == '255':
            return Response(data=job_result_ok)

        request_serializer = PostJobRequestSerializer(data=request.data)
        validation_error = self.handleValidatonError(
            request_serializer, True)
        # BUGFIX: the original condition was inverted ('if not ...'), which
        # made post() return None for every valid request; bail out only when
        # validation actually failed.
        if validation_error:
            return validation_error

        requestData = request_serializer.data
        progress = ignore_case_get(requestData, "progress")
        desc = ignore_case_get(requestData, "desc", '%s' % progress)
        # Map the (stringly-typed) errcode flag onto '0' (ok) / '255' (failed).
        errcode = '0' if ignore_case_get(
            requestData, 'errcode') in (
            'true', 'active') else '255'
        logger.debug("errcode=%s", errcode)
        JobUtil.add_job_status(job_id, progress, desc, error_code=errcode)

        response_serializer = PostJobResponseResultSerializer(
            data=job_result_ok)
        validation_error = self.handleValidatonError(
            response_serializer, False)
        if validation_error:
            return validation_error

        return Response(
            data=response_serializer.data,
            status=status.HTTP_202_ACCEPTED)

    def handleValidatonError(self, base_serializer, is_request):
        """Return a 500 Response when the serializer is invalid, else None."""
        response = None

        if not base_serializer.is_valid():
            errormessage = base_serializer.errors
            logger.error(errormessage)

            message = 'Invalid request' if is_request else 'Invalid response'
            logger.error(message)

            # BUGFIX: the original constructed this Response but never
            # assigned or returned it, so validation failures were silently
            # swallowed and the method always returned None.
            response = Response(
                data={'result': message, 'msg': errormessage},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        return response
diff --git a/genericparser/log.yml b/genericparser/log.yml
new file mode 100644 (file)
index 0000000..2a7bcfa
--- /dev/null
@@ -0,0 +1,50 @@
version: 1
disable_existing_loggers: False

loggers:
    # BUGFIX: the logger key was the misspelled "gengricparser"; application
    # loggers are created via logging.getLogger(__name__) under the
    # "genericparser" package, so the handlers never attached to them.
    genericparser:
        handlers: [genericparserlocal_handler, genericparser_handler]
        level: "DEBUG"
        propagate: False
    django:
        handlers: [django_handler]
        level: "DEBUG"
        propagate: False
handlers:
    # NOTE(review): log *file paths* keep the original (misspelled) names so
    # any external tooling that tails them keeps working — confirm before
    # renaming the files themselves.
    genericparserlocal_handler:
        level: "DEBUG"
        class:
            "logging.handlers.RotatingFileHandler"
        filename: "logs/runtime_gengricparser.log"
        formatter:
            "standard"
        maxBytes: 52428800
        backupCount: 10
    genericparser_handler:
        level: "DEBUG"
        class:
            "logging.handlers.RotatingFileHandler"
        filename: "/var/log/onap/vfc/genericparser/runtime_gengricparser.log"
        formatter:
            "mdcFormat"
        maxBytes: 52428800
        backupCount: 10
    django_handler:
        level: "DEBUG"
        class:
            "logging.handlers.RotatingFileHandler"
        filename: "logs/django.log"
        formatter:
            "standard"
        maxBytes: 52428800
        backupCount: 10
formatters:
    standard:
        format:
            "%(asctime)s:[%(name)s]:[%(filename)s]-[%(lineno)d] [%(levelname)s]:%(message)s"
    mdcFormat:
        format:
            "%(asctime)s|||||%(name)s||%(thread)s||%(funcName)s||%(levelname)s||%(message)s||||%(mdc)s \t"
        mdcfmt: "{requestID} {invocationID} {serviceName} {serviceIP}"
        datefmt: "%Y-%m-%d %H:%M:%S"
        (): onaplogging.mdcformatter.MDCFormatter
diff --git a/genericparser/middleware.py b/genericparser/middleware.py
new file mode 100644 (file)
index 0000000..8cb8f45
--- /dev/null
@@ -0,0 +1,60 @@
+# Copyright (c) 2017-2018 ZTE, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+
+import uuid
+from onaplogging.mdcContext import MDC
+
+from genericparser.pub.config.config import FORWARDED_FOR_FIELDS, SERVICE_NAME
+
+
class LogContextMiddleware(object):
    """Django middleware that seeds the ONAP MDC logging context per request.

    process_request records a request id, a per-service invocation id, the
    service name and the caller's IP in the MDC; process_response clears the
    context again so it cannot leak across requests.
    NOTE(review): this is old-style (pre-Django-1.10) middleware — confirm it
    is wired via MIDDLEWARE_CLASSES or wrapped in MiddlewareMixin.
    """

    #  the last IP behind multiple proxies,  if no exist proxies
    #  get local host ip.
    def _getLastIp(self, request):
        # Scan every configured forwarding header; the loop does not break,
        # so the LAST header listed in FORWARDED_FOR_FIELDS wins.
        ip = ""
        try:
            for field in FORWARDED_FOR_FIELDS:
                if field in request.META:
                    if ',' in request.META[field]:
                        # Proxy chain: take the last address, drop any :port.
                        parts = request.META[field].split(',')
                        ip = parts[-1].strip().split(":")[0]
                    else:
                        ip = request.META[field].split(":")[0]

            if ip == "":
                # No forwarding header matched: fall back to the Host header.
                ip = request.META.get("HTTP_HOST").split(":")[0]

        except Exception:
            # Best effort only: any parsing problem yields "".
            pass

        return ip

    def process_request(self, request):
        # Fetch TRANSACTIONID Id and pass to plugin server
        ReqeustID = request.META.get("HTTP_X_ONAP-RequestID", None)
        if ReqeustID is None:
            # No inbound id: derive a deterministic one from the service name.
            ReqeustID = uuid.uuid3(uuid.NAMESPACE_URL, SERVICE_NAME)
            request.META["HTTP_X_ONAP-RequestID"] = ReqeustID
        MDC.put("requestID", ReqeustID)
        # generate the unique  id
        InovocationID = uuid.uuid3(uuid.NAMESPACE_DNS, SERVICE_NAME)
        MDC.put("invocationID", InovocationID)
        MDC.put("serviceName", SERVICE_NAME)
        # access ip
        MDC.put("serviceIP", self._getLastIp(request))

        return None

    def process_response(self, request, response):
        # Always drop the MDC so context never leaks across requests/threads.
        MDC.clear()
        return response
diff --git a/genericparser/packages/__init__.py b/genericparser/packages/__init__.py
new file mode 100644 (file)
index 0000000..c7b6818
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/packages/biz/__init__.py b/genericparser/packages/biz/__init__.py
new file mode 100644 (file)
index 0000000..342c2a8
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/packages/biz/common.py b/genericparser/packages/biz/common.py
new file mode 100644 (file)
index 0000000..be5627e
--- /dev/null
@@ -0,0 +1,51 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.utils import fileutil
+
+CHUNK_SIZE = 1024 * 8
+
+
def save(remote_file, vnf_pkg_id):
    """Persist an uploaded file under GENERICPARSER_ROOT_PATH/<vnf_pkg_id>.

    Streams the upload chunk-by-chunk and returns the local file path.
    """
    target_dir = os.path.join(GENERICPARSER_ROOT_PATH, vnf_pkg_id)
    target_path = os.path.join(target_dir, remote_file.name)
    if not os.path.exists(target_dir):
        fileutil.make_dirs(target_dir)
    with open(target_path, 'wb') as target:
        for chunk in remote_file.chunks(chunk_size=CHUNK_SIZE):
            target.write(chunk)
    return target_path
+
+
def read(file_path, start, end, chunk_size=None):
    """Yield the bytes of *file_path* in [start, end) in chunk-sized pieces.

    Generator used for streaming (ranged) downloads.

    :param file_path: path of the file to stream
    :param start: first byte offset to yield
    :param end: offset one past the last byte to yield
    :param chunk_size: piece size; defaults to the module-level CHUNK_SIZE
    """
    if chunk_size is None:
        chunk_size = CHUNK_SIZE
    # BUGFIX: the original never closed the file handle; the context manager
    # closes it when the generator is exhausted or garbage-collected.
    with open(file_path, 'rb') as fp:
        fp.seek(start)
        pos = start
        while pos + chunk_size < end:
            yield fp.read(chunk_size)
            pos = fp.tell()
        yield fp.read(end - pos)
+
+
def parse_file_range(file_path, file_range):
    """Convert an HTTP-style "start-end" range string into byte offsets.

    :param file_path: file whose size bounds the default range
    :param file_range: "start-end" string; "start-" means "to end of file";
        a falsy value selects the whole file
    :return: (start, end) integer offsets, end exclusive
    """
    size = os.path.getsize(file_path)
    start, end = 0, size
    if file_range:
        start_str, end_str = [part.strip() for part in file_range.split('-')]
        start = int(start_str)
        # BUGFIX/generalization: an open-ended range like "500-" previously
        # crashed on int(''); treat a missing end as end-of-file.
        end = int(end_str) if end_str else size
    return start, end
diff --git a/genericparser/packages/biz/ns_descriptor.py b/genericparser/packages/biz/ns_descriptor.py
new file mode 100644 (file)
index 0000000..60d4a26
--- /dev/null
@@ -0,0 +1,239 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import json
+import logging
+import os
+import uuid
+
+from genericparser.packages.biz.common import parse_file_range, read, save
+from genericparser.packages.const import PKG_STATUS
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.database.models import NSPackageModel, PnfPackageModel, VnfPackageModel
+from genericparser.pub.exceptions import GenericparserException, ResourceNotFoundException
+from genericparser.pub.utils import fileutil, toscaparsers
+from genericparser.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+METADATA = "metadata"
+
+
+class NsDescriptor(object):
+
    def __init__(self):
        # Stateless service object; nothing to initialize.
        pass
+
+    def create(self, data, id=None):
+        logger.info('Start to create a NSD...')
+        user_defined_data = ignore_case_get(data, 'userDefinedData', {})
+        data = {
+            'id': id if id else str(uuid.uuid4()),
+            'nsdOnboardingState': PKG_STATUS.CREATED,
+            'nsdOperationalState': PKG_STATUS.DISABLED,
+            'nsdUsageState': PKG_STATUS.NOT_IN_USE,
+            'userDefinedData': user_defined_data,
+            '_links': None  # TODO
+        }
+        NSPackageModel.objects.create(
+            nsPackageId=data['id'],
+            onboardingState=data['nsdOnboardingState'],
+            operationalState=data['nsdOperationalState'],
+            usageState=data['nsdUsageState'],
+            userDefinedData=json.dumps(user_defined_data)
+        )
+        logger.info('A NSD(%s) has been created.' % data['id'])
+        return data
+
+    def query_multiple(self, nsdId=None):
+        if nsdId:
+            ns_pkgs = NSPackageModel.objects.filter(nsdId=nsdId)
+        else:
+            ns_pkgs = NSPackageModel.objects.all()
+        response_data = []
+        for ns_pkg in ns_pkgs:
+            data = self.fill_resp_data(ns_pkg)
+            response_data.append(data)
+        return response_data
+
+    def query_single(self, nsd_info_id):
+        ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+        if not ns_pkgs.exists():
+            logger.error('NSD(%s) does not exist.' % nsd_info_id)
+            raise ResourceNotFoundException('NSD(%s) does not exist.' % nsd_info_id)
+        return self.fill_resp_data(ns_pkgs[0])
+
    def delete_single(self, nsd_info_id):
        """Delete an NSD record and its on-disk package directory (idempotent)."""
        logger.info('Start to delete NSD(%s)...' % nsd_info_id)
        ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
        if not ns_pkgs.exists():
            # Already gone: deletion is idempotent, not an error.
            logger.info('NSD(%s) has been deleted.' % nsd_info_id)
            return
        # The state checks below are disabled (kept as a reference); packages
        # are currently deleted regardless of operational/usage state.
        '''
        if ns_pkgs[0].operationalState != PKG_STATUS.DISABLED:
            logger.error('NSD(%s) shall be DISABLED.' % nsd_info_id)
            raise GenericparserException('NSD(%s) shall be DISABLED.' % nsd_info_id)
        if ns_pkgs[0].usageState != PKG_STATUS.NOT_IN_USE:
            logger.error('NSD(%s) shall be NOT_IN_USE.' % nsd_info_id)
            raise GenericparserException('NSD(%s) shall be NOT_IN_USE.' % nsd_info_id)
        '''
        ns_pkgs.delete()
        ns_pkg_path = os.path.join(GENERICPARSER_ROOT_PATH, nsd_info_id)
        fileutil.delete_dirs(ns_pkg_path)
        logger.info('NSD(%s) has been deleted.' % nsd_info_id)
+
+    def upload(self, nsd_info_id, remote_file):
+        logger.info('Start to upload NSD(%s)...' % nsd_info_id)
+        ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+        if not ns_pkgs.exists():
+            logger.error('NSD(%s) does not exist.' % nsd_info_id)
+            raise GenericparserException('NSD(%s) does not exist.' % nsd_info_id)
+        ns_pkgs.update(onboardingState=PKG_STATUS.UPLOADING)
+
+        local_file_name = save(remote_file, nsd_info_id)
+        logger.info('NSD(%s) content has been uploaded.' % nsd_info_id)
+        return local_file_name
+
+    def download(self, nsd_info_id, file_range):
+        logger.info('Start to download NSD(%s)...' % nsd_info_id)
+        ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+        if not ns_pkgs.exists():
+            logger.error('NSD(%s) does not exist.' % nsd_info_id)
+            raise ResourceNotFoundException('NSD(%s) does not exist.' % nsd_info_id)
+        if ns_pkgs[0].onboardingState != PKG_STATUS.ONBOARDED:
+            logger.error('NSD(%s) is not ONBOARDED.' % nsd_info_id)
+            raise GenericparserException('NSD(%s) is not ONBOARDED.' % nsd_info_id)
+
+        local_file_path = ns_pkgs[0].localFilePath
+        start, end = parse_file_range(local_file_path, file_range)
+        logger.info('NSD(%s) has been downloaded.' % nsd_info_id)
+        return read(local_file_path, start, end)
+
+    def parse_nsd_and_save(self, nsd_info_id, local_file_name):
+        logger.info('Start to process NSD(%s)...' % nsd_info_id)
+        ns_pkgs = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+        ns_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
+
+        nsd_json = toscaparsers.parse_nsd(local_file_name)
+        logger.debug("%s", nsd_json)
+        nsd = json.JSONDecoder().decode(nsd_json)
+
+        nsd_id = nsd.get("ns", {}).get("properties", {}).get("descriptor_id", "")
+        nsd_name = nsd.get("ns", {}).get("properties", {}).get("name", "")
+        nsd_version = nsd.get("ns", {}).get("properties", {}).get("version", "")
+        nsd_designer = nsd.get("ns", {}).get("properties", {}).get("designer", "")
+        invariant_id = nsd.get("ns", {}).get("properties", {}).get("invariant_id", "")
+        if nsd_id == "":
+            raise GenericparserException("nsd_id(%s) does not exist in metadata." % nsd_id)
+        other_nspkg = NSPackageModel.objects.filter(nsdId=nsd_id)
+        if other_nspkg and other_nspkg[0].nsPackageId != nsd_info_id:
+            logger.warn("NSD(%s,%s) already exists.", nsd_id, other_nspkg[0].nsPackageId)
+            raise GenericparserException("NSD(%s) already exists." % nsd_id)
+
+        for vnf in nsd["vnfs"]:
+            vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
+            if vnfd_id == "undefined":
+                vnfd_id = vnf["properties"].get("id", "undefined")
+            pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+            if not pkg:
+                pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
+            if not pkg:
+                vnfd_name = vnf.get("vnf_id", "undefined")
+                logger.error("[%s] is not distributed.", vnfd_name)
+                raise GenericparserException("VNF package(%s) is not distributed." % vnfd_id)
+
+        for pnf in nsd["pnfs"]:
+            pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
+            if pnfd_id == "undefined":
+                pnfd_id = pnf["properties"].get("id", "undefined")
+            pkg = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+            if not pkg:
+                pkg = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
+            if not pkg:
+                pnfd_name = pnf.get("pnf_id", "undefined")
+                logger.error("[%s] is not distributed.", pnfd_name)
+                raise GenericparserException("PNF package(%s) is not distributed." % pnfd_name)
+
+        ns_pkgs.update(
+            nsdId=nsd_id,
+            nsdName=nsd_name,
+            nsdDesginer=nsd_designer,
+            nsdDescription=nsd.get("description", ""),
+            nsdVersion=nsd_version,
+            invariantId=invariant_id,
+            onboardingState=PKG_STATUS.ONBOARDED,
+            operationalState=PKG_STATUS.ENABLED,
+            usageState=PKG_STATUS.NOT_IN_USE,
+            nsPackageUri=local_file_name,
+            sdcCsarId=nsd_info_id,
+            localFilePath=local_file_name,
+            nsdModel=nsd_json
+        )
+        logger.info('NSD(%s) has been processed.' % nsd_info_id)
+
+    def fill_resp_data(self, ns_pkg):
+        data = {
+            'id': ns_pkg.nsPackageId,
+            'nsdId': ns_pkg.nsdId,
+            'nsdName': ns_pkg.nsdName,
+            'nsdVersion': ns_pkg.nsdVersion,
+            'nsdDesigner': ns_pkg.nsdDesginer,
+            'nsdInvariantId': ns_pkg.invariantId,
+            'vnfPkgIds': [],
+            'pnfdInfoIds': [],  # TODO
+            'nestedNsdInfoIds': [],  # TODO
+            'nsdOnboardingState': ns_pkg.onboardingState,
+            'onboardingFailureDetails': None,  # TODO
+            'nsdOperationalState': ns_pkg.operationalState,
+            'nsdUsageState': ns_pkg.usageState,
+            'userDefinedData': {},
+            '_links': None  # TODO
+        }
+
+        if ns_pkg.nsdModel:
+            nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
+            vnf_pkg_ids = []
+            for vnf in nsd_model['vnfs']:
+                vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
+                if vnfd_id == "undefined":
+                    vnfd_id = vnf["properties"].get("id", "undefined")
+                pkgs = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+                if not pkgs:
+                    pkgs = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
+                for pkg in pkgs:
+                    vnf_pkg_ids.append(pkg.vnfPackageId)
+            data['vnfPkgIds'] = vnf_pkg_ids
+
+            pnf_info_ids = []
+            for pnf in nsd_model['pnfs']:
+                pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
+                if pnfd_id == "undefined":
+                    pnfd_id = pnf["properties"].get("id", "undefined")
+                pkgs = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+                if not pkgs:
+                    pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
+                for pkg in pkgs:
+                    pnf_info_ids.append(pkg.pnfPackageId)
+            data['pnfInfoIds'] = pnf_info_ids  # TODO: need reconfirming
+
+        if ns_pkg.userDefinedData:
+            user_defined_data = json.JSONDecoder().decode(ns_pkg.userDefinedData)
+            data['userDefinedData'] = user_defined_data
+
+        return data
+
+    def handle_upload_failed(self, nsd_info_id):
+        ns_pkg = NSPackageModel.objects.filter(nsPackageId=nsd_info_id)
+        ns_pkg.update(onboardingState=PKG_STATUS.CREATED)
diff --git a/genericparser/packages/biz/nsdm_subscription.py b/genericparser/packages/biz/nsdm_subscription.py
new file mode 100644 (file)
index 0000000..ba74c70
--- /dev/null
@@ -0,0 +1,219 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ast
+import json
+import logging
+import requests
+import uuid
+
+from collections import Counter
+
+from rest_framework import status
+
+from genericparser.packages import const
+from genericparser.pub.database.models import NsdmSubscriptionModel
+from genericparser.pub.exceptions import GenericparserException, \
+    ResourceNotFoundException, \
+    NsdmBadRequestException, NsdmDuplicateSubscriptionException
+from genericparser.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+PARAMSBASICKEYS = ["userName", "password"]
+
+PARAMSOAUTH2CLIENTCREDENTIALSKEYS = ["clientId", "clientPassword",
+                                     "tokenEndpoint"]
+
+
def is_filter_type_equal(new_filter, existing_filter):
    """Return True when both collections hold the same set of filter values.

    The previous implementation deduplicated only *new_filter* before
    comparing element counts, making the comparison asymmetric.  Every call
    site passes duplicate-free collections (dict keys, or lists stored via
    ``list(set(...))``), so symmetric set equality expresses the intended
    "same filter types" semantics without changing observed behavior.
    """
    return set(new_filter) == set(existing_filter)
+
+
class NsdmSubscription:
    """Business logic for NSD-management (NSDM) notification subscriptions.

    Covers creation (with callback-URI probing, authentication validation
    and duplicate detection), query and deletion of subscriptions stored in
    NsdmSubscriptionModel.
    """

    def __init__(self):
        pass

    def query_single_subscription(self, subscription_id):
        """Return the response data of subscription *subscription_id*.

        Raises:
            ResourceNotFoundException: if the subscription is unknown.
        """
        logger.debug("Start Query Subscription... ")
        subscription = \
            NsdmSubscriptionModel.objects.filter(
                subscriptionid=subscription_id)
        if not subscription.exists():
            raise ResourceNotFoundException(
                "Subscription(%s) doesn't exists" % subscription_id)
        logger.debug("Subscription found... ")
        return self.fill_resp_data(subscription[0])

    def delete_single_subscription(self, subscription_id):
        """Delete subscription *subscription_id*.

        Raises:
            ResourceNotFoundException: if the subscription is unknown.
        """
        logger.debug("Start Delete Subscription... ")
        subscription = \
            NsdmSubscriptionModel.objects.filter(
                subscriptionid=subscription_id)
        if not subscription.exists():
            raise ResourceNotFoundException(
                "Subscription(%s) doesn't exists" % subscription_id)
        subscription.delete()
        logger.debug("Deleted Subscription... ")

    def query_multi_subscriptions(self, query_params):
        """Return response data for every subscription matching *query_params*.

        Only keys listed in const.NSDM_NOTIFICATION_FILTERS are honored;
        unknown keys are ignored, and with no recognized filter all records
        are returned.

        Raises:
            ResourceNotFoundException: if nothing matches.
        """
        self.params = query_params
        query_data = {}
        # Pass params as a lazy logging argument; the original applied '%'
        # to a format string that contained no placeholder.
        logger.debug("Start QueryMultiSubscriptions get --> "
                     "Check for filters in query params %s", self.params)
        # BUG FIX: dict.iteritems() exists only on Python 2; items() works
        # on both interpreters.
        for query, value in self.params.items():
            if query in const.NSDM_NOTIFICATION_FILTERS and value:
                query_data[query + '__icontains'] = json.dumps(list(set(value)))
        # Query the database with filters if the request
        # has fields in request params, else fetch all records
        if query_data:
            subscriptions = NsdmSubscriptionModel.objects.filter(**query_data)
        else:
            subscriptions = NsdmSubscriptionModel.objects.all()
        if not subscriptions.exists():
            raise ResourceNotFoundException("Subscriptions doesn't exist")
        return [self.fill_resp_data(subscription)
                for subscription in subscriptions]

    def check_callbackuri_connection(self):
        """Verify the callback URI answers a GET with 204 No Content.

        Raises:
            GenericparserException: if the URI is unreachable or returns a
                status other than 204.
        """
        logger.debug("Create Subscription --> Test Callback URI --"
                     "Sending GET request to %s" % self.callback_uri)
        try:
            response = requests.get(self.callback_uri, timeout=2)
            if response.status_code != status.HTTP_204_NO_CONTENT:
                raise GenericparserException("callbackUri %s returns %s status "
                                       "code." % (self.callback_uri,
                                                  response.status_code))
        except Exception:
            # BUG FIX: the concatenated message parts were missing a space
            # ("...204 statuscode.").
            raise GenericparserException("callbackUri %s didn't return 204 "
                                   "status code." % self.callback_uri)

    def fill_resp_data(self, subscription):
        """Build the response dict for one NsdmSubscriptionModel row."""
        subscription_filter = dict()
        for filter_type in const.NSDM_NOTIFICATION_FILTERS:
            # Filters are stored as the repr of a list (see save_db);
            # literal_eval turns them back into Python lists.
            subscription_filter[filter_type] = \
                ast.literal_eval(subscription.__dict__[filter_type])
        resp_data = {
            'id': subscription.subscriptionid,
            'callbackUri': subscription.callback_uri,
            'filter': subscription_filter,
            '_links': json.loads(subscription.links)
        }
        return resp_data

    def create(self, data):
        """Validate *data*, store a new subscription and return its data.

        Raises:
            GenericparserException: if the callback URI is unreachable.
            NsdmBadRequestException: on invalid auth info or filters.
            NsdmDuplicateSubscriptionException: on a duplicate subscription.
        """
        logger.debug("Start Create Subscription... ")
        self.filter = ignore_case_get(data, "filter", {})
        self.callback_uri = ignore_case_get(data, "callbackUri")
        self.authentication = ignore_case_get(data, "authentication", {})
        self.subscription_id = str(uuid.uuid4())
        self.check_callbackuri_connection()
        self.check_valid_auth_info()
        self.check_filter_types()
        self.check_valid()
        self.save_db()
        subscription = \
            NsdmSubscriptionModel.objects.get(
                subscriptionid=self.subscription_id)
        return self.fill_resp_data(subscription)

    def check_filter_types(self):
        """Reject filters that combine mutually exclusive id fields.

        Raises:
            NsdmBadRequestException: if both nsdId and nsdInfoId, or both
                pnfdId and pnfdInfoIds, are present.
        """
        logger.debug("Create Subscription --> Validating Filters... ")
        if self.filter and \
                self.filter.get("nsdId", "") and \
                self.filter.get("nsdInfoId", ""):
            raise NsdmBadRequestException("Notification Filter should contain"
                                          " either nsdId or nsdInfoId")
        if self.filter and \
                self.filter.get("pnfdId", "") and \
                self.filter.get("pnfdInfoIds", ""):
            raise NsdmBadRequestException("Notification Filter should contain"
                                          " either pnfdId or pnfdInfoIds")

    def check_valid_auth_info(self):
        """Validate the optional authentication block of the request.

        Raises:
            NsdmBadRequestException: if auth parameters are inconsistent
                with the declared authType or required keys are missing.
        """
        logger.debug("Create Subscription --> Validating Auth "
                     "details if provided... ")
        if self.authentication.get("paramsBasic", {}) and \
                const.BASIC not in self.authentication.get("authType", ''):
            raise NsdmBadRequestException('Auth type should be ' + const.BASIC)
        if self.authentication.get("paramsOauth2ClientCredentials", {}) and \
                const.OAUTH2_CLIENT_CREDENTIALS not in \
                self.authentication.get("authType", ''):
            raise NsdmBadRequestException('Auth type should '
                                          'be ' + const.OAUTH2_CLIENT_CREDENTIALS)
        if const.BASIC in self.authentication.get("authType", '') and \
                "paramsBasic" in self.authentication.keys() and \
                not is_filter_type_equal(PARAMSBASICKEYS,
                                         self.authentication.
                                         get("paramsBasic").keys()):
            raise NsdmBadRequestException('userName and password needed '
                                          'for ' + const.BASIC)
        if const.OAUTH2_CLIENT_CREDENTIALS in \
                self.authentication.get("authType", '') and \
                "paramsOauth2ClientCredentials" in \
                self.authentication.keys() and \
                not is_filter_type_equal(PARAMSOAUTH2CLIENTCREDENTIALSKEYS,
                                         self.authentication.
                                         get("paramsOauth2ClientCredentials")
                                         .keys()):
            raise NsdmBadRequestException('clientId, clientPassword and '
                                          'tokenEndpoint required '
                                          'for ' + const.OAUTH2_CLIENT_CREDENTIALS)

    def check_filter_exists(self, subscription):
        """Return True if *subscription* stores exactly the same filters."""
        for filter_type in const.NSDM_NOTIFICATION_FILTERS:
            if not is_filter_type_equal(self.filter.get(filter_type, []),
                                        ast.literal_eval(
                                            getattr(subscription,
                                                    filter_type))):
                return False
        return True

    def check_valid(self):
        """Reject a duplicate of an existing subscription.

        Raises:
            NsdmDuplicateSubscriptionException: if a subscription with the
                same callbackUri and filter already exists.
        """
        logger.debug("Create Subscription --> Checking DB if "
                     "the same subscription already exists... ")
        subscriptions = \
            NsdmSubscriptionModel.objects.filter(
                callback_uri=self.callback_uri)
        if not subscriptions.exists():
            return
        for subscription in subscriptions:
            if self.check_filter_exists(subscription):
                raise NsdmDuplicateSubscriptionException(
                    "Already Subscription exists with the "
                    "same callbackUri and filter")

    def save_db(self):
        """Persist the validated subscription to the database."""
        logger.debug("Create Subscription --> Saving the subscription "
                     "%s to the database" % self.subscription_id)
        links = {
            "self": {
                "href":
                const.NSDM_SUBSCRIPTION_ROOT_URI + self.subscription_id
            }
        }
        subscription_save_db = {
            "subscriptionid": self.subscription_id,
            "callback_uri": self.callback_uri,
            "auth_info": self.authentication,
            "links": json.dumps(links)
        }
        # Filters are deduplicated and stored as JSON lists so that the
        # '__icontains' queries in query_multi_subscriptions can match them.
        for filter_type in const.NSDM_NOTIFICATION_FILTERS:
            subscription_save_db[filter_type] = json.dumps(
                list(set(self.filter.get(filter_type, []))))
        NsdmSubscriptionModel.objects.create(**subscription_save_db)
        logger.debug('Create Subscription[%s] success', self.subscription_id)
diff --git a/genericparser/packages/biz/pnf_descriptor.py b/genericparser/packages/biz/pnf_descriptor.py
new file mode 100644 (file)
index 0000000..ae635ff
--- /dev/null
@@ -0,0 +1,227 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import json
+import logging
+import os
+import uuid
+
+from genericparser.packages.biz.common import read, save
+from genericparser.packages.const import PKG_STATUS
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.database.models import NSPackageModel, PnfPackageModel
+from genericparser.pub.exceptions import GenericparserException, ResourceNotFoundException
+from genericparser.pub.utils import fileutil, toscaparsers
+from genericparser.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
class PnfDescriptor(object):
    """Business logic for PNF descriptor (PNFD) package management."""

    def __init__(self):
        pass

    def create(self, data):
        """Create an empty PNFD resource and return its response data."""
        logger.info('Start to create a PNFD...')
        user_defined_data = ignore_case_get(data, 'userDefinedData', {})
        data = {
            'id': str(uuid.uuid4()),
            'pnfdOnboardingState': PKG_STATUS.CREATED,
            'pnfdUsageState': PKG_STATUS.NOT_IN_USE,
            'userDefinedData': user_defined_data,
            '_links': None  # TODO
        }
        PnfPackageModel.objects.create(
            pnfPackageId=data['id'],
            onboardingState=data['pnfdOnboardingState'],
            usageState=data['pnfdUsageState'],
            userDefinedData=json.dumps(user_defined_data)
        )
        logger.info('A PNFD(%s) has been created.' % data['id'])
        return data

    def query_multiple(self, pnfdId=None):
        """Return response data for all PNFDs, optionally filtered by pnfdId."""
        if pnfdId:
            pnf_pkgs = PnfPackageModel.objects.filter(pnfdId=pnfdId)
        else:
            pnf_pkgs = PnfPackageModel.objects.all()
        response_data = []
        for pnf_pkg in pnf_pkgs:
            data = self.fill_response_data(pnf_pkg)
            response_data.append(data)
        return response_data

    def query_single(self, pnfd_info_id):
        """Return response data for one PNFD.

        Raises:
            ResourceNotFoundException: if the PNFD does not exist.
        """
        pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
        if not pnf_pkgs.exists():
            logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
            raise ResourceNotFoundException('PNFD(%s) does not exist.' % pnfd_info_id)
        return self.fill_response_data(pnf_pkgs[0])

    def upload(self, remote_file, pnfd_info_id):
        """Store uploaded PNFD content and return the local file name.

        Raises:
            GenericparserException: if the PNFD resource does not exist.
        """
        logger.info('Start to upload PNFD(%s)...' % pnfd_info_id)
        pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
        if not pnf_pkgs.exists():
            logger.info('PNFD(%s) does not exist.' % pnfd_info_id)
            raise GenericparserException('PNFD (%s) does not exist.' % pnfd_info_id)
        pnf_pkgs.update(onboardingState=PKG_STATUS.UPLOADING)

        local_file_name = save(remote_file, pnfd_info_id)
        logger.info('PNFD(%s) content has been uploaded.' % pnfd_info_id)
        return local_file_name

    def delete_single(self, pnfd_info_id):
        """Delete a PNFD unless it is referenced by a stored NSD.

        Deleting an unknown id is a no-op.

        Raises:
            GenericparserException: if the PNFD is referenced by an NSD.
        """
        logger.info('Start to delete PNFD(%s)...' % pnfd_info_id)
        pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
        if not pnf_pkgs.exists():
            logger.info('PNFD(%s) has been deleted.' % pnfd_info_id)
            return
        # NOTE(review): the NOT_IN_USE state check below is intentionally
        # disabled (no-op string statement); confirm before re-enabling.
        '''
        if pnf_pkgs[0].usageState != PKG_STATUS.NOT_IN_USE:
            logger.info('PNFD(%s) shall be NOT_IN_USE.' % pnfd_info_id)
            raise GenericparserException('PNFD(%s) shall be NOT_IN_USE.' % pnfd_info_id)
        '''
        del_pnfd_id = pnf_pkgs[0].pnfdId
        ns_pkgs = NSPackageModel.objects.all()
        for ns_pkg in ns_pkgs:
            nsd_model = None
            if ns_pkg.nsdModel:
                nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
            if not nsd_model:
                continue
            for pnf in nsd_model['pnfs']:
                if del_pnfd_id == pnf["properties"]["id"]:
                    # logger.warn is a deprecated alias of logger.warning.
                    logger.warning("PNFD(%s) is referenced in NSD", del_pnfd_id)
                    raise GenericparserException('PNFD(%s) is referenced.' % pnfd_info_id)
        pnf_pkgs.delete()
        pnf_pkg_path = os.path.join(GENERICPARSER_ROOT_PATH, pnfd_info_id)
        fileutil.delete_dirs(pnf_pkg_path)
        logger.debug('PNFD(%s) has been deleted.' % pnfd_info_id)

    def download(self, pnfd_info_id):
        """Return the whole onboarded PNFD file content.

        Raises:
            ResourceNotFoundException: if the PNFD does not exist.
            GenericparserException: if the PNFD is not ONBOARDED.
        """
        logger.info('Start to download PNFD(%s)...' % pnfd_info_id)
        pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
        if not pnf_pkgs.exists():
            logger.error('PNFD(%s) does not exist.' % pnfd_info_id)
            raise ResourceNotFoundException('PNFD(%s) does not exist.' % pnfd_info_id)
        if pnf_pkgs[0].onboardingState != PKG_STATUS.ONBOARDED:
            logger.error('PNFD(%s) is not ONBOARDED.' % pnfd_info_id)
            raise GenericparserException('PNFD(%s) is not ONBOARDED.' % pnfd_info_id)

        local_file_path = pnf_pkgs[0].localFilePath
        # PNFD download has no range support: always the full file.
        start, end = 0, os.path.getsize(local_file_path)
        logger.info('PNFD(%s) has been downloaded.' % pnfd_info_id)
        return read(local_file_path, start, end)

    def parse_pnfd_and_save(self, pnfd_info_id, local_file_name):
        """Parse the PNFD file, extract metadata and persist the result.

        Descriptor id/version/provider/name are taken from the parsed
        'pnf' node properties first, falling back to template metadata.

        Raises:
            GenericparserException: if no descriptor id can be found or the
                PNFD already exists under another package.
        """
        logger.info('Start to process PNFD(%s)...' % pnfd_info_id)
        pnf_pkgs = PnfPackageModel.objects.filter(pnfPackageId=pnfd_info_id)
        pnf_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
        pnfd_json = toscaparsers.parse_pnfd(local_file_name)
        pnfd = json.JSONDecoder().decode(pnfd_json)

        logger.debug("pnfd_json is %s" % pnfd_json)
        pnfd_id = ""
        pnfdVersion = ""
        pnfdProvider = ""
        pnfdName = ""
        if pnfd.get("pnf", "") != "":
            if pnfd["pnf"].get("properties", "") != "":
                pnfd_id = pnfd["pnf"].get("properties", "").get("descriptor_id", "")
                pnfdVersion = pnfd["pnf"].get("properties", "").get("version", "")
                pnfdProvider = pnfd["pnf"].get("properties", "").get("provider", "")
                pnfdName = pnfd["pnf"].get("properties", "").get("name", "")
        if pnfd_id == "":
            pnfd_id = pnfd["metadata"].get("descriptor_id", "")
            if pnfd_id == "":
                pnfd_id = pnfd["metadata"].get("id", "")
            if pnfd_id == "":
                pnfd_id = pnfd["metadata"].get("UUID", "")
            if pnfd_id == "":
                raise GenericparserException('pnfd_id is Null.')

        if pnfdVersion == "":
            pnfdVersion = pnfd["metadata"].get("template_version", "")
            if pnfdVersion == "":
                pnfdVersion = pnfd["metadata"].get("version", "")

        if pnfdProvider == "":
            pnfdProvider = pnfd["metadata"].get("template_author", "")
            # BUG FIX: the fallback previously tested and assigned
            # pnfdVersion instead of pnfdProvider (copy-paste error).
            if pnfdProvider == "":
                pnfdProvider = pnfd["metadata"].get("provider", "")

        if pnfdName == "":
            pnfdName = pnfd["metadata"].get("template_name", "")
            # BUG FIX: the fallback condition previously tested pnfdVersion
            # instead of pnfdName (copy-paste error).
            if pnfdName == "":
                pnfdName = pnfd["metadata"].get("name", "")

        if pnfd_id:
            other_pnf = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
            if other_pnf and other_pnf[0].pnfPackageId != pnfd_info_id:
                logger.info('PNFD(%s) already exists.' % pnfd_id)
                raise GenericparserException("PNFD(%s) already exists." % pnfd_id)

        pnf_pkgs.update(
            pnfdId=pnfd_id,
            pnfdName=pnfdName,
            pnfdVersion=pnfdVersion,
            pnfVendor=pnfdProvider,
            pnfPackageUri=local_file_name,
            onboardingState=PKG_STATUS.ONBOARDED,
            usageState=PKG_STATUS.NOT_IN_USE,
            localFilePath=local_file_name,
            pnfdModel=pnfd_json
        )
        logger.info('PNFD(%s) has been processed.' % pnfd_info_id)

    def fill_response_data(self, pnf_pkg):
        """Build the PnfdInfo response dict for one PnfPackageModel row."""
        data = {
            'id': pnf_pkg.pnfPackageId,
            'pnfdId': pnf_pkg.pnfdId,
            'pnfdName': pnf_pkg.pnfdName,
            'pnfdVersion': pnf_pkg.pnfdVersion,
            'pnfdProvider': pnf_pkg.pnfVendor,
            'pnfdInvariantId': None,  # TODO
            'pnfdOnboardingState': pnf_pkg.onboardingState,
            'onboardingFailureDetails': None,  # TODO
            'pnfdUsageState': pnf_pkg.usageState,
            'userDefinedData': {},
            '_links': None  # TODO
        }
        if pnf_pkg.userDefinedData:
            user_defined_data = json.JSONDecoder().decode(pnf_pkg.userDefinedData)
            data['userDefinedData'] = user_defined_data

        return data

    def handle_upload_failed(self, pnf_pkg_id):
        """Roll the PNFD back to CREATED after a failed upload."""
        pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId=pnf_pkg_id)
        pnf_pkg.update(onboardingState=PKG_STATUS.CREATED)

    def parse_pnfd(self, csar_id, inputs):
        """Parse the stored PNFD of *csar_id* with *inputs* applied.

        Returns [0, {"model": ...}] on success or [1, error_message].
        """
        ret = None
        try:
            pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId=csar_id)
            if not pnf_pkg:
                raise GenericparserException("PNF CSAR(%s) does not exist." % csar_id)
            csar_path = pnf_pkg[0].localFilePath
            ret = {"model": toscaparsers.parse_pnfd(csar_path, inputs)}
        except GenericparserException as e:
            # str(e) works on both Python 2 and 3; e.message is Py2-only.
            return [1, str(e)]
        except Exception as e:
            logger.error(str(e))
            return [1, str(e)]
        return [0, ret]
diff --git a/genericparser/packages/biz/sdc_ns_package.py b/genericparser/packages/biz/sdc_ns_package.py
new file mode 100644 (file)
index 0000000..42346ff
--- /dev/null
@@ -0,0 +1,172 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import sys
+import traceback
+
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH, GENERICPARSER_URL_PATH
+from genericparser.pub.config.config import REG_TO_MSB_REG_PARAM
+from genericparser.pub.database.models import NSPackageModel
+from genericparser.pub.exceptions import GenericparserException
+from genericparser.pub.msapi import sdc
+from genericparser.pub.utils import toscaparsers
+from genericparser.packages.biz.ns_descriptor import NsDescriptor
+from genericparser.pub.utils import fileutil
+
+logger = logging.getLogger(__name__)
+
+STATUS_SUCCESS, STATUS_FAILED = "success", "failed"
+
+METADATA = "metadata"
+
+
def fmt_ns_pkg_rsp(status, desc, error_code="500"):
    """Wrap an NS-package operation result in the legacy [ret, data] shape."""
    body = {
        "status": status,
        "statusDescription": desc,
        "errorCode": error_code,
    }
    return [0, body]
+
+
def ns_on_distribute(csar_id):
    """Distribute NS CSAR *csar_id* from SDC and parse it.

    On any failure the partially created package is removed and a 'failed'
    response is returned; otherwise a 'success' response.
    """
    ret = None
    try:
        ret = NsPackage().on_distribute(csar_id)
    except GenericparserException as e:
        NsPackage().delete_csar(csar_id)
        # str(e) works on both Python 2 and 3; e.message is Py2-only.
        return fmt_ns_pkg_rsp(STATUS_FAILED, str(e))
    except Exception:
        # BUG FIX: a bare 'except:' would also trap SystemExit and
        # KeyboardInterrupt.
        logger.error(traceback.format_exc())
        NsPackage().delete_csar(csar_id)
        return fmt_ns_pkg_rsp(STATUS_FAILED, str(sys.exc_info()))
    if ret[0]:
        return fmt_ns_pkg_rsp(STATUS_FAILED, ret[1])
    return fmt_ns_pkg_rsp(STATUS_SUCCESS, ret[1], "")
+
+
def ns_delete_csar(csar_id):
    """Delete NS package *csar_id*; returns the legacy [ret, data] response."""
    ret = None
    try:
        ret = NsPackage().delete_csar(csar_id)
    except GenericparserException as e:
        # str(e) works on both Python 2 and 3; e.message is Py2-only.
        return fmt_ns_pkg_rsp(STATUS_FAILED, str(e))
    except Exception:
        # BUG FIX: a bare 'except:' would also trap SystemExit and
        # KeyboardInterrupt.
        logger.error(traceback.format_exc())
        return fmt_ns_pkg_rsp(STATUS_FAILED, str(sys.exc_info()))
    return fmt_ns_pkg_rsp(STATUS_SUCCESS, ret[1], "")
+
+
def ns_get_csars():
    """Return [0, list] with summary info for every NS package, or [1, err]."""
    ret = None
    try:
        ret = NsPackage().get_csars()
    except GenericparserException as e:
        # str(e) works on both Python 2 and 3; e.message is Py2-only.
        return [1, str(e)]
    except Exception:
        # BUG FIX: a bare 'except:' would also trap SystemExit and
        # KeyboardInterrupt.
        logger.error(traceback.format_exc())
        return [1, str(sys.exc_info())]
    return ret
+
+
def ns_get_csar(csar_id):
    """Return [0, info] for NS package *csar_id*, or [1, error_message]."""
    ret = None
    try:
        ret = NsPackage().get_csar(csar_id)
    except GenericparserException as e:
        # str(e) works on both Python 2 and 3; e.message is Py2-only.
        return [1, str(e)]
    except Exception as e:
        logger.error(str(e))
        logger.error(traceback.format_exc())
        return [1, str(sys.exc_info())]
    return ret
+
+
def parse_nsd(csar_id, inputs):
    """Parse the stored NSD of *csar_id* with *inputs* applied.

    Returns [0, {"model": ...}] on success or [1, error_message].
    """
    ret = None
    try:
        ns_pkg = NSPackageModel.objects.filter(nsPackageId=csar_id)
        if not ns_pkg:
            raise GenericparserException("NS CSAR(%s) does not exist." % csar_id)
        csar_path = ns_pkg[0].localFilePath
        ret = {"model": toscaparsers.parse_nsd(csar_path, inputs)}
    except GenericparserException as e:
        # str(e) works on both Python 2 and 3; e.message is Py2-only.
        return [1, str(e)]
    except Exception as e:
        logger.error(str(e))
        logger.error(traceback.format_exc())
        return [1, str(sys.exc_info())]
    return [0, ret]
+
+
class NsPackage(object):
    """
    Actions for sdc ns package.

    Downloads service CSARs from SDC, delegates creation/parsing to
    NsDescriptor, and exposes query/delete helpers over NSPackageModel.
    """

    def __init__(self):
        pass

    def on_distribute(self, csar_id):
        # Fetch CSAR *csar_id* from SDC, store it locally and parse it.
        # Returns [1, msg] if the package already exists, else [0, msg].
        if NSPackageModel.objects.filter(nsPackageId=csar_id):
            return [1, "NS CSAR(%s) already exists." % csar_id]

        artifact = sdc.get_artifact(sdc.ASSETTYPE_SERVICES, csar_id)
        local_path = os.path.join(GENERICPARSER_ROOT_PATH, csar_id)
        csar_name = "%s.csar" % artifact.get("name", csar_id)
        local_file_name = sdc.download_artifacts(artifact["toscaModelURL"], local_path, csar_name)
        # Some SDC service CSARs embed the actual NS CSAR as a deployment
        # artifact; prefer that inner file when it is present.
        if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
            artifact_vnf_file = fileutil.unzip_file(local_file_name, local_path, "Artifacts/Deployment/OTHER/ns.csar")
            if os.path.exists(artifact_vnf_file):
                local_file_name = artifact_vnf_file

        data = {
            'userDefinedData': {}
        }
        nsd = NsDescriptor()
        nsd.create(data, csar_id)
        nsd.parse_nsd_and_save(csar_id, local_file_name)
        return [0, "CSAR(%s) distributed successfully." % csar_id]

    def delete_csar(self, csar_id):
        # Delete the NS descriptor record (and its files) via NsDescriptor.
        nsd = NsDescriptor()
        nsd.delete_single(csar_id)
        return [0, "Delete CSAR(%s) successfully." % csar_id]

    def get_csars(self):
        # Collect the packageInfo of every stored NS package.
        csars = []
        nss = NSPackageModel.objects.filter()
        for ns in nss:
            ret = self.get_csar(ns.nsPackageId)
            csars.append(ret[1])
        return [0, csars]

    def get_csar(self, csar_id):
        # Build [0, {"csarId":..., "packageInfo":...}] for one package.
        # Raises GenericparserException when the package is not found.
        package_info = {}
        csars = NSPackageModel.objects.filter(nsPackageId=csar_id)
        if csars:
            package_info["nsdId"] = csars[0].nsdId
            package_info["nsPackageId"] = csars[0].nsPackageId
            # 'nsdDesginer' spelling comes from the model field definition.
            package_info["nsdProvider"] = csars[0].nsdDesginer
            package_info["nsdVersion"] = csars[0].nsdVersion
            package_info["csarName"] = csars[0].nsPackageUri
            package_info["nsdModel"] = csars[0].nsdModel
            package_info["nsdInvariantId"] = csars[0].invariantId
            # Download URL is built from the first MSB registration node.
            package_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
                GENERICPARSER_URL_PATH,
                csar_id,
                csars[0].nsPackageUri)
        else:
            raise GenericparserException("Ns package[%s] not Found." % csar_id)

        return [0, {"csarId": csar_id, "packageInfo": package_info}]
diff --git a/genericparser/packages/biz/sdc_service_package.py b/genericparser/packages/biz/sdc_service_package.py
new file mode 100644 (file)
index 0000000..0390b5c
--- /dev/null
@@ -0,0 +1,114 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import traceback
+
+from coverage.xmlreport import os
+# NOTE(review): the line above accidentally imports the stdlib "os" module
+# through coverage's internals; the direct import below rebinds the very same
+# module object and is the one to keep once the accidental import is removed.
+import os
+
+from genericparser.packages.biz.service_descriptor import ServiceDescriptor
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH, REG_TO_MSB_REG_PARAM, GENERICPARSER_URL_PATH
+from genericparser.pub.database.models import ServicePackageModel
+from genericparser.pub.exceptions import GenericparserException, PackageNotFoundException, \
+    PackageHasExistsException
+from genericparser.pub.msapi import sdc
+from genericparser.pub.utils import fileutil, toscaparsers
+
+logger = logging.getLogger(__name__)
+
+
+class ServicePackage(object):
+    """
+    Actions for sdc service package.
+    """
+
+    def __init__(self):
+        pass
+
+    def on_distribute(self, csar_id):
+        if ServicePackageModel.objects.filter(servicePackageId=csar_id):
+            raise PackageHasExistsException("Service CSAR(%s) already exists." % csar_id)
+
+        try:
+            artifact = sdc.get_artifact(sdc.ASSETTYPE_SERVICES, csar_id)
+            local_path = os.path.join(GENERICPARSER_ROOT_PATH, csar_id)
+            csar_name = "%s.csar" % artifact.get("name", csar_id)
+            local_file_name = sdc.download_artifacts(artifact["toscaModelURL"], local_path, csar_name)
+            if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
+                fileutil.unzip_file(local_file_name, local_path, "")
+            data = {
+                'userDefinedData': {}
+            }
+            serviced = ServiceDescriptor()
+            serviced.create(data, csar_id)
+            serviced.parse_serviced_and_save(csar_id, local_file_name)
+
+        except Exception as e:
+            logger.error(traceback.format_exc())
+            if ServicePackageModel.objects.filter(servicePackageId=csar_id):
+                ServicePackage().delete_csar(csar_id)
+            raise e
+
+    def delete_csar(self, csar_id):
+        serviced = ServiceDescriptor()
+        serviced.delete_single(csar_id)
+
+    def get_csars(self):
+        csars = []
+        packages = ServicePackageModel.objects.filter()
+        for package in packages:
+            csar = self.get_csar(package.servicePackageId)
+            csars.append(csar)
+        return csars
+
+    def get_csar(self, csar_id):
+        package_info = {}
+        csars = ServicePackageModel.objects.filter(servicePackageId=csar_id)
+        if csars:
+            package_info["servicedId"] = csars[0].servicedId
+            package_info["servicePackageId"] = csars[0].servicePackageId
+            package_info["servicedProvider"] = csars[0].servicedDesigner
+            package_info["servicedVersion"] = csars[0].servicedVersion
+            package_info["csarName"] = csars[0].servicePackageUri
+            package_info["servicedModel"] = csars[0].servicedModel
+            package_info["servicedInvariantId"] = csars[0].invariantId
+            package_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
+                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
+                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
+                GENERICPARSER_URL_PATH,
+                csar_id,
+                csars[0].servicePackageUri)
+        else:
+            error_message = "Service package[%s] not Found." % csar_id
+            logger.error(error_message)
+            raise PackageNotFoundException(error_message)
+
+        return {"csarId": csar_id, "packageInfo": package_info}
+
+    def parse_serviced(self, csar_id, inputs):
+        service_pkg = ServicePackageModel.objects.filter(servicePackageId=csar_id)
+        if not service_pkg:
+            raise PackageNotFoundException("Service CSAR(%s) does not exist." % csar_id)
+
+        try:
+            csar_path = service_pkg[0].localFilePath
+            ret = {"model": toscaparsers.parse_sd(csar_path, inputs)}
+            return ret
+        except GenericparserException as e:
+            logger.error(e.message)
+            raise e
+        except Exception as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            raise e
diff --git a/genericparser/packages/biz/sdc_vnf_package.py b/genericparser/packages/biz/sdc_vnf_package.py
new file mode 100644 (file)
index 0000000..afd219a
--- /dev/null
@@ -0,0 +1,254 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import sys
+import threading
+import traceback
+
+from genericparser.packages.const import PKG_STATUS
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH, GENERICPARSER_URL_PATH
+from genericparser.pub.config.config import REG_TO_MSB_REG_PARAM
+from genericparser.pub.database.models import VnfPackageModel
+from genericparser.pub.exceptions import GenericparserException
+from genericparser.pub.msapi import sdc
+from genericparser.pub.utils import fileutil
+from genericparser.pub.utils import toscaparsers
+from genericparser.pub.utils.jobutil import JobUtil
+
+logger = logging.getLogger(__name__)
+
+JOB_ERROR = 255
+
+
+def nf_get_csars():
+    ret = None
+    try:
+        ret = NfPackage().get_csars()
+    except GenericparserException as e:
+        return [1, e.message]
+    except Exception as e:
+        logger.error(e.message)
+        logger.error(traceback.format_exc())
+        return [1, str(sys.exc_info())]
+    return ret
+
+
+def nf_get_csar(csar_id):
+    ret = None
+    try:
+        ret = NfPackage().get_csar(csar_id)
+    except GenericparserException as e:
+        return [1, e.message]
+    except Exception as e:
+        logger.error(e.message)
+        logger.error(traceback.format_exc())
+        return [1, str(sys.exc_info())]
+    return ret
+
+
+def parse_vnfd(csar_id, inputs):
+    ret = None
+    try:
+        nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=csar_id)
+        if not nf_pkg:
+            raise GenericparserException("VNF CSAR(%s) does not exist." % csar_id)
+        csar_path = nf_pkg[0].localFilePath
+        ret = {"model": toscaparsers.parse_vnfd(csar_path, inputs)}
+    except GenericparserException as e:
+        return [1, e.message]
+    except Exception as e:
+        logger.error(e.message)
+        logger.error(traceback.format_exc())
+        return [1, str(sys.exc_info())]
+    return [0, ret]
+
+
+class NfDistributeThread(threading.Thread):
+    """
+    Sdc NF Package Distribute
+    """
+
+    def __init__(self, csar_id, vim_ids, lab_vim_id, job_id):
+        threading.Thread.__init__(self)
+        self.csar_id = csar_id
+        self.vim_ids = vim_ids
+        self.lab_vim_id = lab_vim_id
+        self.job_id = job_id
+
+        self.csar_save_path = os.path.join(GENERICPARSER_ROOT_PATH, csar_id)
+
+    def run(self):
+        try:
+            self.on_distribute()
+        except GenericparserException as e:
+            self.rollback_distribute()
+            JobUtil.add_job_status(self.job_id, JOB_ERROR, e.message)
+        except Exception as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            logger.error(str(sys.exc_info()))
+            self.rollback_distribute()
+            JobUtil.add_job_status(self.job_id, JOB_ERROR, "Failed to distribute CSAR(%s)" % self.csar_id)
+
+    def on_distribute(self):
+        JobUtil.create_job(
+            inst_type='nf',
+            jobaction='on_distribute',
+            inst_id=self.csar_id,
+            job_id=self.job_id)
+        JobUtil.add_job_status(self.job_id, 5, "Start CSAR(%s) distribute." % self.csar_id)
+
+        if VnfPackageModel.objects.filter(vnfPackageId=self.csar_id):
+            err_msg = "NF CSAR(%s) already exists." % self.csar_id
+            JobUtil.add_job_status(self.job_id, JOB_ERROR, err_msg)
+            return
+
+        artifact = sdc.get_artifact(sdc.ASSETTYPE_RESOURCES, self.csar_id)
+        local_path = os.path.join(GENERICPARSER_ROOT_PATH, self.csar_id)
+        csar_name = "%s.csar" % artifact.get("name", self.csar_id)
+        local_file_name = sdc.download_artifacts(artifact["toscaModelURL"], local_path, csar_name)
+        if local_file_name.endswith(".csar") or local_file_name.endswith(".zip"):
+            artifact_vnf_file = fileutil.unzip_file(local_file_name, local_path, "Artifacts/Deployment/OTHER/vnf.csar")
+            if os.path.exists(artifact_vnf_file):
+                local_file_name = artifact_vnf_file
+
+        vnfd_json = toscaparsers.parse_vnfd(local_file_name)
+        vnfd = json.JSONDecoder().decode(vnfd_json)
+
+        if not vnfd.get("vnf"):
+            raise GenericparserException("VNF propeties and metadata in VNF Package(id=%s) are empty." % self.csar_id)
+
+        vnfd_id = vnfd["vnf"]["properties"].get("descriptor_id", "")
+        if VnfPackageModel.objects.filter(vnfdId=vnfd_id):
+            logger.error("VNF package(%s) already exists.", vnfd_id)
+            raise GenericparserException("VNF package(%s) already exists." % vnfd_id)
+        JobUtil.add_job_status(self.job_id, 30, "Save CSAR(%s) to database." % self.csar_id)
+        vnfd_ver = vnfd["vnf"]["properties"].get("descriptor_verison", "")
+        vnf_provider = vnfd["vnf"]["properties"].get("provider", "")
+        vnf_software_version = vnfd["vnf"]["properties"].get("software_version", "")
+        vnfd_product_name = vnfd["vnf"]["properties"].get("product_name", "")
+        VnfPackageModel(
+            vnfPackageId=self.csar_id,
+            vnfdId=vnfd_id,
+            vnfVendor=vnf_provider,
+            vnfdProductName=vnfd_product_name,
+            vnfdVersion=vnfd_ver,
+            vnfSoftwareVersion=vnf_software_version,
+            vnfdModel=vnfd_json,
+            localFilePath=local_file_name,
+            vnfPackageUri=csar_name,
+            onboardingState=PKG_STATUS.ONBOARDED,
+            operationalState=PKG_STATUS.ENABLED,
+            usageState=PKG_STATUS.NOT_IN_USE
+        ).save()
+        JobUtil.add_job_status(self.job_id, 100, "CSAR(%s) distribute successfully." % self.csar_id)
+
+    def rollback_distribute(self):
+        try:
+            VnfPackageModel.objects.filter(vnfPackageId=self.csar_id).delete()
+            fileutil.delete_dirs(self.csar_save_path)
+        except Exception as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            logger.error(str(sys.exc_info()))
+
+
+class NfPkgDeleteThread(threading.Thread):
+    """
+    Sdc NF Package Deleting
+    """
+
+    def __init__(self, csar_id, job_id):
+        threading.Thread.__init__(self)
+        self.csar_id = csar_id
+        self.job_id = job_id
+
+    def run(self):
+        try:
+            self.delete_csar()
+        except GenericparserException as e:
+            JobUtil.add_job_status(self.job_id, JOB_ERROR, e.message)
+        except Exception as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            logger.error(str(sys.exc_info()))
+            JobUtil.add_job_status(self.job_id, JOB_ERROR, "Failed to delete CSAR(%s)" % self.csar_id)
+
+    def delete_csar(self):
+        JobUtil.create_job(
+            inst_type='nf',
+            jobaction='delete',
+            inst_id=self.csar_id,
+            job_id=self.job_id)
+        JobUtil.add_job_status(self.job_id, 5, "Start to delete CSAR(%s)." % self.csar_id)
+
+        VnfPackageModel.objects.filter(vnfPackageId=self.csar_id).delete()
+
+        JobUtil.add_job_status(self.job_id, 50, "Delete local CSAR(%s) file." % self.csar_id)
+
+        csar_save_path = os.path.join(GENERICPARSER_ROOT_PATH, self.csar_id)
+        fileutil.delete_dirs(csar_save_path)
+
+        JobUtil.add_job_status(self.job_id, 100, "Delete CSAR(%s) successfully." % self.csar_id)
+
+
+class NfPackage(object):
+    """
+    Actions for sdc nf package.
+    """
+
+    def __init__(self):
+        pass
+
+    def get_csars(self):
+        csars = []
+        nf_pkgs = VnfPackageModel.objects.filter()
+        for nf_pkg in nf_pkgs:
+            ret = self.get_csar(nf_pkg.vnfPackageId)
+            csars.append(ret[1])
+        return [0, csars]
+
+    def get_csar(self, csar_id):
+        pkg_info = {}
+        nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=csar_id)
+        if not nf_pkg:
+            nf_pkg = VnfPackageModel.objects.filter(vnfdId=csar_id)
+
+        if nf_pkg:
+            db_csar_id = nf_pkg[0].vnfPackageId
+            pkg_info["vnfdId"] = nf_pkg[0].vnfdId
+            pkg_info["vnfPackageId"] = nf_pkg[0].vnfPackageId
+            pkg_info["vnfdProvider"] = nf_pkg[0].vnfVendor
+            pkg_info["vnfdVersion"] = nf_pkg[0].vnfdVersion
+            pkg_info["vnfVersion"] = nf_pkg[0].vnfSoftwareVersion
+            pkg_info["csarName"] = nf_pkg[0].vnfPackageUri
+            pkg_info["vnfdModel"] = nf_pkg[0].vnfdModel
+            pkg_info["downloadUrl"] = "http://%s:%s/%s/%s/%s" % (
+                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["ip"],
+                REG_TO_MSB_REG_PARAM[0]["nodes"][0]["port"],
+                GENERICPARSER_URL_PATH,
+                db_csar_id,
+                nf_pkg[0].vnfPackageUri)
+        else:
+            raise GenericparserException("Vnf package[%s] not Found." % csar_id)
+
+        csar_info = {
+            "csarId": db_csar_id,
+            "packageInfo": pkg_info,
+            "imageInfo": []
+        }
+        return [0, csar_info]
diff --git a/genericparser/packages/biz/service_descriptor.py b/genericparser/packages/biz/service_descriptor.py
new file mode 100644 (file)
index 0000000..b8cba6d
--- /dev/null
@@ -0,0 +1,130 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import uuid
+
+
+from genericparser.packages.const import PKG_STATUS
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from genericparser.pub.exceptions import GenericparserException, PackageNotFoundException
+from genericparser.pub.utils import toscaparsers, fileutil
+from genericparser.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
+class ServiceDescriptor(object):
+    """
+    Action for Service Descriptor
+    """
+
+    def __init__(self):
+        pass
+
+    def create(self, data, csar_id=None):
+        logger.info('Start to create a ServiceD...')
+        user_defined_data = ignore_case_get(data, 'userDefinedData', {})
+        data = {
+            'id': csar_id if csar_id else str(uuid.uuid4()),
+            'servicedOnboardingState': PKG_STATUS.CREATED,
+            'servicedOperationalState': PKG_STATUS.DISABLED,
+            'servicedUsageState': PKG_STATUS.NOT_IN_USE,
+            'userDefinedData': user_defined_data,
+            '_links': None  # TODO
+        }
+        ServicePackageModel.objects.create(
+            servicePackageId=data['id'],
+            onboardingState=data['servicedOnboardingState'],
+            operationalState=data['servicedOperationalState'],
+            usageState=data['servicedUsageState'],
+            userDefinedData=json.dumps(user_defined_data)
+        )
+        logger.info('A ServiceD(%s) has been created.' % data['id'])
+        return data
+
+    def parse_serviced_and_save(self, serviced_info_id, local_file_name):
+        logger.info('Start to process ServiceD(%s)...' % serviced_info_id)
+        service_pkgs = ServicePackageModel.objects.filter(servicePackageId=serviced_info_id)
+        service_pkgs.update(onboardingState=PKG_STATUS.PROCESSING)
+
+        serviced_json = toscaparsers.parse_sd(local_file_name)
+        serviced = json.JSONDecoder().decode(serviced_json)
+
+        serviced_id = serviced.get("service", {}).get("properties", {}).get("descriptor_id", "")
+        serviced_name = serviced.get("service", {}).get("properties", {}).get("name", "")
+        serviced_version = serviced.get("service", {}).get("properties", {}).get("version", "")
+        serviced_designer = serviced.get("service", {}).get("properties", {}).get("designer", "")
+        invariant_id = serviced.get("service", {}).get("properties", {}).get("invariant_id", "")
+        if serviced_id == "":
+            raise GenericparserException("serviced_id(%s) does not exist in metadata." % serviced_id)
+        other_nspkg = ServicePackageModel.objects.filter(servicedId=serviced_id)
+        if other_nspkg and other_nspkg[0].servicePackageId != serviced_info_id:
+            logger.warn("ServiceD(%s,%s) already exists.", serviced_id, other_nspkg[0].servicePackageId)
+            raise GenericparserException("ServiceD(%s) already exists." % serviced_id)
+
+        for vnf in serviced["vnfs"]:
+            vnfd_id = vnf["properties"].get("descriptor_id", "undefined")
+            if vnfd_id == "undefined":
+                vnfd_id = vnf["properties"].get("id", "undefined")
+            pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+            if not pkg:
+                pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfd_id)
+            if not pkg:
+                vnfd_name = vnf.get("vnf_id", "undefined")
+                logger.error("[%s] is not distributed.", vnfd_name)
+                raise GenericparserException("VNF package(%s) is not distributed." % vnfd_id)
+
+        for pnf in serviced["pnfs"]:
+            pnfd_id = pnf["properties"].get("descriptor_id", "undefined")
+            if pnfd_id == "undefined":
+                pnfd_id = pnf["properties"].get("id", "undefined")
+            pkg = PnfPackageModel.objects.filter(pnfdId=pnfd_id)
+            if not pkg:
+                pkg = PnfPackageModel.objects.filter(pnfPackageId=pnfd_id)
+            if not pkg:
+                pnfd_name = pnf.get("pnf_id", "undefined")
+                logger.error("[%s] is not distributed.", pnfd_name)
+                raise GenericparserException("PNF package(%s) is not distributed." % pnfd_name)
+
+        service_pkgs.update(
+            servicedId=serviced_id,
+            servicedName=serviced_name,
+            servicedDesigner=serviced_designer,
+            servicedDescription=serviced.get("description", ""),
+            servicedVersion=serviced_version,
+            invariantId=invariant_id,
+            onboardingState=PKG_STATUS.ONBOARDED,
+            operationalState=PKG_STATUS.ENABLED,
+            usageState=PKG_STATUS.NOT_IN_USE,
+            servicePackageUri=local_file_name,
+            sdcCsarId=serviced_info_id,
+            localFilePath=local_file_name,
+            servicedModel=serviced_json
+        )
+        logger.info('ServiceD(%s) has been processed.' % serviced_info_id)
+
+    def delete_single(self, serviced_info_id):
+        logger.info('Start to delete ServiceD(%s)...' % serviced_info_id)
+        service_pkgs = ServicePackageModel.objects.filter(servicePackageId=serviced_info_id)
+        if not service_pkgs.exists():
+            logger.warn('ServiceD(%s) not found.' % serviced_info_id)
+            raise PackageNotFoundException("Service package[%s] not Found." % serviced_info_id)
+        service_pkgs.delete()
+        service_pkg_path = os.path.join(GENERICPARSER_ROOT_PATH, serviced_info_id)
+        fileutil.delete_dirs(service_pkg_path)
+        logger.info('ServiceD(%s) has been deleted.' % serviced_info_id)
diff --git a/genericparser/packages/biz/vnf_package.py b/genericparser/packages/biz/vnf_package.py
new file mode 100644 (file)
index 0000000..b655f1f
--- /dev/null
@@ -0,0 +1,228 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+import sys
+import threading
+import traceback
+import urllib2
+import uuid
+
+from genericparser.packages.biz.common import parse_file_range, read, save
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.database.models import VnfPackageModel, NSPackageModel
+from genericparser.pub.exceptions import GenericparserException, ResourceNotFoundException
+from genericparser.pub.utils.values import ignore_case_get
+from genericparser.pub.utils import fileutil, toscaparsers
+from genericparser.packages.const import PKG_STATUS
+
+
+logger = logging.getLogger(__name__)
+
+
+class VnfPackage(object):
+
+    def __init__(self):
+        pass
+
+    def create_vnf_pkg(self, data):
+        user_defined_data = ignore_case_get(data, "userDefinedData", {})
+        vnf_pkg_id = str(uuid.uuid4())
+        VnfPackageModel.objects.create(
+            vnfPackageId=vnf_pkg_id,
+            onboardingState=PKG_STATUS.CREATED,
+            operationalState=PKG_STATUS.DISABLED,
+            usageState=PKG_STATUS.NOT_IN_USE,
+            userDefinedData=json.dumps(user_defined_data)
+        )
+        data = {
+            "id": vnf_pkg_id,
+            "onboardingState": PKG_STATUS.CREATED,
+            "operationalState": PKG_STATUS.DISABLED,
+            "usageState": PKG_STATUS.NOT_IN_USE,
+            "userDefinedData": user_defined_data,
+            "_links": None
+        }
+        return data
+
+    def query_multiple(self):
+        pkgs_info = []
+        nf_pkgs = VnfPackageModel.objects.filter()
+        for nf_pkg in nf_pkgs:
+            ret = fill_response_data(nf_pkg)
+            pkgs_info.append(ret)
+        return pkgs_info
+
+    def query_single(self, vnf_pkg_id):
+        nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+        if not nf_pkg.exists():
+            logger.error('VNF package(%s) does not exist.' % vnf_pkg_id)
+            raise ResourceNotFoundException('VNF package(%s) does not exist.' % vnf_pkg_id)
+        return fill_response_data(nf_pkg[0])
+
+    def delete_vnf_pkg(self, vnf_pkg_id):
+        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+        if not vnf_pkg.exists():
+            logger.debug('VNF package(%s) has been deleted.' % vnf_pkg_id)
+            return
+        '''
+        if vnf_pkg[0].operationalState != PKG_STATUS.DISABLED:
+            raise GenericparserException("The VNF package (%s) is not disabled" % vnf_pkg_id)
+        if vnf_pkg[0].usageState != PKG_STATUS.NOT_IN_USE:
+            raise GenericparserException("The VNF package (%s) is in use" % vnf_pkg_id)
+        '''
+        del_vnfd_id = vnf_pkg[0].vnfdId
+        ns_pkgs = NSPackageModel.objects.all()
+        for ns_pkg in ns_pkgs:
+            nsd_model = None
+            if ns_pkg.nsdModel:
+                nsd_model = json.JSONDecoder().decode(ns_pkg.nsdModel)
+            if not nsd_model:
+                continue
+            for vnf in nsd_model['vnfs']:
+                if del_vnfd_id == vnf["properties"]["descriptor_id"]:
+                    raise GenericparserException('VNFD(%s) is referenced.' % del_vnfd_id)
+        vnf_pkg.delete()
+        vnf_pkg_path = os.path.join(GENERICPARSER_ROOT_PATH, vnf_pkg_id)
+        fileutil.delete_dirs(vnf_pkg_path)
+        logger.info('VNF package(%s) has been deleted.' % vnf_pkg_id)
+
+    def upload(self, vnf_pkg_id, remote_file):
+        logger.info('Start to upload VNF package(%s)...' % vnf_pkg_id)
+        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+        # if vnf_pkg[0].onboardingState != PKG_STATUS.CREATED:
+        #     logger.error("VNF package(%s) is not CREATED" % vnf_pkg_id)
+        #     raise GenericparserException("VNF package(%s) is not CREATED" % vnf_pkg_id)
+        vnf_pkg.update(onboardingState=PKG_STATUS.UPLOADING)
+
+        local_file_name = save(remote_file, vnf_pkg_id)
+        logger.info('VNF package(%s) has been uploaded.' % vnf_pkg_id)
+        return local_file_name
+
+    def download(self, vnf_pkg_id, file_range):
+        logger.info('Start to download VNF package(%s)...' % vnf_pkg_id)
+        nf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+        if not nf_pkg.exists():
+            logger.error('VNF package(%s) does not exist.' % vnf_pkg_id)
+            raise ResourceNotFoundException('VNF package(%s) does not exist.' % vnf_pkg_id)
+        if nf_pkg[0].onboardingState != PKG_STATUS.ONBOARDED:
+            raise GenericparserException("VNF package (%s) is not on-boarded" % vnf_pkg_id)
+
+        local_file_path = nf_pkg[0].localFilePath
+        start, end = parse_file_range(local_file_path, file_range)
+        logger.info('VNF package (%s) has been downloaded.' % vnf_pkg_id)
+        return read(local_file_path, start, end)
+
+
+class VnfPkgUploadThread(threading.Thread):
+    def __init__(self, data, vnf_pkg_id):
+        threading.Thread.__init__(self)
+        self.vnf_pkg_id = vnf_pkg_id
+        self.data = data
+        self.upload_file_name = None
+
+    def run(self):
+        try:
+            self.upload_vnf_pkg_from_uri()
+            parse_vnfd_and_save(self.vnf_pkg_id, self.upload_file_name)
+        except GenericparserException as e:
+            logger.error(e.message)
+        except Exception as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            logger.error(str(sys.exc_info()))
+
+    def upload_vnf_pkg_from_uri(self):
+        logger.info("Start to upload VNF packge(%s) from URI..." % self.vnf_pkg_id)
+        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=self.vnf_pkg_id)
+        if vnf_pkg[0].onboardingState != PKG_STATUS.CREATED:
+            logger.error("VNF package(%s) is not CREATED" % self.vnf_pkg_id)
+            raise GenericparserException("VNF package (%s) is not created" % self.vnf_pkg_id)
+        vnf_pkg.update(onboardingState=PKG_STATUS.UPLOADING)
+
+        uri = ignore_case_get(self.data, "addressInformation")
+        request = urllib2.Request(uri)
+        response = urllib2.urlopen(request)
+
+        local_file_dir = os.path.join(GENERICPARSER_ROOT_PATH, self.vnf_pkg_id)
+        self.upload_file_name = os.path.join(local_file_dir, os.path.basename(uri))
+        if not os.path.exists(local_file_dir):
+            fileutil.make_dirs(local_file_dir)
+        with open(self.upload_file_name, "wb") as local_file:
+            local_file.write(response.read())
+        response.close()
+        logger.info('VNF packge(%s) has been uploaded.' % self.vnf_pkg_id)
+
+
+def fill_response_data(nf_pkg):
+    pkg_info = {}
+    pkg_info["id"] = nf_pkg.vnfPackageId
+    pkg_info["vnfdId"] = nf_pkg.vnfdId
+    pkg_info["vnfProductName"] = nf_pkg.vnfdProductName
+    pkg_info["vnfSoftwareVersion"] = nf_pkg.vnfSoftwareVersion
+    pkg_info["vnfdVersion"] = nf_pkg.vnfdVersion
+    if nf_pkg.checksum:
+        pkg_info["checksum"] = json.JSONDecoder().decode(nf_pkg.checksum)
+    pkg_info["softwareImages"] = None  # TODO
+    pkg_info["additionalArtifacts"] = None  # TODO
+    pkg_info["onboardingState"] = nf_pkg.onboardingState
+    pkg_info["operationalState"] = nf_pkg.operationalState
+    pkg_info["usageState"] = nf_pkg.usageState
+    if nf_pkg.userDefinedData:
+        pkg_info["userDefinedData"] = json.JSONDecoder().decode(nf_pkg.userDefinedData)
+    pkg_info["_links"] = None  # TODO
+    return pkg_info
+
+
+def parse_vnfd_and_save(vnf_pkg_id, vnf_pkg_path):
+    logger.info('Start to process VNF package(%s)...' % vnf_pkg_id)
+    vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+    vnf_pkg.update(onboardingState=PKG_STATUS.PROCESSING)
+    vnfd_json = toscaparsers.parse_vnfd(vnf_pkg_path)
+    vnfd = json.JSONDecoder().decode(vnfd_json)
+
+    if vnfd.get("vnf", "") != "":
+        vnfd_id = vnfd["vnf"]["properties"].get("descriptor_id", "")
+        other_pkg = VnfPackageModel.objects.filter(vnfdId=vnfd_id)
+        if other_pkg and other_pkg[0].vnfPackageId != vnf_pkg_id:
+            logger.error("VNF package(%s,%s) already exists.", other_pkg[0].vnfPackageId, vnfd_id)
+            raise GenericparserException("VNF package(%s) already exists." % vnfd_id)
+        vnf_provider = vnfd["vnf"]["properties"].get("provider", "")
+        vnfd_ver = vnfd["vnf"]["properties"].get("descriptor_verison", "")
+        vnf_software_version = vnfd["vnf"]["properties"].get("software_version", "")
+        vnfd_product_name = vnfd["vnf"]["properties"].get("product_name", "")
+        vnf_pkg.update(
+            vnfPackageId=vnf_pkg_id,
+            vnfdId=vnfd_id,
+            vnfdProductName=vnfd_product_name,
+            vnfVendor=vnf_provider,
+            vnfdVersion=vnfd_ver,
+            vnfSoftwareVersion=vnf_software_version,
+            vnfdModel=vnfd_json,
+            onboardingState=PKG_STATUS.ONBOARDED,
+            operationalState=PKG_STATUS.ENABLED,
+            usageState=PKG_STATUS.NOT_IN_USE,
+            localFilePath=vnf_pkg_path,
+            vnfPackageUri=os.path.split(vnf_pkg_path)[-1]
+        )
+    else:
+        raise GenericparserException("VNF propeties and metadata in VNF Package(id=%s) are empty." % vnf_pkg_id)
+    logger.info('VNF package(%s) has been processed(done).' % vnf_pkg_id)
+
+
+def handle_upload_failed(vnf_pkg_id):
+    vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnf_pkg_id)
+    vnf_pkg.update(onboardingState=PKG_STATUS.CREATED)
diff --git a/genericparser/packages/biz/vnf_pkg_artifacts.py b/genericparser/packages/biz/vnf_pkg_artifacts.py
new file mode 100644 (file)
index 0000000..f2506da
--- /dev/null
@@ -0,0 +1,40 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from genericparser.pub.database.models import VnfPackageModel
+from genericparser.pub.exceptions import ResourceNotFoundException, ArtifactNotFoundException
+from genericparser.pub.utils import fileutil
+
+logger = logging.getLogger(__name__)
+
+
class FetchVnfPkgArtifact(object):
    """Business logic for fetching one artifact out of an on-boarded VNF package."""

    def fetch(self, vnfPkgId, artifactPath):
        """Return the raw bytes of artifact *artifactPath* in package *vnfPkgId*.

        Raises:
            ResourceNotFoundException: no package with the given id exists.
            ArtifactNotFoundException: the artifact cannot be located, or the
                stored package file is not a csar/zip archive.
        """
        logger.debug("FetchVnfPkgArtifact--get--single--artifact--biz::>"
                     "ID: %s path: %s" % (vnfPkgId, artifactPath))
        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId=vnfPkgId)
        if not vnf_pkg.exists():
            err_msg = "NF Package (%s) doesn't exists." % vnfPkgId
            raise ResourceNotFoundException(err_msg)
        vnf_pkg = vnf_pkg.get()
        local_path = vnf_pkg.localFilePath
        # Bug fix: previously a non-csar/zip package fell through to an
        # unbound local 'artifact_path' and crashed with NameError; raise a
        # clear "not found" error instead.
        if not (local_path.endswith(".csar") or local_path.endswith(".zip")):
            raise ArtifactNotFoundException("Couldn't find artifact %s" % artifactPath)
        vnf_extract_path = fileutil.unzip_csar_to_tmp(local_path)
        artifact_path = fileutil.get_artifact_path(vnf_extract_path, artifactPath)
        if not artifact_path:
            raise ArtifactNotFoundException("Couldn't find artifact %s" % artifactPath)
        # Context manager guarantees the file handle is closed (the original
        # leaked it on open().read()).
        with open(artifact_path, 'rb') as artifact_file:
            file_content = artifact_file.read()
        return file_content
diff --git a/genericparser/packages/biz/vnf_pkg_subscription.py b/genericparser/packages/biz/vnf_pkg_subscription.py
new file mode 100644 (file)
index 0000000..349db08
--- /dev/null
@@ -0,0 +1,183 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ast
+import json
+import logging
+import os
+import requests
+import uuid
+
+from collections import Counter
+from rest_framework import status
+
+from genericparser.packages import const
+from genericparser.pub.database.models import VnfPkgSubscriptionModel
+from genericparser.pub.exceptions import VnfPkgSubscriptionException,\
+    VnfPkgDuplicateSubscriptionException, SubscriptionDoesNotExistsException
+from genericparser.pub.utils.values import ignore_case_get
+
+
+logger = logging.getLogger(__name__)
+
+ROOT_FILTERS = {
+    "notificationTypes": "notification_types",
+    "vnfdId": "vnfd_id",
+    "vnfPkgId": "vnf_pkg_id",
+    "operationalState": "operation_states",
+    "usageState": "usage_states"
+}
+
+
def is_filter_type_equal(new_filter, existing_filter):
    """Return True when the two filters contain the same values, ignoring order."""
    # Counter compares multiset contents, so duplicates matter but order does not.
    return Counter(existing_filter) == Counter(new_filter)
+
+
class CreateSubscription(object):
    """Creates a VNF-package-management subscription.

    Validates the callback URI (must answer 204), the authentication block
    and filter uniqueness before persisting the subscription to the DB.
    """

    def __init__(self, data):
        # Raw request body plus the individual filter attributes pulled out
        # case-insensitively; missing attributes default to empty containers.
        self.data = data
        self.filter = ignore_case_get(self.data, "filters", {})
        self.callback_uri = ignore_case_get(self.data, "callbackUri")
        self.authentication = ignore_case_get(self.data, "authentication", {})
        self.notification_types = ignore_case_get(self.filter, "notificationTypes", [])
        self.operation_states = ignore_case_get(self.filter, "operationalState", [])
        self.usage_states = ignore_case_get(self.filter, "usageState", [])
        self.vnfd_id = ignore_case_get(self.filter, "vnfdId", [])
        self.vnf_pkg_id = ignore_case_get(self.filter, "vnfPkgId", [])
        self.vnf_products_from_provider = \
            ignore_case_get(self.filter, "vnfProductsFromProviders", {})

    def check_callbackuri_connection(self):
        """Probe the callback URI; it must answer 204 No Content.

        Raises:
            VnfPkgSubscriptionException: URI unreachable, or any status
                other than 204 returned.
        """
        logger.debug("SubscribeNotification-post::> Sending GET request "
                     "to %s" % self.callback_uri)
        # Bug fix: the status-code check used to live inside this try block,
        # so its specific exception was swallowed by 'except Exception' and
        # re-raised with the generic message (which also lacked a space in
        # "status code").
        try:
            response = requests.get(self.callback_uri, timeout=2)
        except Exception:
            raise VnfPkgSubscriptionException("callbackUri %s didn't return 204 status "
                                              "code." % self.callback_uri)
        if response.status_code != status.HTTP_204_NO_CONTENT:
            raise VnfPkgSubscriptionException("callbackUri %s returns %s status "
                                              "code." % (self.callback_uri, response.status_code))

    def do_biz(self):
        """Validate and persist the subscription; return it as a dict."""
        self.subscription_id = str(uuid.uuid4())
        self.check_callbackuri_connection()
        self.check_valid_auth_info()
        self.check_valid()
        self.save_db()
        subscription = VnfPkgSubscriptionModel.objects.get(subscription_id=self.subscription_id)
        if subscription:
            return subscription.toDict()

    def check_valid_auth_info(self):
        """Ensure the provided auth parameters match the declared authType.

        Raises:
            VnfPkgSubscriptionException: params present for a type that is
                not listed in authType.
        """
        logger.debug("SubscribeNotification--post::> Validating Auth "
                     "details if provided")
        # Bug fix: .get("authType") may be None when the key is absent,
        # which made the 'in' test raise TypeError; default to [].
        auth_types = self.authentication.get("authType", [])
        if self.authentication.get("paramsBasic", {}) and \
                const.BASIC not in auth_types:
            raise VnfPkgSubscriptionException('Auth type should be ' + const.BASIC)
        if self.authentication.get("paramsOauth2ClientCredentials", {}) and \
                const.OAUTH2_CLIENT_CREDENTIALS not in auth_types:
            raise VnfPkgSubscriptionException('Auth type should be ' + const.OAUTH2_CLIENT_CREDENTIALS)

    def check_filter_exists(self, sub):
        """Return True when *sub* (a DB row) carries the same filters as self."""
        # Check the usage states, operationStates
        for filter_type in ["operation_states", "usage_states"]:
            if not is_filter_type_equal(getattr(self, filter_type),
                                        ast.literal_eval(getattr(sub, filter_type))):
                return False
        # If all the above types are same then check id filters
        for id_filter in ["vnfd_id", "vnf_pkg_id"]:
            if not is_filter_type_equal(getattr(self, id_filter),
                                        ast.literal_eval(getattr(sub, id_filter))):
                return False
        return True

    def check_valid(self):
        """Reject a duplicate subscription (same callbackUri and same filters)."""
        logger.debug("SubscribeNotification--post::> Checking DB if "
                     "callbackUri already exists")
        subscriptions = VnfPkgSubscriptionModel.objects.filter(callback_uri=self.callback_uri)
        if not subscriptions.exists():
            return True
        for subscription in subscriptions:
            if self.check_filter_exists(subscription):
                raise VnfPkgDuplicateSubscriptionException(
                    "Already Subscription (%s) exists with the "
                    "same callbackUri and filter" % subscription.subscription_id)
        return True

    def save_db(self):
        """Persist the subscription row; list/dict attributes are JSON-encoded."""
        logger.debug("SubscribeNotification--post::> Saving the subscription "
                     "%s to the database" % self.subscription_id)
        links = {
            "self": {
                "href": os.path.join(const.VNFPKG_SUBSCRIPTION_ROOT_URI, self.subscription_id)
            }
        }
        VnfPkgSubscriptionModel.objects.create(
            subscription_id=self.subscription_id,
            callback_uri=self.callback_uri,
            notification_types=json.dumps(self.notification_types),
            auth_info=json.dumps(self.authentication),
            usage_states=json.dumps(self.usage_states),
            operation_states=json.dumps(self.operation_states),
            vnf_products_from_provider=json.dumps(self.vnf_products_from_provider),
            vnfd_id=json.dumps(self.vnfd_id),
            vnf_pkg_id=json.dumps(self.vnf_pkg_id),
            links=json.dumps(links))
        logger.debug('Create Subscription[%s] success', self.subscription_id)
+
+
class QuerySubscription(object):
    """Read access to VNF package subscriptions."""

    def query_multi_subscriptions(self, params):
        """Return subscriptions matching the request query parameters.

        Only keys listed in ROOT_FILTERS are honoured (as case-insensitive
        substring matches); unknown keys are ignored.

        Returns:
            list of subscription dicts, possibly empty.
        """
        query_data = {}
        logger.debug("QuerySubscription--get--multi--subscriptions--biz::> Check "
                     "for filters in query params %s" % params)
        # Bug fix: dict.iteritems() is Python 2 only; items() works on both.
        for query, value in params.items():
            if query in ROOT_FILTERS:
                query_data[ROOT_FILTERS[query] + '__icontains'] = value
        # Query the database with filters if the request has fields in request params, else fetch all records
        if query_data:
            subscriptions = VnfPkgSubscriptionModel.objects.filter(**query_data)
        else:
            subscriptions = VnfPkgSubscriptionModel.objects.all()
        if not subscriptions.exists():
            return []
        return [subscription.toDict() for subscription in subscriptions]

    def query_single_subscription(self, subscription_id):
        """Return the single subscription with *subscription_id* as a dict.

        Raises:
            SubscriptionDoesNotExistsException: no such subscription.
        """
        logger.debug("QuerySingleSubscriptions--get--single--subscription--biz::> "
                     "ID: %s" % subscription_id)

        subscription = VnfPkgSubscriptionModel.objects.filter(
            subscription_id=subscription_id)
        if not subscription.exists():
            raise SubscriptionDoesNotExistsException("Subscription with ID: %s "
                                                     "does not exists" % subscription_id)
        return subscription[0].toDict()
+
+
class TerminateSubscription(object):
    """Deletes an individual VNF package subscription."""

    def terminate(self, subscription_id):
        """Delete the subscription with *subscription_id*; raise if unknown."""
        logger.debug("TerminateSubscriptions--delete--biz::> "
                     "ID: %s" % subscription_id)

        matches = VnfPkgSubscriptionModel.objects.filter(
            subscription_id=subscription_id)
        if not matches.exists():
            raise SubscriptionDoesNotExistsException("Subscription with ID: %s "
                                                     "does not exists" % subscription_id)
        matches[0].delete()
diff --git a/genericparser/packages/const.py b/genericparser/packages/const.py
new file mode 100644 (file)
index 0000000..86fbb79
--- /dev/null
@@ -0,0 +1,47 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from genericparser.pub.utils.jobutil import enum
+
# Package lifecycle states shared by the on-boarding, operational and
# usage state machines of NS/VNF/PNF packages.
PKG_STATUS = enum(CREATED="CREATED", UPLOADING="UPLOADING", PROCESSING="PROCESSING", ONBOARDED="ONBOARDED",
                  IN_USE="IN_USE", NOT_IN_USE="NOT_IN_USE", ENABLED="ENABLED", DISABLED="DISABLED")

# Authentication schemes a subscriber may declare for notification callbacks.
AUTH_TYPES = ["BASIC", "OAUTH2_CLIENT_CREDENTIALS", "TLS_CERT"]

BASIC = "BASIC"

OAUTH2_CLIENT_CREDENTIALS = "OAUTH2_CLIENT_CREDENTIALS"

# Notification type names emitted for VNF package subscriptions.
NOTIFICATION_TYPES = ["VnfPackageOnboardingNotification", "VnfPackageChangeNotification"]

# Root URIs under which subscription resources are exposed (used to build
# the "self" links stored with each subscription).
VNFPKG_SUBSCRIPTION_ROOT_URI = "api/vnfpkgm/v1/subscriptions/"

NSDM_SUBSCRIPTION_ROOT_URI = "api/nsd/v1/subscriptions/"

# Attribute filters accepted when creating NSD-management subscriptions.
NSDM_NOTIFICATION_FILTERS = ["notificationTypes", "nsdInfoId", "nsdName",
                             "nsdId", "nsdVersion", "nsdDesigner",
                             "nsdInvariantId", "vnfPkgIds", "pnfdInfoIds",
                             "nestedNsdInfoIds", "nsdOnboardingState",
                             "nsdOperationalState", "nsdUsageState",
                             "pnfdId", "pnfdName", "pnfdVersion",
                             "pnfdProvider", "pnfdInvariantId",
                             "pnfdOnboardingState", "pnfdUsageState"]

# Notification type names emitted for NSD-management subscriptions.
NSDM_NOTIFICATION_TYPES = ["NsdOnBoardingNotification",
                           "NsdOnboardingFailureNotification",
                           "NsdChangeNotification",
                           "NsdDeletionNotification",
                           "PnfdOnBoardingNotification",
                           "PnfdOnBoardingFailureNotification",
                           "PnfdDeletionNotification"]
diff --git a/genericparser/packages/serializers/__init__.py b/genericparser/packages/serializers/__init__.py
new file mode 100644 (file)
index 0000000..342c2a8
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/packages/serializers/checksum.py b/genericparser/packages/serializers/checksum.py
new file mode 100644 (file)
index 0000000..1296626
--- /dev/null
@@ -0,0 +1,30 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class ChecksumSerializer(serializers.Serializer):
    """Checksum of a package content file (algorithm name plus hash value)."""
    algorithm = serializers.CharField(
        help_text="Name of the algorithm used to generate the checksum.",
        required=True,
        allow_null=False,
        allow_blank=False
    )
    hash = serializers.CharField(
        help_text="The hexadecimal value of the checksum.",
        required=True,
        allow_null=True,
        allow_blank=False
    )
diff --git a/genericparser/packages/serializers/create_nsd_info_request.py b/genericparser/packages/serializers/create_nsd_info_request.py
new file mode 100644 (file)
index 0000000..24fe3b7
--- /dev/null
@@ -0,0 +1,29 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class CreateNsdInfoRequestSerializer(serializers.Serializer):
    """Request body for creating an individual NS descriptor resource."""
    userDefinedData = serializers.DictField(
        help_text="User-defined data for the NS descriptor resource to be created."
        "It shall be present when the user defined data is set for the individual NS "
        "descriptor resource to be created.",
        child=serializers.CharField(
            help_text='Key Value Pairs',
            allow_blank=True
        ),
        required=False,
        allow_null=True
    )
diff --git a/genericparser/packages/serializers/create_pnfd_info_request.py b/genericparser/packages/serializers/create_pnfd_info_request.py
new file mode 100644 (file)
index 0000000..01d8229
--- /dev/null
@@ -0,0 +1,29 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class CreatePnfdInfoRequestSerializer(serializers.Serializer):
    """Request body for creating an individual PNF descriptor resource."""
    userDefinedData = serializers.DictField(
        help_text="User-defined data for the PNF descriptor resource to be created."
        "It shall be present when the user defined data is set for the individual "
        "PNF descriptor resource to be created.",
        child=serializers.CharField(
            help_text='Key Value Pairs',
            allow_blank=True
        ),
        required=False,
        allow_null=True
    )
diff --git a/genericparser/packages/serializers/create_vnf_pkg_info_req.py b/genericparser/packages/serializers/create_vnf_pkg_info_req.py
new file mode 100644 (file)
index 0000000..6da281d
--- /dev/null
@@ -0,0 +1,27 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class CreateVnfPkgInfoRequestSerializer(serializers.Serializer):
    """Request body for creating an individual VNF package resource."""
    userDefinedData = serializers.DictField(
        help_text="User defined data for the VNF package.",
        child=serializers.CharField(
            help_text="KeyValue Pairs",
            allow_blank=True
        ),
        required=False,
        allow_null=True
    )
diff --git a/genericparser/packages/serializers/genericparser_serializers.py b/genericparser/packages/serializers/genericparser_serializers.py
new file mode 100644 (file)
index 0000000..7bcb9ef
--- /dev/null
@@ -0,0 +1,442 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from genericparser.pub.utils.toscaparsers.nsdmodel import EtsiNsdInfoModel
+from genericparser.pub.utils.toscaparsers.vnfdmodel import EtsiVnfdInfoModel
+
+
class PostJobRequestSerializer(serializers.Serializer):
    """Request body for reporting job progress."""
    progress = serializers.CharField(
        help_text="Job Progress",
        required=False
    )
    desc = serializers.CharField(
        help_text="Description",
        required=False
    )
    errcode = serializers.CharField(
        help_text="Error Code",
        required=False
    )
+
+
class JobResponseHistoryListSerializer(serializers.Serializer):
    """One historical status record of a job."""
    status = serializers.CharField(
        help_text="Status",
        required=False
    )
    progress = serializers.CharField(
        help_text="Job Progress",
        required=False
    )
    statusDescription = serializers.CharField(
        help_text="Status Description",
        required=False
    )
    errorCode = serializers.CharField(
        help_text="Error Code",
        required=False,
        allow_null=True
    )
    responseId = serializers.CharField(
        help_text="Response Id",
        required=False
    )
+
+
class JobResponseDescriptorSerializer(serializers.Serializer):
    """Current job status plus the list of earlier status records."""
    status = serializers.CharField(
        help_text="Status",
        required=False
    )
    progress = serializers.CharField(
        help_text="Job Progress",
        required=False
    )
    statusDescription = serializers.CharField(
        help_text="Status Description",
        required=False
    )
    errorCode = serializers.CharField(
        help_text="Error Code",
        required=False,
        allow_null=True
    )
    responseId = serializers.CharField(
        help_text="Response Id",
        required=False
    )
    responseHistoryList = JobResponseHistoryListSerializer(
        help_text="Response History List",
        many=True,
        required=False
    )
+
+
class GetJobResponseSerializer(serializers.Serializer):
    """Response body when querying a job by id."""
    jobId = serializers.CharField(
        help_text="Job Id",
        required=False
    )
    responseDescriptor = JobResponseDescriptorSerializer(
        help_text="Job Response Descriptor",
        required=False
    )
+
+
class PostJobResponseResultSerializer(serializers.Serializer):
    """Result payload returned after posting a job status update."""
    result = serializers.CharField(
        help_text="Result",
        required=True
    )
    msg = serializers.CharField(
        help_text="Message",
        required=False
    )
+
+
class InternalErrorRequestSerializer(serializers.Serializer):
    """Generic internal-error descriptor (error code plus optional message)."""
    error = serializers.CharField(
        help_text="Error",
        required=True
    )
    errorMessage = serializers.CharField(
        help_text="Error Message",
        required=False
    )
+
+
class NsPackageDistributeRequestSerializer(serializers.Serializer):
    """Request body for distributing an NS package identified by its CSAR id."""
    csarId = serializers.CharField(
        help_text="csarId",
        required=True
    )
+
+
class NsPackageDistributeResponseSerializer(serializers.Serializer):
    """Response body of an NS package distribution request."""
    status = serializers.CharField(
        help_text="status",
        required=True
    )
    statusDescription = serializers.CharField(
        help_text="statusDescription",
        required=True
    )
    errorCode = serializers.CharField(
        help_text="errorCode",
        required=True,
        allow_null=True
    )
+
+
class NsPackageInfoSerializer(serializers.Serializer):
    """Metadata describing one on-boarded NS package."""
    nsdId = serializers.CharField(
        help_text="NSD ID",
        required=False,
        allow_null=True
    )
    nsPackageId = serializers.CharField(
        help_text="NS Package ID",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    nsdProvider = serializers.CharField(
        help_text="NSD Provider",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    nsdVersion = serializers.CharField(
        help_text="NSD Version",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    csarName = serializers.CharField(
        help_text="CSAR name",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    nsdModel = serializers.CharField(
        help_text="NSD Model",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    downloadUrl = serializers.CharField(
        help_text="URL to download NSD Model",
        required=False,
        allow_null=True
    )
+
+
class NsPackageSerializer(serializers.Serializer):
    """One NS package entry: CSAR id plus its package metadata."""
    csarId = serializers.CharField(
        help_text="CSAR ID",
        required=False,
        allow_null=True
    )
    packageInfo = NsPackageInfoSerializer(
        help_text="NS Package Info",
        required=False,
        allow_null=True
    )
+
+
class NsPackagesSerializer(serializers.ListSerializer):
    """List of NS package entries."""
    child = NsPackageSerializer()
+
+
class ServicePackageDistributeRequestSerializer(serializers.Serializer):
    """Request body for distributing a service package identified by CSAR id."""
    csarId = serializers.CharField(
        help_text="csarId",
        required=True
    )
+
+
class ServicePackageInfoSerializer(serializers.Serializer):
    """Metadata describing one on-boarded service package."""
    servicedId = serializers.CharField(
        help_text="ServiceD ID",
        required=False,
        allow_null=True
    )
    servicePackageId = serializers.CharField(
        help_text="Service Package ID",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    servicedProvider = serializers.CharField(
        help_text="ServiceD Provider",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    servicedVersion = serializers.CharField(
        help_text="ServiceD Version",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    csarName = serializers.CharField(
        help_text="CSAR name",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    servicedModel = serializers.CharField(
        help_text="ServiceD Model",
        allow_blank=True,
        required=False,
        allow_null=True
    )
    downloadUrl = serializers.CharField(
        help_text="URL to download ServiceD Model",
        required=False,
        allow_null=True
    )
+
+
class ServicePackageSerializer(serializers.Serializer):
    """One service package entry: CSAR id plus its package metadata."""
    csarId = serializers.CharField(
        help_text="CSAR ID",
        required=False,
        allow_null=True
    )
    packageInfo = ServicePackageInfoSerializer(
        help_text="Service Package Info",
        required=False,
        allow_null=True
    )
+
+
class ServicePackagesSerializer(serializers.ListSerializer):
    """List of service package entries."""
    child = ServicePackageSerializer()
+
+
class NfPackageDistributeRequestSerializer(serializers.Serializer):
    """Request body for distributing (on-boarding) a VNF package."""
    csarId = serializers.CharField(
        help_text="CSAR ID",
        required=True
    )
    # Bug fix: the help_texts of vimIds and labVimId were swapped
    # (vimIds said "A string...", labVimId said "A list of VIM IDs.").
    vimIds = serializers.ListField(
        help_text="A list of VIM IDs.",
        child=serializers.CharField(),
        required=False
    )
    labVimId = serializers.CharField(
        help_text="A string for the lab VIM ID.",
        allow_blank=True,
        required=False
    )
+
+
class NfPackageInfoSerializer(serializers.Serializer):
    """Metadata describing one on-boarded VNF package."""
    vnfdId = serializers.CharField(
        help_text="VNFD ID",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    vnfPackageId = serializers.CharField(
        help_text="VNF Package ID",
        required=True
    )
    vnfdProvider = serializers.CharField(
        help_text="VNFD Provider",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    vnfdVersion = serializers.CharField(
        help_text="VNFD Version",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    vnfVersion = serializers.CharField(
        help_text="VNF Version",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    csarName = serializers.CharField(
        help_text="CSAR Name",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    vnfdModel = serializers.CharField(
        help_text="VNFD Model",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    downloadUrl = serializers.CharField(
        help_text="URL to download VNFD Model",
        required=False,
        allow_null=True,
        allow_blank=True
    )
+
+
class NfImageInfoSerializer(serializers.Serializer):
    """Information about one VNF image contained in a package."""
    index = serializers.CharField(
        help_text="Index of VNF Image",
        required=True
    )
    fileName = serializers.CharField(
        help_text="Image file name",
        required=True
    )
    imageId = serializers.CharField(
        help_text="Image ID",
        required=True
    )
    vimId = serializers.CharField(
        help_text="VIM ID",
        required=True
    )
    vimUser = serializers.CharField(
        help_text="User of VIM",
        required=True
    )
    tenant = serializers.CharField(
        help_text="Tenant",
        required=True
    )
    status = serializers.CharField(
        help_text="Status",
        required=True
    )
+
+
class NfPackageSerializer(serializers.Serializer):
    """One VNF package entry: CSAR id, package metadata and image info."""
    csarId = serializers.CharField(
        help_text="CSAR ID",
        required=True
    )
    packageInfo = NfPackageInfoSerializer(
        help_text="VNF Package Info",
        required=True
    )
    imageInfo = NfImageInfoSerializer(
        help_text="Image Info",
        required=False,
        many=True,
        allow_null=True
    )
+
+
class NfPackagesSerializer(serializers.ListSerializer):
    """List of VNF package entries."""
    child = NfPackageSerializer()
+
+
class PostJobResponseSerializer(serializers.Serializer):
    """Response body carrying the id of a newly created job."""
    jobId = serializers.CharField(
        help_text="jobId",
        required=True
    )
+
+
class ParseModelRequestSerializer(serializers.Serializer):
    """Request body for parsing a package model out of a CSAR."""
    csarId = serializers.CharField(
        help_text="CSAR ID",
        required=True
    )
    packageType = serializers.CharField(
        help_text="Package type: VNF, PNF, NS, Service",
        required=False
    )
    inputs = serializers.JSONField(
        help_text="Inputs",
        required=False
    )
+
+
class ParseModelResponseSerializer(serializers.Serializer):
    """Response body carrying the parsed package model as JSON."""
    model = serializers.JSONField(
        help_text="Model",
        required=True
    )
+
+
class EtsiNsdInfoModelSerializer(serializers.ModelSerializer):
    """Serializer mirroring EtsiNsdInfoModel.

    NOTE(review): Meta declares neither 'fields' nor 'exclude'; modern DRF
    (>= 3.3) rejects a ModelSerializer without one of them, and the tosca
    parser model may not be a Django model -- confirm this is ever used.
    """

    class Meta:
        model = EtsiNsdInfoModel
+
+
class EtsiVnfdInfoModelSerializer(serializers.ModelSerializer):
    """Serializer mirroring EtsiVnfdInfoModel.

    NOTE(review): Meta declares neither 'fields' nor 'exclude'; modern DRF
    (>= 3.3) rejects a ModelSerializer without one of them -- confirm the
    intended DRF version before relying on this class.
    """

    class Meta:
        model = EtsiVnfdInfoModel
+
+
class ParseNSPackageResponseSerializer(serializers.Serializer):
    """Response body carrying a parsed NSD model."""
    model = EtsiNsdInfoModelSerializer(
        help_text="NSD Model",
        required=True
    )
+
+
class ParseNfPackageResponseSerializer(serializers.Serializer):
    """Response body carrying a parsed VNFD model."""
    model = EtsiVnfdInfoModelSerializer(
        help_text="VNFD Model",
        required=True
    )
diff --git a/genericparser/packages/serializers/link.py b/genericparser/packages/serializers/link.py
new file mode 100644 (file)
index 0000000..a6a503c
--- /dev/null
@@ -0,0 +1,24 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
class LinkSerializer(serializers.Serializer):
    """A single hypermedia link ("href" to a referenced resource)."""
    href = serializers.CharField(
        help_text='URI of the referenced resource',
        required=True,
        allow_null=False,
        allow_blank=False
    )
diff --git a/genericparser/packages/serializers/nsd_info.py b/genericparser/packages/serializers/nsd_info.py
new file mode 100644 (file)
index 0000000..2206f46
--- /dev/null
@@ -0,0 +1,161 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from problem_details import ProblemDetailsSerializer
+from link import LinkSerializer
+
+
+class _LinkSerializer(serializers.Serializer):
+    self = LinkSerializer(
+        help_text="URI of this resource.",
+        required=True,
+        allow_null=False
+    )
+    nsd_content = LinkSerializer(
+        help_text="Link to the NSD content resource.",
+        required=True,
+        allow_null=False
+    )
+
+    class Meta:
+        ref_name = "NSD_LinkSerializer"
+
+
+class NsdInfoSerializer(serializers.Serializer):
+    # Metadata describing one on-boarded individual NS descriptor resource.
+    # Field names and optionality appear to follow the ETSI NFV NsdInfo
+    # data type — TODO confirm against the targeted SOL005 version.
+    id = serializers.CharField(
+        help_text="Identifier of the onboarded individual NS descriptor resource."
+        "This identifier is allocated by the NFVO.",
+        required=True,
+        allow_null=False,
+        allow_blank=False
+    )
+    nsdId = serializers.CharField(
+        help_text="This identifier, which is allocated by the NSD designer,"
+        "identifies the NSD in a globally unique way."
+        "It is copied from the NSD content and shall be present after the "
+        "NSD content is on-boarded.",
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    nsdName = serializers.CharField(
+        help_text="Name of the onboarded NSD."
+        "This information is copied from the NSD content and shall be present "
+        "after the NSD content is on-boarded.",
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    nsdVersion = serializers.CharField(  # TODO: data type is version
+        help_text="Version of the on-boarded NSD."
+        "This information is copied from the NSD content and shall be "
+        "present after the NSD content is on-boarded.",
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    nsdDesigner = serializers.CharField(
+        help_text="Designer of the on-boarded NSD."
+        "This information is copied from the NSD content and shall be "
+        "present after the NSD content is on-boarded.",
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    nsdInvariantId = serializers.CharField(
+        help_text="This identifier, which is allocated by the NSD designer,"
+        "identifies an NSD in a version independent manner."
+        "This information is copied from the NSD content and shall be "
+        "present after the NSD content is on-boarded.",
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    vnfPkgIds = serializers.ListSerializer(
+        help_text="Identifies the VNF package for the VNFD referenced "
+        "by the on-boarded NS descriptor resource.",
+        child=serializers.CharField(
+            help_text="Identifier of the VNF package",
+            allow_blank=True
+        ),
+        required=False,
+        allow_null=True,
+        allow_empty=True
+    )
+    pnfdInfoIds = serializers.ListSerializer(
+        help_text="Identifies the PnfdInfo element for the PNFD referenced "
+        "by the on-boarded NS descriptor resource.",
+        child=serializers.CharField(
+            help_text="Identifier of the PnfdInfo element",
+            allow_blank=True
+        ),
+        required=False,
+        allow_null=True,
+        allow_empty=True
+    )
+    nestedNsdInfoIds = serializers.ListSerializer(
+        help_text="Identifies the NsdInfo element for the nested NSD referenced "
+        "by the on-boarded NS descriptor resource.",
+        child=serializers.CharField(
+            help_text="Identifier of the NsdInfo element",
+            allow_blank=True
+        ),
+        required=False,
+        allow_null=True,
+        allow_empty=True
+    )
+    nsdOnboardingState = serializers.ChoiceField(
+        help_text="Onboarding state of the individual NS descriptor resource.",
+        choices=["CREATED", "UPLOADING", "PROCESSING", "ONBOARDED"],
+        required=True,
+        allow_null=False,
+        allow_blank=False
+    )
+    onboardingFailureDetails = ProblemDetailsSerializer(
+        help_text="Failure details of current onboarding procedure."
+        "It shall be present when the nsdOnboardingState attribute is CREATED "
+        "and the uploading or processing fails in NFVO.",
+        required=False,
+        allow_null=True,
+    )
+    nsdOperationalState = serializers.ChoiceField(
+        help_text="Operational state of the individual NS descriptor resource."
+        "This attribute can be modified with the PATCH method.",
+        choices=["ENABLED", "DISABLED"],
+        required=True,
+        allow_null=False,
+        allow_blank=False
+    )
+    nsdUsageState = serializers.ChoiceField(
+        help_text="Usage state of the individual NS descriptor resource.",
+        choices=["IN_USE", "NOT_IN_USE"],
+        required=True,
+        allow_null=False,
+    )
+    userDefinedData = serializers.DictField(
+        help_text="User defined data for the individual NS descriptor resource."
+        "This attribute can be modified with the PATCH method.",
+        child=serializers.CharField(
+            help_text="Key Value Pairs",
+            allow_blank=True
+        ),
+        required=False,
+        allow_null=True
+    )
+    _links = _LinkSerializer(
+        help_text="Links to resources related to this resource.",
+        required=True,
+        allow_null=True  # TODO: supposed to be False
+    )
diff --git a/genericparser/packages/serializers/nsd_infos.py b/genericparser/packages/serializers/nsd_infos.py
new file mode 100644 (file)
index 0000000..f8ad029
--- /dev/null
@@ -0,0 +1,20 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from nsd_info import NsdInfoSerializer
+
+
+class NsdInfosSerializer(serializers.ListSerializer):
+    # List wrapper serializing a collection of NsdInfo resources.
+    child = NsdInfoSerializer()
diff --git a/genericparser/packages/serializers/nsdm_filter_data.py b/genericparser/packages/serializers/nsdm_filter_data.py
new file mode 100644 (file)
index 0000000..43213e0
--- /dev/null
@@ -0,0 +1,177 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from genericparser.packages.const import NSDM_NOTIFICATION_TYPES
+
+
+class NsdmNotificationsFilter(serializers.Serializer):
+    notificationTypes = serializers.ListField(
+        child=serializers.ChoiceField(
+            required=True,
+            choices=NSDM_NOTIFICATION_TYPES
+        ),
+        help_text="Match particular notification types",
+        allow_null=False,
+        required=False
+    )
+    nsdInfoId = serializers.ListField(
+        child=serializers.UUIDField(),
+        help_text="Match NS packages with particular nsdInfoIds",
+        allow_null=False,
+        required=False
+    )
+    nsdId = serializers.ListField(
+        child=serializers.UUIDField(),
+        help_text="Match NS Packages with particular nsdIds",
+        allow_null=False,
+        required=False
+    )
+    nsdName = serializers.ListField(
+        child=serializers.CharField(
+            max_length=255,
+            required=True
+        ),
+        help_text="Match NS Packages with particular nsdNames",
+        allow_null=False,
+        required=False
+    )
+    nsdVersion = serializers.ListField(
+        child=serializers.CharField(
+            max_length=255,
+            required=True
+        ),
+        help_text="match NS packages that belong to certain nsdversion",
+        required=False,
+        allow_null=False
+    )
+    nsdInvariantId = serializers.ListField(
+        child=serializers.UUIDField(),
+        help_text="Match NS Packages with particular nsdInvariantIds",
+        allow_null=False,
+        required=False
+    )
+    vnfPkgIds = serializers.ListField(
+        child=serializers.UUIDField(),
+        help_text="Match NS Packages that has VNF PackageIds",
+        allow_null=False,
+        required=False
+    )
+    nestedNsdInfoIds = serializers.ListField(
+        child=serializers.UUIDField(),
+        help_text="Match NS Packages with particular nsdInvariantIds",
+        allow_null=False,
+        required=False
+    )
+    nsdOnboardingState = serializers.ListField(
+        child=serializers.ChoiceField(
+            required=True,
+            choices=[
+                'CREATED',
+                'UPLOADING',
+                'PROCESSING',
+                'ONBOARDED'
+            ]
+        ),
+        help_text="Match NS Packages with particular NS Onboarding State",
+        allow_null=False,
+        required=False
+    )
+    nsdOperationalState = serializers.ListField(
+        child=serializers.ChoiceField(
+            required=True,
+            choices=['ENABLED', 'DISABLED']
+        ),
+        help_text="Match NS Packages with particular NS Operational State",
+        allow_null=False,
+        required=False
+    )
+    nsdUsageState = serializers.ListField(
+        child=serializers.ChoiceField(
+            required=True,
+            choices=['IN_USE', 'NOT_IN_USE']
+        ),
+        help_text="Match NS Packages with particular NS Usage State",
+        allow_null=False,
+        required=False
+    )
+    pnfdInfoIds = serializers.ListField(
+        child=serializers.UUIDField(),
+        help_text="Match PF packages with particular pnfdInfoIds",
+        allow_null=False,
+        required=False
+    )
+    pnfdId = serializers.ListField(
+        child=serializers.UUIDField(),
+        help_text="Match PF packages with particular pnfdInfoIds",
+        allow_null=False,
+        required=False
+    )
+    pnfdName = serializers.ListField(
+        child=serializers.CharField(
+            max_length=255,
+            required=True
+        ),
+        help_text="Match PF Packages with particular pnfdNames",
+        allow_null=False,
+        required=False
+    )
+    pnfdVersion = serializers.ListField(
+        child=serializers.CharField(
+            max_length=255,
+            required=True
+        ),
+        help_text="match PF packages that belong to certain pnfd version",
+        required=False,
+        allow_null=False
+    )
+    pnfdProvider = serializers.ListField(
+        child=serializers.CharField(
+            max_length=255,
+            required=True
+        ),
+        help_text="Match PF Packages with particular pnfdProvider",
+        allow_null=False,
+        required=False
+    )
+    pnfdInvariantId = serializers.ListField(
+        child=serializers.UUIDField(),
+        help_text="Match PF Packages with particular pnfdInvariantIds",
+        allow_null=False,
+        required=False
+    )
+    pnfdOnboardingState = serializers.ListField(
+        child=serializers.ChoiceField(
+            required=True,
+            choices=[
+                'CREATED',
+                'UPLOADING',
+                'PROCESSING',
+                'ONBOARDED'
+            ]
+        ),
+        help_text="Match PF Packages with particular PNF Onboarding State ",
+        allow_null=False,
+        required=False
+    )
+    pnfdUsageState = serializers.ListField(
+        child=serializers.ChoiceField(
+            required=True,
+            choices=['IN_USE', 'NOT_IN_USE']
+        ),
+        help_text="Match PF Packages with particular PNF usage State",
+        allow_null=False,
+        required=False
+    )
diff --git a/genericparser/packages/serializers/nsdm_subscription.py b/genericparser/packages/serializers/nsdm_subscription.py
new file mode 100644 (file)
index 0000000..dabd606
--- /dev/null
@@ -0,0 +1,84 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from link import LinkSerializer
+from subscription_auth_data import SubscriptionAuthenticationSerializer
+from nsdm_filter_data import NsdmNotificationsFilter
+
+
+class NsdmSubscriptionLinkSerializer(serializers.Serializer):
+    self = LinkSerializer(
+        help_text="Links to resources related to this resource.",
+        required=True
+    )
+
+
+class NsdmSubscriptionSerializer(serializers.Serializer):
+    id = serializers.CharField(
+        help_text="Identifier of this subscription resource.",
+        max_length=255,
+        required=True,
+        allow_null=False
+    )
+    callbackUri = serializers.CharField(
+        help_text="The URI of the endpoint to send the notification to.",
+        max_length=255,
+        required=True,
+        allow_null=False
+    )
+    filter = NsdmNotificationsFilter(
+        help_text="Filter settings for this subscription, to define the "
+        "of all notifications this subscription relates to.",
+        required=False
+    )
+    _links = NsdmSubscriptionLinkSerializer(
+        help_text="Links to resources related to this resource.",
+        required=True
+    )
+
+
+class NsdmSubscriptionsSerializer(serializers.ListSerializer):
+    # List wrapper serializing a collection of subscription resources.
+    child = NsdmSubscriptionSerializer()
+
+
+class NsdmSubscriptionIdSerializer(serializers.Serializer):
+    subscription_id = serializers.UUIDField(
+        help_text="Identifier of this subscription resource.",
+        required=True,
+        allow_null=False
+    )
+
+
+class NsdmSubscriptionRequestSerializer(serializers.Serializer):
+    callbackUri = serializers.CharField(
+        help_text="The URI of the endpoint to send the notification to.",
+        required=True,
+        allow_null=False
+    )
+    filter = NsdmNotificationsFilter(
+        help_text="Filter settings for the subscription,"
+                  " to define the subset of all "
+                  "notifications this subscription relates to.",
+        required=False,
+        allow_null=True
+    )
+    authentication = SubscriptionAuthenticationSerializer(
+        help_text="Authentication parameters to configure"
+                  " the use of Authorization when sending "
+                  "notifications corresponding to this subscription.",
+        required=False,
+        allow_null=True
+    )
diff --git a/genericparser/packages/serializers/pnfd_info.py b/genericparser/packages/serializers/pnfd_info.py
new file mode 100644 (file)
index 0000000..c49ce38
--- /dev/null
@@ -0,0 +1,107 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from problem_details import ProblemDetailsSerializer
+from link import LinkSerializer
+
+
+class _LinkSerializer(serializers.Serializer):
+    self = LinkSerializer(
+        help_text='URI of this resource.',
+        required=True,
+        allow_null=False
+    )
+    pnfd_content = LinkSerializer(
+        help_text='Link to the PNFD content resource.',
+        required=True,
+        allow_null=False
+    )
+
+
+class PnfdInfoSerializer(serializers.Serializer):
+    # Metadata describing one on-boarded individual PNF descriptor resource.
+    # Field names and optionality appear to follow the ETSI NFV PnfdInfo
+    # data type — TODO confirm against the targeted SOL005 version.
+    id = serializers.CharField(
+        help_text='Identifier of the onboarded individual PNF descriptor resource. \
+        This identifier is allocated by the NFVO.',
+        required=True,
+        allow_null=False,
+        allow_blank=False
+    )
+    pnfdId = serializers.CharField(
+        help_text='This identifier, which is allocated by the PNFD designer, \
+        identifies the PNFD in a globally unique way. \
+        It is copied from the PNFD content and shall be present after the PNFD content is on-boarded.',
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    pnfdName = serializers.CharField(
+        help_text='Name of the onboarded PNFD. \
+        This information is copied from the PNFD content and shall be present after the PNFD content is on-boarded.',
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    pnfdVersion = serializers.CharField(  # TODO: data type is version
+        help_text='Version of the on-boarded PNFD. \
+        This information is copied from the PNFD content and shall be present after the PNFD content is on-boarded.',
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    pnfdProvider = serializers.CharField(
+        help_text='Provider of the on-boarded PNFD. \
+        This information is copied from the PNFD content and shall be present after the PNFD content is on-boarded.',
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    pnfdInvariantId = serializers.CharField(
+        help_text='Identifies a PNFD in a version independent manner. \
+        This attribute is invariant across versions of PNFD.',
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    pnfdOnboardingState = serializers.ChoiceField(
+        help_text='Onboarding state of the individual PNF descriptor resource.',
+        choices=['CREATED', 'UPLOADING', 'PROCESSING', 'ONBOARDED'],
+        required=True,
+        allow_null=False,
+        allow_blank=False
+    )
+    onboardingFailureDetails = ProblemDetailsSerializer(
+        help_text='Failure details of current onboarding procedure. \
+        It shall be present when the "pnfdOnboardingState" attribute is CREATED and the uploading or processing fails in NFVO.',
+        required=False,
+        allow_null=True,
+    )
+    pnfdUsageState = serializers.ChoiceField(
+        help_text='Usage state of the individual PNF descriptor resource.',
+        choices=['IN_USE', 'NOT_IN_USE'],
+        required=True,
+        allow_null=False,
+    )
+    userDefinedData = serializers.DictField(
+        help_text='User defined data for the individual PNF descriptor resource. \
+        This attribute can be modified with the PATCH method.',
+        child=serializers.CharField(help_text='Key Value Pairs', allow_blank=True),
+        required=False,
+        allow_null=True
+    )
+    _links = _LinkSerializer(
+        help_text='Links to resources related to this resource.',
+        required=True,
+        allow_null=True  # TODO: supposed to be False
+    )
diff --git a/genericparser/packages/serializers/pnfd_infos.py b/genericparser/packages/serializers/pnfd_infos.py
new file mode 100644 (file)
index 0000000..4509fc2
--- /dev/null
@@ -0,0 +1,20 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from pnfd_info import PnfdInfoSerializer
+
+
+class PnfdInfosSerializer(serializers.ListSerializer):
+    # List wrapper serializing a collection of PnfdInfo resources.
+    child = PnfdInfoSerializer()
diff --git a/genericparser/packages/serializers/problem_details.py b/genericparser/packages/serializers/problem_details.py
new file mode 100644 (file)
index 0000000..68d4500
--- /dev/null
@@ -0,0 +1,58 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
+class ProblemDetailsSerializer(serializers.Serializer):
+    type = serializers.CharField(
+        help_text='A URI reference according to IETF RFC 3986 [10] that identifies the problem type. \
+        It is encouraged that the URI provides human-readable documentation for the problem (e.g. using HTML) when dereferenced. \
+        When this member is not present, its value is assumed to be "about:blank".',
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    title = serializers.CharField(
+        help_text='A short, human-readable summary of the problem type. \
+        It should not change from occurrence to occurrence of the problem, except for purposes of localization. \
+        If type is given and other than "about:blank", this attribute shall also be provided.',
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    title = serializers.IntegerField(
+        help_text='The HTTP status code for this occurrence of the problem.',
+        required=True,
+        allow_null=False
+    )
+    detail = serializers.CharField(
+        help_text='A human-readable explanation specific to this occurrence of the problem.',
+        required=True,
+        allow_null=False,
+        allow_blank=False
+    )
+    instance = serializers.CharField(
+        help_text='A URI reference that identifies the specific occurrence of the problem. \
+        It may yield further information if dereferenced.',
+        required=False,
+        allow_null=True,
+        allow_blank=True
+    )
+    additional_attributes = serializers.DictField(
+        help_text='Any number of additional attributes, as defined in a specification or by an implementation.',
+        child=serializers.CharField(help_text='Additional attribute', allow_blank=True),
+        required=False,
+        allow_null=True,
+    )
diff --git a/genericparser/packages/serializers/response.py b/genericparser/packages/serializers/response.py
new file mode 100644 (file)
index 0000000..e2cca92
--- /dev/null
@@ -0,0 +1,51 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
+class ProblemDetailsSerializer(serializers.Serializer):
+    type = serializers.CharField(
+        help_text="Type",
+        required=False,
+        allow_null=True
+    )
+    title = serializers.CharField(
+        help_text="Title",
+        required=False,
+        allow_null=True
+    )
+    status = serializers.IntegerField(
+        help_text="Status",
+        required=True
+    )
+    detail = serializers.CharField(
+        help_text="Detail",
+        required=True,
+        allow_null=True
+    )
+    instance = serializers.CharField(
+        help_text="Instance",
+        required=False,
+        allow_null=True
+    )
+    additional_details = serializers.ListField(
+        help_text="Any number of additional attributes, as defined in a "
+        "specification or by an implementation.",
+        required=False,
+        allow_null=True
+    )
+
+    class Meta:
+        ref_name = 'SUBSCRIPTION_ProblemDetailsSerializer'
diff --git a/genericparser/packages/serializers/subscription_auth_data.py b/genericparser/packages/serializers/subscription_auth_data.py
new file mode 100644 (file)
index 0000000..9248fb1
--- /dev/null
@@ -0,0 +1,77 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from genericparser.packages import const
+
+
+class OAuthCredentialsSerializer(serializers.Serializer):
+    clientId = serializers.CharField(
+        help_text="Client identifier to be used in the access token "
+        "request of the OAuth 2.0 client credentials grant type.",
+        required=False,
+        max_length=255,
+        allow_null=False
+    )
+    clientPassword = serializers.CharField(
+        help_text="Client password to be used in the access token "
+        "request of the OAuth 2.0 client credentials grant type.",
+        required=False,
+        max_length=255,
+        allow_null=False
+    )
+    tokenEndpoint = serializers.CharField(
+        help_text="The token endpoint from which the access token can "
+        "be obtained.",
+        required=False,
+        max_length=255,
+        allow_null=False
+    )
+
+
+class BasicAuthSerializer(serializers.Serializer):
+    userName = serializers.CharField(
+        help_text="Username to be used in HTTP Basic authentication.",
+        max_length=255,
+        required=False,
+        allow_null=False
+    )
+    password = serializers.CharField(
+        help_text="Password to be used in HTTP Basic authentication.",
+        max_length=255,
+        required=False,
+        allow_null=False
+    )
+
+
+class SubscriptionAuthenticationSerializer(serializers.Serializer):
+    authType = serializers.ListField(
+        child=serializers.ChoiceField(required=True, choices=const.AUTH_TYPES),
+        help_text="Defines the types of Authentication / Authorization "
+        "which the API consumer is willing to accept when "
+        "receiving a notification.",
+        required=True
+    )
+    paramsBasic = BasicAuthSerializer(
+        help_text="Parameters for authentication/authorization using BASIC.",
+        required=False,
+        allow_null=False
+    )
+    paramsOauth2ClientCredentials = OAuthCredentialsSerializer(
+        help_text="Parameters for authentication/authorization using "
+        "OAUTH2_CLIENT_CREDENTIALS.",
+        required=False,
+        allow_null=False
+    )
diff --git a/genericparser/packages/serializers/upload_vnf_pkg_from_uri_req.py b/genericparser/packages/serializers/upload_vnf_pkg_from_uri_req.py
new file mode 100644 (file)
index 0000000..b847484
--- /dev/null
@@ -0,0 +1,36 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+
+class UploadVnfPackageFromUriRequestSerializer(serializers.Serializer):
+    addressInformation = serializers.CharField(
+        help_text="Address information of the VNF package content.",
+        required=True,
+        allow_null=False,
+        allow_blank=False
+    )
+    userName = serializers.CharField(
+        help_text="User name to be used for authentication.",
+        required=False,
+        allow_null=False,
+        allow_blank=False
+    )
+    password = serializers.CharField(
+        help_text="Password to be used for authentication.",
+        required=False,
+        allow_null=False,
+        allow_blank=False
+    )
diff --git a/genericparser/packages/serializers/vnf_pkg_artifact_info.py b/genericparser/packages/serializers/vnf_pkg_artifact_info.py
new file mode 100644 (file)
index 0000000..d5592ec
--- /dev/null
@@ -0,0 +1,39 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from checksum import ChecksumSerializer
+
+
+class VnfPackageArtifactInfoSerializer(serializers.Serializer):
+    artifactPath = serializers.CharField(
+        help_text="Path in the VNF package.",
+        required=True,
+        allow_null=False,
+        allow_blank=False
+    )
+    checksum = ChecksumSerializer(
+        help_text="Checksum of the artifact file.",
+        required=True,
+        allow_null=False
+    )
+    metadata = serializers.DictField(
+        help_text="The metadata of the artifact that are available in the VNF package",
+        child=serializers.CharField(
+            help_text="KeyValue Pairs",
+            allow_blank=True
+        ),
+        required=False,
+        allow_null=True
+    )
diff --git a/genericparser/packages/serializers/vnf_pkg_info.py b/genericparser/packages/serializers/vnf_pkg_info.py
new file mode 100644 (file)
index 0000000..0251bed
--- /dev/null
@@ -0,0 +1,127 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from checksum import ChecksumSerializer
+from vnf_pkg_software_image_info import VnfPackageSoftwareImageInfoSerializer
+from vnf_pkg_artifact_info import VnfPackageArtifactInfoSerializer
+from link import LinkSerializer
+
+
class _LinkSerializer(serializers.Serializer):
    """Links exposed by a VnfPkgInfo resource (its "_links" attribute)."""

    self = LinkSerializer(
        help_text='URI of this resource.',
        required=True,
        allow_null=False
    )
    vnfd = LinkSerializer(
        help_text='Link to the VNFD resource.',
        required=False,
        allow_null=False
    )
    packageContent = LinkSerializer(
        # Fixed unbalanced quote in the user-facing description:
        # was 'Link to the "VNF package content resource.'
        help_text='Link to the "VNF package content" resource.',
        required=True,
        allow_null=False
    )

    class Meta:
        ref_name = 'VNF_PKGM_Link_Serializer'
+
+
class VnfPkgInfoSerializer(serializers.Serializer):
    """Representation of an on-boarded VNF package (VnfPkgInfo).

    Carries the package identifiers, product/version info, checksum,
    contained artifacts, the three package states and related links.
    """

    id = serializers.CharField(
        help_text="Identifier of the on-boarded VNF package.",
        required=True,
        allow_null=False,
        allow_blank=False
    )
    vnfdId = serializers.CharField(
        help_text="This identifier, which is managed by the VNF provider, "
        "identifies the VNF package and the VNFD in a globally unique way.",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    vnfProvider = serializers.CharField(
        help_text="Provider of the VNF package and the VNFD.",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    vnfProductName = serializers.CharField(
        help_text="Name to identify the VNF product.",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    vnfSoftwareVersion = serializers.CharField(
        help_text="Software version of the VNF.",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    vnfdVersion = serializers.CharField(
        # Fixed typo in the user-facing description: "VNvFD" -> "VNFD".
        help_text="The version of the VNFD.",
        required=False,
        allow_null=True,
        allow_blank=True
    )
    checksum = ChecksumSerializer(
        help_text="Checksum of the on-boarded VNF package.",
        required=False,
        allow_null=True
    )
    softwareImages = VnfPackageSoftwareImageInfoSerializer(
        help_text="Information about VNF package artifacts that are software images.",
        required=False,
        allow_null=True,
        many=True
    )
    additionalArtifacts = VnfPackageArtifactInfoSerializer(
        help_text="Information about VNF package artifacts contained in "
        "the VNF package that are not software images.",
        required=False,
        allow_null=True,
        many=True
    )
    onboardingState = serializers.ChoiceField(
        help_text="On-boarding state of the VNF package.",
        choices=["CREATED", "UPLOADING", "PROCESSING", "ONBOARDED"],
        required=True,
        allow_null=True
    )
    operationalState = serializers.ChoiceField(
        help_text="Operational state of the VNF package.",
        choices=["ENABLED", "DISABLED"],
        required=True,
        allow_null=True
    )
    usageState = serializers.ChoiceField(
        help_text="Usage state of the VNF package.",
        choices=["IN_USE", "NOT_IN_USE"],
        required=True,
        allow_null=True
    )
    userDefinedData = serializers.DictField(
        help_text="User defined data for the VNF package.",
        child=serializers.CharField(help_text="KeyValue Pairs", allow_blank=True),
        required=False,
        allow_null=True
    )
    _links = _LinkSerializer(
        help_text='Links to resources related to this resource.',
        required=True,
        allow_null=True  # TODO supposed to be False
    )
diff --git a/genericparser/packages/serializers/vnf_pkg_infos.py b/genericparser/packages/serializers/vnf_pkg_infos.py
new file mode 100644 (file)
index 0000000..d4cbc65
--- /dev/null
@@ -0,0 +1,20 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from vnf_pkg_info import VnfPkgInfoSerializer
+
+
class VnfPkgInfosSerializer(serializers.ListSerializer):
    """Serializes a list of VnfPkgInfo items (one entry per on-boarded VNF package)."""
    child = VnfPkgInfoSerializer()
diff --git a/genericparser/packages/serializers/vnf_pkg_notifications.py b/genericparser/packages/serializers/vnf_pkg_notifications.py
new file mode 100644 (file)
index 0000000..4bfdf84
--- /dev/null
@@ -0,0 +1,117 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from genericparser.packages.const import NOTIFICATION_TYPES
+
# Allowed values for the operational and usage states of a VNF package,
# used as ChoiceField choices in the notification-filter serializers below.
PackageOperationalStateType = ["ENABLED", "DISABLED"]
PackageUsageStateType = ["IN_USE", "NOT_IN_USE"]
+
+
class VersionSerializer(serializers.Serializer):
    """Filter criteria matching VNF packages by software and VNFD versions."""

    vnfSoftwareVersion = serializers.CharField(
        max_length=255,
        allow_null=False,
        required=True,
        help_text="VNF software version to match."
    )
    vnfdVersions = serializers.ListField(
        required=False,
        allow_null=False,
        child=serializers.CharField(),
        help_text="Match VNF packages that contain "
                  "VNF products with certain VNFD versions"
    )
+
+
class vnfProductsSerializer(serializers.Serializer):
    """Filter criteria matching VNF packages by product name and version."""

    vnfProductName = serializers.CharField(
        max_length=255,
        allow_null=False,
        required=True,
        help_text="Name of the VNF product to match."
    )
    versions = VersionSerializer(
        required=False,
        allow_null=False,
        help_text="match VNF packages that contain "
                  "VNF products with certain versions"
    )
+
+
class vnfProductsProvidersSerializer(serializers.Serializer):
    """Filter criteria matching VNF packages from one particular provider."""

    vnfProvider = serializers.CharField(
        # Fixed missing space in the user-facing description:
        # "VNFprovider" -> "VNF provider".
        help_text="Name of the VNF provider to match.",
        max_length=255,
        required=True,
        allow_null=False
    )
    vnfProducts = vnfProductsSerializer(
        help_text="match VNF packages that contain "
                  "VNF products with certain product names, "
                  "from one particular provider",
        required=False,
        allow_null=False
    )
+
+
class PkgmNotificationsFilter(serializers.Serializer):
    """Filter settings selecting which package-management notifications a subscription receives."""

    notificationTypes = serializers.ListField(
        child=serializers.ChoiceField(
            required=True,
            choices=NOTIFICATION_TYPES
        ),
        help_text="Match particular notification types",
        allow_null=False,
        required=False
    )
    vnfProductsFromProviders = vnfProductsProvidersSerializer(
        help_text="Match VNF packages that contain "
                  "VNF products from certain providers.",
        allow_null=False,
        required=False
    )
    vnfdId = serializers.ListField(
        child=serializers.UUIDField(),
        # Fixed missing space between the two adjacent string fragments
        # ("identifier" + "listed" rendered as "identifierlisted").
        help_text="Match VNF packages with a VNFD identifier "
                  "listed in the attribute",
        required=False,
        allow_null=False
    )
    vnfPkgId = serializers.ListField(
        child=serializers.UUIDField(),
        # The description was a copy-paste of vnfdId's; this field matches
        # VNF *package* identifiers.
        help_text="Match VNF packages with a package identifier "
                  "listed in the attribute",
        required=False,
        allow_null=False
    )
    operationalState = serializers.ListField(
        child=serializers.ChoiceField(
            required=True,
            choices=PackageOperationalStateType
        ),
        help_text="Operational state of the VNF package.",
        allow_null=False,
        required=False
    )
    usageState = serializers.ListField(
        child=serializers.ChoiceField(
            required=True,
            choices=PackageUsageStateType
        ),
        # The description was a copy-paste of operationalState's.
        help_text="Usage state of the VNF package.",
        allow_null=False,
        required=False
    )
diff --git a/genericparser/packages/serializers/vnf_pkg_software_image_info.py b/genericparser/packages/serializers/vnf_pkg_software_image_info.py
new file mode 100644 (file)
index 0000000..7723ec0
--- /dev/null
@@ -0,0 +1,96 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+from checksum import ChecksumSerializer
+
+
class VnfPackageSoftwareImageInfoSerializer(serializers.Serializer):
    """Describes a software-image artifact contained in a VNF package."""

    id = serializers.CharField(
        help_text="Identifier of the software image.",
        required=True,
        allow_null=False,
        allow_blank=False
    )
    name = serializers.CharField(
        help_text="Name of the software image.",
        required=True,
        allow_null=True,
        allow_blank=False
    )
    provider = serializers.CharField(
        help_text="Provider of the software image.",
        required=True,
        allow_null=True,
        allow_blank=False
    )
    version = serializers.CharField(
        help_text="Version of the software image.",
        required=True,
        allow_null=True,
        allow_blank=False
    )
    checksum = ChecksumSerializer(
        help_text="Checksum of the software image file.",
        required=True,
        allow_null=False
    )
    containerFormat = serializers.ChoiceField(
        # The original description was a copy-paste from an unrelated
        # "terminationType" field; this field is the image container format.
        help_text="Container format indicates whether the software image is "
                  "in a file format that also contains metadata about the actual software.",
        choices=["AKI", "AMI", "ARI", "BARE", "DOCKER", "OVA", "OVF"],
        required=True,
        allow_null=True
    )
    diskFormat = serializers.ChoiceField(
        help_text="Disk format of a software image is the format of the underlying disk image.",
        choices=["AKI", "AMI", "ARI", "ISO", "QCOW2", "RAW", "VDI", "VHD", "VHDX", "VMDK"],
        required=True,
        allow_null=True
    )
    createdAt = serializers.DateTimeField(
        help_text="Time when this software image was created.",
        required=True,
        format=None,
        input_formats=None
    )
    minDisk = serializers.IntegerField(
        help_text="The minimal disk for this software image in bytes.",
        required=True,
        allow_null=True
    )
    minRam = serializers.IntegerField(
        help_text="The minimal RAM for this software image in bytes.",
        required=True,
        allow_null=True
    )
    size = serializers.IntegerField(
        help_text="Size of this software image in bytes.",
        required=True,
        allow_null=True
    )
    userMetadata = serializers.DictField(
        help_text="User-defined data.",
        child=serializers.CharField(
            help_text="KeyValue Pairs",
            allow_blank=True
        ),
        required=False,
        allow_null=True
    )
    imagePath = serializers.CharField(
        help_text="Path in the VNF package.",
        required=True,
        allow_null=True,
        allow_blank=False
    )
diff --git a/genericparser/packages/serializers/vnf_pkg_subscription.py b/genericparser/packages/serializers/vnf_pkg_subscription.py
new file mode 100644 (file)
index 0000000..b5a0bc4
--- /dev/null
@@ -0,0 +1,93 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from rest_framework import serializers
+
+from genericparser.packages.serializers import subscription_auth_data
+from genericparser.packages.serializers import vnf_pkg_notifications
+
+
class LinkSerializer(serializers.Serializer):
    """A single hyperlink: the URI of a referenced resource."""

    href = serializers.CharField(
        allow_blank=False,
        allow_null=False,
        required=True,
        help_text="URI of the referenced resource."
    )

    class Meta:
        ref_name = 'VNF_SUBSCRIPTION_LINKSERIALIZER'
+
+
class LinkSelfSerializer(serializers.Serializer):
    """Container holding only the 'self' link of a resource."""

    self = LinkSerializer(
        allow_null=False,
        required=True,
        help_text="URI of this resource."
    )
+    )
+
+
class PkgmSubscriptionRequestSerializer(serializers.Serializer):
    """Request body for creating a package-management subscription."""

    filters = vnf_pkg_notifications.PkgmNotificationsFilter(
        help_text="Filter settings for this subscription, "
                  "to define the subset of all notifications"
                  " this subscription relates to",
        required=False,
        allow_null=False
    )
    callbackUri = serializers.URLField(
        # Fixed missing space between adjacent string fragments
        # (rendered as "sendthe notification").
        help_text="Callback URI to send "
                  "the notification",
        required=True,
        allow_null=False
    )
    authentication = subscription_auth_data.SubscriptionAuthenticationSerializer(
        # Fixed missing space between adjacent string fragments
        # (rendered as "corresponding tothis subscription").
        help_text="Authentication parameters to configure the use of "
                  "authorization when sending notifications corresponding to "
                  "this subscription",
        required=False,
        allow_null=False
    )
+    )
+
+
class PkgmSubscriptionSerializer(serializers.Serializer):
    """Representation of an individual package-management subscription resource."""

    id = serializers.UUIDField(
        allow_null=False,
        required=True,
        help_text="Identifier of this subscription resource."
    )
    callbackUri = serializers.URLField(
        allow_null=False,
        required=True,
        help_text="The URI of the endpoint to send the notification to."
    )

    _links = LinkSelfSerializer(
        allow_null=False,
        required=True,
        help_text="Links to resources related to this resource."
    )

    filter = vnf_pkg_notifications.PkgmNotificationsFilter(
        allow_null=False,
        required=False,
        help_text="Filter settings for this subscription, "
                  "to define the subset of all notifications"
                  " this subscription relates to"
    )
+    )
+
+
class PkgmSubscriptionsSerializer(serializers.ListSerializer):
    """Serializes a (possibly empty) list of package-management subscriptions."""
    child = PkgmSubscriptionSerializer()
    allow_empty = True
diff --git a/genericparser/packages/tests/__init__.py b/genericparser/packages/tests/__init__.py
new file mode 100644 (file)
index 0000000..c7b6818
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/packages/tests/const.py b/genericparser/packages/tests/const.py
new file mode 100644 (file)
index 0000000..caf60e3
--- /dev/null
@@ -0,0 +1,457 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Sample parsed-VNFD structure used as a fixture by the package test cases:
# storage, VDU, image, VL, CP, metadata and top-level VNF sections.
# NOTE(review): the key "descriptor_verison" (sic) in the "vnf" section looks
# misspelled -- verify against the parser output that produces this structure
# before renaming it, since tests may depend on the exact key.
vnfd_data = {
    "volume_storages": [
        {
            "properties": {
                "size_of_storage": {
                    "factor": 10,
                    "value": 10000000000,
                    "unit": "GB",
                    "unit_size": 1000000000
                },
                "type_of_storage": "volume",
                "rdma_enabled": False,
                "size": "10 GB"
            },
            "volume_storage_id": "vNAT_Storage_6wdgwzedlb6sq18uzrr41sof7",
            "description": ""
        }
    ],
    "inputs": {},
    "vdus": [
        {
            "volume_storages": [
                "vNAT_Storage_6wdgwzedlb6sq18uzrr41sof7"
            ],
            "description": "",
            "dependencies": [],
            "vls": [],
            "properties": {
                "name": "vNat",
                "configurable_properties": {
                    "test": {
                        "additional_vnfc_configurable_properties": {
                            "aaa": "1",
                            "bbb": "2",
                            "ccc": "3"
                        }
                    }
                },
                "description": "the virtual machine of vNat",
                "nfvi_constraints": [
                    "test"
                ],
                "boot_order": [
                    "vNAT_Storage"
                ]
            },
            "vdu_id": "vdu_vNat",
            "artifacts": [
                {
                    "artifact_name": "vNatVNFImage",
                    "type": "tosca.artifacts.nfv.SwImage",
                    "properties": {
                        "operating_system": "linux",
                        "sw_image": "/swimages/vRouterVNF_ControlPlane.qcow2",
                        "name": "vNatVNFImage",
                        "container_format": "bare",
                        "min_ram": "1 GB",
                        "disk_format": "qcow2",
                        "supported_virtualisation_environments": [
                            "test_0"
                        ],
                        "version": "1.0",
                        "checksum": "5000",
                        "min_disk": "10 GB",
                        "size": "10 GB"
                    },
                    "file": "/swimages/vRouterVNF_ControlPlane.qcow2"
                }
            ],
            "nfv_compute": {
                "flavor_extra_specs": {
                    "hw:cpu_sockets": "2",
                    "sw:ovs_dpdk": "true",
                    "hw:cpu_threads": "2",
                    "hw:numa_mem.1": "3072",
                    "hw:numa_mem.0": "1024",
                    "hw:numa_nodes": "2",
                    "hw:numa_cpus.0": "0,1",
                    "hw:numa_cpus.1": "2,3,4,5",
                    "hw:cpu_cores": "2",
                    "hw:cpu_threads_policy": "isolate"
                },
                "cpu_frequency": "2.4 GHz",
                "num_cpus": 2,
                "mem_size": "10 GB"
            },
            "local_storages": [],
            "image_file": "vNatVNFImage",
            "cps": []
        }
    ],
    "image_files": [
        {
            "properties": {
                "operating_system": "linux",
                "sw_image": "/swimages/vRouterVNF_ControlPlane.qcow2",
                "name": "vNatVNFImage",
                "container_format": "bare",
                "min_ram": "1 GB",
                "disk_format": "qcow2",
                "supported_virtualisation_environments": [
                    "test_0"
                ],
                "version": "1.0",
                "checksum": "5000",
                "min_disk": "10 GB",
                "size": "10 GB"
            },
            "image_file_id": "vNatVNFImage",
            "description": ""
        }
    ],
    "routers": [],
    "local_storages": [],
    "vnf_exposed": {
        "external_cps": [
            {
                "key_name": "sriov_plane",
                "cp_id": "SRIOV_Port"
            }
        ],
        "forward_cps": []
    },
    "vls": [
        {
            "route_id": "",
            "vl_id": "sriov_link",
            "route_external": False,
            "description": "",
            "properties": {
                "vl_flavours": {
                    "vl_id": "aaaa"
                },
                "connectivity_type": {
                    "layer_protocol": "ipv4",
                    "flow_pattern": "flat"
                },
                "description": "sriov_link",
                "test_access": [
                    "test"
                ]
            }
        }
    ],
    "cps": [
        {
            "vl_id": "sriov_link",
            "vdu_id": "vdu_vNat",
            "description": "",
            "cp_id": "SRIOV_Port",
            "properties": {
                "address_data": [
                    {
                        "address_type": "ip_address",
                        "l3_address_data": {
                            "ip_address_type": "ipv4",
                            "floating_ip_activated": False,
                            "number_of_ip_address": 1,
                            "ip_address_assignment": True
                        }
                    }
                ],
                "description": "sriov port",
                "layer_protocol": "ipv4",
                "virtual_network_interface_requirements": [
                    {
                        "requirement": {
                            "SRIOV": "true"
                        },
                        "support_mandatory": False,
                        "name": "sriov",
                        "description": "sriov"
                    },
                    {
                        "requirement": {
                            "SRIOV": "False"
                        },
                        "support_mandatory": False,
                        "name": "normal",
                        "description": "normal"
                    }
                ],
                "role": "root",
                "bitrate_requirement": 10
            }
        }
    ],
    "metadata": {
        "vnfSoftwareVersion": "1.0.0",
        "vnfProductName": "zte",
        "localizationLanguage": [
            "english",
            "chinese"
        ],
        "vnfProvider": "zte",
        "vnfmInfo": "zte",
        "defaultLocalizationLanguage": "english",
        "vnfdId": "zte-hss-1.0",
        "id": "zte-hss-1.0",
        "vnfProductInfoDescription": "hss",
        "vnfdVersion": "1.0.0",
        "vnfProductInfoName": "hss"
    },
    "vnf": {
        "properties": {
            "descriptor_id": "zte-hss-1.0",
            "descriptor_verison": "1.0.0",
            "software_version": "1.0.0",
            "provider": "zte"
        },
        "metadata": {
        }
    }
}
+
+nsd_data = {"vnffgs": [{"vnffg_id": "vnffg1",
+                        "description": "",
+                        "members": ["path1",
+                                    "path2"],
+                        "properties": {"vendor": "zte",
+                                       "connection_point": ["m6000_data_in",
+                                                            "m600_tunnel_cp",
+                                                            "m6000_data_out"],
+                                       "version": "1.0",
+                                       "constituent_vnfs": ["VFW",
+                                                            "VNAT"],
+                                       "number_of_endpoints": 3,
+                                       "dependent_virtual_link": ["sfc_data_network",
+                                                                  "ext_datanet_net",
+                                                                  "ext_mnet_net"]}}],
+            "inputs": {"sfc_data_network": {"type": "string",
+                                            "value": "sfc_data_network"},
+                       "externalDataNetworkName": {"type": "string",
+                                                   "value": "vlan_4004_tunnel_net"},
+                       "externalManageNetworkName": {"type": "string",
+                                                     "value": "vlan_4008_mng_net"},
+                       "NatIpRange": {"type": "string",
+                                      "value": "192.167.0.10-192.168.0.20"},
+                       "externalPluginManageNetworkName": {"type": "string",
+                                                           "value": "vlan_4007_plugin_net"}},
+            "pnfs": [{"pnf_id": "m6000_s",
+                      "cps": [],
+                      "description": "",
+                      "properties": {"vendor": "zte",
+                                     "request_reclassification": False,
+                                     "pnf_type": "m6000s",
+                                     "version": "1.0",
+                                     "management_address": "111111",
+                                     "id": "m6000_s",
+                                     "nsh_aware": False}}],
+            "fps": [{"properties": {"symmetric": False,
+                                    "policy": {"type": "ACL",
+                                               "criteria": {"dest_port_range": "1-100",
+                                                            "ip_protocol": "tcp",
+                                                            "source_ip_range": ["119.1.1.1-119.1.1.10"],
+                                                            "dest_ip_range": [{"get_input": "NatIpRange"}],
+                                                            "dscp": 0,
+                                                            "source_port_range": "1-100"}}},
+                     "forwarder_list": [{"capability": "",
+                                         "type": "cp",
+                                         "node_name": "m6000_data_out"},
+                                        {"capability": "",
+                                         "type": "cp",
+                                         "node_name": "m600_tunnel_cp"},
+                                        {"capability": "vnat_fw_inout",
+                                         "type": "vnf",
+                                         "node_name": "VNAT"}],
+                     "description": "",
+                     "fp_id": "path2"},
+                    {"properties": {"symmetric": True,
+                                    "policy": {"type": "ACL",
+                                               "criteria": {"dest_port_range": "1-100",
+                                                            "ip_protocol": "tcp",
+                                                            "source_ip_range": ["1-100"],
+                                                            "dest_ip_range": ["1-100"],
+                                                            "dscp": 4,
+                                                            "source_port_range": "1-100"}}},
+                     "forwarder_list": [{"capability": "",
+                                         "type": "cp",
+                                         "node_name": "m6000_data_in"},
+                                        {"capability": "",
+                                         "type": "cp",
+                                         "node_name": "m600_tunnel_cp"},
+                                        {"capability": "vfw_fw_inout",
+                                         "type": "vnf",
+                                         "node_name": "VFW"},
+                                        {"capability": "vnat_fw_inout",
+                                         "type": "vnf",
+                                         "node_name": "VNAT"},
+                                        {"capability": "",
+                                         "type": "cp",
+                                         "node_name": "m600_tunnel_cp"},
+                                        {"capability": "",
+                                         "type": "cp",
+                                         "node_name": "m6000_data_out"}],
+                     "description": "",
+                     "fp_id": "path1"}],
+            "routers": [],
+            "vnfs": [{"vnf_id": "VFW",
+                      "description": "",
+                      "properties": {"plugin_info": "vbrasplugin_1.0",
+                                     "vendor": "zte",
+                                     "is_shared": False,
+                                     "adjust_vnf_capacity": True,
+                                     "name": "VFW",
+                                     "vnf_extend_type": "driver",
+                                     "csarVersion": "v1.0",
+                                     "csarType": "NFAR",
+                                     "csarProvider": "ZTE",
+                                     "version": "1.0",
+                                     "nsh_aware": True,
+                                     "cross_dc": False,
+                                     "vnf_type": "VFW",
+                                     "vmnumber_overquota_alarm": True,
+                                     "vnfd_version": "1.0.0",
+                                     "externalPluginManageNetworkName": "vlan_4007_plugin_net",
+                                     "id": "vcpe_vfw_zte_1_0",
+                                     "request_reclassification": False},
+                      "dependencies": [{"key_name": "vfw_ctrl_by_manager_cp",
+                                        "vl_id": "ext_mnet_net"},
+                                       {"key_name": "vfw_data_cp",
+                                        "vl_id": "sfc_data_network"}],
+                      "type": "tosca.nodes.nfv.ext.zte.VNF.VFW",
+                      "networks": []}],
+            "ns_exposed": {"external_cps": [],
+                           "forward_cps": []},
+            "policies": [{"file_url": "policies/abc.drl",
+                          "name": "aaa"}],
+            "vls": [{"route_id": "",
+                     "vl_id": "ext_mnet_net",
+                     "route_external": False,
+                     "description": "",
+                     "properties": {"name": "vlan_4008_mng_net",
+                                    "mtu": 1500,
+                                    "location_info": {"tenant": "admin",
+                                                      "vimid": 2,
+                                                      "availability_zone": "nova"},
+                                    "ip_version": 4,
+                                    "dhcp_enabled": True,
+                                    "network_name": "vlan_4008_mng_net",
+                                    "network_type": "vlan"}},
+                    {"route_id": "",
+                     "vl_id": "ext_datanet_net",
+                     "route_external": False,
+                     "description": "",
+                     "properties": {"name": "vlan_4004_tunnel_net",
+                                    "mtu": 1500,
+                                    "location_info": {"tenant": "admin",
+                                                      "vimid": 2,
+                                                      "availability_zone": "nova"},
+                                    "ip_version": 4,
+                                    "dhcp_enabled": True,
+                                    "network_name": "vlan_4004_tunnel_net",
+                                    "network_type": "vlan"}},
+                    {"route_id": "",
+                     "vl_id": "sfc_data_network",
+                     "route_external": False,
+                     "description": "",
+                     "properties": {"name": "sfc_data_network",
+                                    "dhcp_enabled": True,
+                                    "is_predefined": False,
+                                    "location_info": {"tenant": "admin",
+                                                      "vimid": 2,
+                                                      "availability_zone": "nova"},
+                                    "ip_version": 4,
+                                    "mtu": 1500,
+                                    "network_name": "sfc_data_network",
+                                    "network_type": "vlan"}}],
+            "cps": [{"pnf_id": "m6000_s",
+                     "vl_id": "path2",
+                     "description": "",
+                     "cp_id": "m6000_data_out",
+                     "properties": {"direction": "bidirectional",
+                                    "vnic_type": "normal",
+                                    "bandwidth": 0,
+                                    "mac_address": "11-22-33-22-11-44",
+                                    "interface_name": "xgei-0/4/1/5",
+                                    "ip_address": "176.1.1.2",
+                                    "order": 0,
+                                    "sfc_encapsulation": "mac"}},
+                    {"pnf_id": "m6000_s",
+                     "vl_id": "ext_datanet_net",
+                     "description": "",
+                     "cp_id": "m600_tunnel_cp",
+                     "properties": {"direction": "bidirectional",
+                                    "vnic_type": "normal",
+                                    "bandwidth": 0,
+                                    "mac_address": "00-11-00-22-33-00",
+                                    "interface_name": "gei-0/4/0/13",
+                                    "ip_address": "191.167.100.5",
+                                    "order": 0,
+                                    "sfc_encapsulation": "mac"}},
+                    {"pnf_id": "m6000_s",
+                     "vl_id": "path2",
+                     "description": "",
+                     "cp_id": "m6000_data_in",
+                     "properties": {"direction": "bidirectional",
+                                    "vnic_type": "normal",
+                                    "bandwidth": 0,
+                                    "mac_address": "11-22-33-22-11-41",
+                                    "interface_name": "gei-0/4/0/7",
+                                    "ip_address": "1.1.1.1",
+                                    "order": 0,
+                                    "sfc_encapsulation": "mac",
+                                    "bond": "none"}},
+                    {"pnf_id": "m6000_s",
+                     "vl_id": "ext_mnet_net",
+                     "description": "",
+                     "cp_id": "m600_mnt_cp",
+                     "properties": {"direction": "bidirectional",
+                                    "vnic_type": "normal",
+                                    "bandwidth": 0,
+                                    "mac_address": "00-11-00-22-33-11",
+                                    "interface_name": "gei-0/4/0/1",
+                                    "ip_address": "10.46.244.51",
+                                    "order": 0,
+                                    "sfc_encapsulation": "mac",
+                                    "bond": "none"}}],
+            "metadata": {"invariant_id": "vcpe_ns_sff_1",
+                         "name": "VCPE_NS",
+                         "csarVersion": "v1.0",
+                         "csarType": "NSAR",
+                         "csarProvider": "ZTE",
+                         "version": 1,
+                         "vendor": "ZTE",
+                         "id": "VCPE_NS",
+                         "description": "vcpe_ns"},
+            "ns": {
+                "properties": {
+                    "descriptor_id": "VCPE_NS",
+                    "version": 1,
+                    "name": "VCPE_NS",
+                    "desginer": "ZTE",
+                    "invariant_id": "vcpe_ns_sff_1"
+                }
+}
+}
+
# Minimal PNFD parse-result fixture: the PNF-descriptor tests only need
# the descriptor id under "metadata".
pnfd_data = {"metadata": {"id": "zte-1.0"}}
diff --git a/genericparser/packages/tests/test_health_check.py b/genericparser/packages/tests/test_health_check.py
new file mode 100644 (file)
index 0000000..e3befca
--- /dev/null
@@ -0,0 +1,50 @@
+# Copyright (c) 2019, CMCC Technologies Co., Ltd.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+
+from django.test import TestCase, Client
+from rest_framework import status
+
+
class TestHealthCheck(TestCase):
    """Smoke tests for the service health-check endpoints.

    Every endpoint is expected to answer HTTP 200 with the JSON body
    {"status": "active"}.
    """

    def setUp(self):
        self.client = Client()

    def tearDown(self):
        pass

    def _assert_health_check(self, url):
        # Shared assertion helper: GET the endpoint, check the status code
        # first (including the raw body in the failure message), then the
        # decoded payload.
        response = self.client.get(url)
        self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
        self.assertEqual({"status": "active"}, json.loads(response.content))

    def test_vnfpkgm_health_check(self):
        self._assert_health_check("/api/vnfpkgm/v1/health_check")

    def test_nsd_health_check(self):
        self._assert_health_check("/api/nsd/v1/health_check")

    def test_catalog_health_check(self):
        self._assert_health_check("/api/genericparser/v1/health_check")

    def test_parser_health_check(self):
        self._assert_health_check("/api/parser/v1/health_check")
diff --git a/genericparser/packages/tests/test_ns_descriptor.py b/genericparser/packages/tests/test_ns_descriptor.py
new file mode 100644 (file)
index 0000000..6c4133c
--- /dev/null
@@ -0,0 +1,300 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import json
+import mock
+import os
+
+from django.test import TestCase
+from rest_framework import status
+from rest_framework.test import APIClient
+from genericparser.packages.biz.ns_descriptor import NsDescriptor
+from genericparser.packages.const import PKG_STATUS
+from genericparser.packages.tests.const import nsd_data
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.database.models import NSPackageModel, VnfPackageModel, PnfPackageModel
+from genericparser.pub.utils import toscaparsers
+
+
class TestNsDescriptor(TestCase):
    """REST-level tests for the NS descriptor (NSD) management API.

    Exercises creation, query (single and multiple), deletion, and NSD
    content upload/download, plus the 500 paths taken when the
    NsDescriptor business layer raises.
    """

    def setUp(self):
        """Create the API client and the fixture data shared by the tests."""
        self.client = APIClient()
        self.user_defined_data = {
            'key1': 'value1',
            'key2': 'value2',
            'key3': 'value3',
        }
        # Template NsdInfo as returned by the API for a freshly created,
        # not-yet-onboarded descriptor; tests copy it and fill in the ids.
        self.expected_nsd_info = {
            'id': None,
            'nsdId': None,
            'nsdName': None,
            'nsdVersion': None,
            'nsdDesigner': None,
            'nsdInvariantId': None,
            'vnfPkgIds': [],
            'pnfdInfoIds': [],
            'nestedNsdInfoIds': [],
            'nsdOnboardingState': 'CREATED',
            'onboardingFailureDetails': None,
            'nsdOperationalState': 'DISABLED',
            'nsdUsageState': 'NOT_IN_USE',
            'userDefinedData': self.user_defined_data,
            '_links': None
        }
        # Minimal parsed-NSD model referencing one PNF and one VNF.
        self.nsdModel = {
            "pnfs": [{"properties": {"id": "m6000_s"}}],
            "vnfs": [{"properties": {"id": "123"}}]
        }

    def tearDown(self):
        """No per-test cleanup is required."""
        pass

    def test_nsd_create_normal(self):
        """POST creates a descriptor in CREATED/DISABLED/NOT_IN_USE state."""
        reqest_data = {'userDefinedData': self.user_defined_data}
        expected_reponse_data = {
            'nsdOnboardingState': 'CREATED',
            'nsdOperationalState': 'DISABLED',
            'nsdUsageState': 'NOT_IN_USE',
            'userDefinedData': self.user_defined_data,
            '_links': None
        }

        response = self.client.post(
            '/api/nsd/v1/ns_descriptors',
            data=reqest_data,
            format='json'
        )
        # The generated id is random; drop it before comparing.
        response.data.pop('id')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(expected_reponse_data, response.data)

    def test_query_multiple_nsds_normal(self):
        """GET lists all descriptors, then filters the listing by nsdId."""
        expected_reponse_data = [
            copy.deepcopy(self.expected_nsd_info),
            copy.deepcopy(self.expected_nsd_info)
        ]
        expected_reponse_data[0]['id'] = '0'
        expected_reponse_data[0]['nsdId'] = '0'
        expected_reponse_data[1]['id'] = '1'
        expected_reponse_data[1]['nsdId'] = '1'

        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        for i in range(2):
            NSPackageModel(
                nsPackageId=str(i),
                onboardingState='CREATED',
                operationalState='DISABLED',
                usageState='NOT_IN_USE',
                userDefinedData=user_defined_data,
                nsdId=str(i)
            ).save()

        response = self.client.get('/api/nsd/v1/ns_descriptors', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(expected_reponse_data, response.data)

        # Querying with nsdId=1 should return only the matching package.
        expected_reponse_data = [
            copy.deepcopy(self.expected_nsd_info)
        ]
        expected_reponse_data[0]['id'] = '1'
        expected_reponse_data[0]['nsdId'] = '1'
        response = self.client.get('/api/nsd/v1/ns_descriptors?nsdId=1', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(expected_reponse_data, response.data)

    def test_query_single_nsd_normal(self):
        """GET of one descriptor by id returns its NsdInfo representation."""
        expected_reponse_data = copy.deepcopy(self.expected_nsd_info)
        expected_reponse_data['id'] = '22'

        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        NSPackageModel(
            nsPackageId='22',
            onboardingState='CREATED',
            operationalState='DISABLED',
            usageState='NOT_IN_USE',
            userDefinedData=user_defined_data,
            nsdModel=json.JSONEncoder().encode(self.nsdModel)
        ).save()

        response = self.client.get('/api/nsd/v1/ns_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(expected_reponse_data, response.data)

    def test_query_single_when_ns_not_exist(self):
        """GET of a missing descriptor id returns 404."""
        response = self.client.get('/api/nsd/v1/ns_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_delete_single_nsd_normal(self):
        """DELETE of an existing descriptor returns 204 with no body."""
        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        NSPackageModel(
            nsPackageId='21',
            operationalState='DISABLED',
            usageState='NOT_IN_USE',
            userDefinedData=user_defined_data,
            nsdModel='test'
        ).save()

        response = self.client.delete("/api/nsd/v1/ns_descriptors/21", format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(None, response.data)

    def test_delete_when_ns_not_exist(self):
        """DELETE is idempotent: a missing descriptor still yields 204."""
        response = self.client.delete("/api/nsd/v1/ns_descriptors/21", format='json')
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)

    @mock.patch.object(toscaparsers, 'parse_nsd')
    def test_nsd_content_upload_normal(self, mock_parse_nsd):
        """PUT nsd_content onboards the package and persists the file."""
        user_defined_data_json = json.JSONEncoder().encode(self.user_defined_data)
        mock_parse_nsd.return_value = json.JSONEncoder().encode(nsd_data)
        # The parsed NSD references this VNF and PNF; they must already be
        # present for the onboarding dependency checks to pass.
        VnfPackageModel(
            vnfPackageId="111",
            vnfdId="vcpe_vfw_zte_1_0"
        ).save()

        PnfPackageModel(
            pnfPackageId="112",
            pnfdId="m6000_s"
        ).save()

        NSPackageModel(
            nsPackageId='22',
            operationalState='DISABLED',
            usageState='NOT_IN_USE',
            userDefinedData=user_defined_data_json,
        ).save()

        # NOTE(review): writing a str to a file opened in 'wb' assumes
        # Python 2 str/bytes semantics -- confirm before porting to py3.
        with open('nsd_content.txt', 'wb') as fp:
            fp.write('test')
        with open('nsd_content.txt', 'rb') as fp:
            resp = self.client.put(
                "/api/nsd/v1/ns_descriptors/22/nsd_content",
                {'file': fp},
            )
        file_content = ''
        with open(os.path.join(GENERICPARSER_ROOT_PATH, '22/nsd_content.txt')) as fp:
            data = fp.read()
            file_content = '%s%s' % (file_content, data)
        ns_pkg = NSPackageModel.objects.filter(nsPackageId="22")
        self.assertEqual("VCPE_NS", ns_pkg[0].nsdId)
        self.assertEqual(PKG_STATUS.ONBOARDED, ns_pkg[0].onboardingState)
        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(None, resp.data)
        self.assertEqual(file_content, 'test')
        os.remove('nsd_content.txt')

    def test_nsd_content_upload_failure(self):
        """Uploading content for a descriptor that was never created fails (500)."""
        with open('nsd_content.txt', 'wb') as fp:
            fp.write('test')
        with open('nsd_content.txt', 'rb') as fp:
            response = self.client.put(
                "/api/nsd/v1/ns_descriptors/22/nsd_content",
                {'file': fp},
            )
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    def test_nsd_content_download_normal(self):
        """GET nsd_content streams back the stored file."""
        # writelines() with a str iterates its characters; the resulting
        # file content is the same as two write() calls.
        with open('nsd_content.txt', 'wb') as fp:
            fp.writelines('test1')
            fp.writelines('test2')
        NSPackageModel.objects.create(
            nsPackageId='23',
            onboardingState='ONBOARDED',
            localFilePath='nsd_content.txt'
        )
        response = self.client.get(
            "/api/nsd/v1/ns_descriptors/23/nsd_content", format='json'
        )
        file_content = ""
        for data in response.streaming_content:
            file_content = '%s%s' % (file_content, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual('test1test2', file_content)
        os.remove('nsd_content.txt')

    def test_nsd_content_download_when_ns_not_exist(self):
        """Downloading content of a missing descriptor returns 404."""
        response = self.client.get("/api/nsd/v1/ns_descriptors/23/nsd_content", format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_nsd_content_download_failed(self):
        """Downloading before the package is onboarded returns 500."""
        NSPackageModel.objects.create(
            nsPackageId='23',
            onboardingState='CREATED',
            localFilePath='nsd_content.txt'
        )
        response = self.client.get("/api/nsd/v1/ns_descriptors/23/nsd_content", format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    def test_nsd_content_partial_download_normal(self):
        """A RANGE header limits the download to the requested byte span."""
        with open('nsd_content.txt', 'wb') as fp:
            fp.writelines('test1')
            fp.writelines('test2')
        NSPackageModel(
            nsPackageId='23',
            onboardingState='ONBOARDED',
            localFilePath='nsd_content.txt'
        ).save()

        # Bytes 5-10 of 'test1test2' are 'test2'.
        response = self.client.get(
            "/api/nsd/v1/ns_descriptors/23/nsd_content",
            RANGE='5-10',
            format='json'
        )
        partial_file_content = ''
        for data in response.streaming_content:
            partial_file_content = '%s%s' % (partial_file_content, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual('test2', partial_file_content)
        os.remove('nsd_content.txt')

    @mock.patch.object(NsDescriptor, 'create')
    def test_nsd_create_when_catch_exception(self, mock_create):
        """Unexpected exceptions in the business layer surface as 500."""
        reqest_data = {'userDefinedData': self.user_defined_data}
        mock_create.side_effect = TypeError("integer type")
        response = self.client.post('/api/nsd/v1/ns_descriptors', data=reqest_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(NsDescriptor, 'query_single')
    def test_query_single_when_catch_exception(self, mock_query_single):
        """An exception during a single query surfaces as 500."""
        mock_query_single.side_effect = TypeError("integer type")
        response = self.client.get('/api/nsd/v1/ns_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(NsDescriptor, 'query_multiple')
    def test_query_multiple_when_catch_exception(self, mock_query_multipe):
        """An exception during listing surfaces as 500."""
        mock_query_multipe.side_effect = TypeError("integer type")
        response = self.client.get('/api/nsd/v1/ns_descriptors', format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(NsDescriptor, 'delete_single')
    def test_delete_when_catch_exception(self, mock_delete_single):
        """An exception during delete surfaces as 500."""
        mock_delete_single.side_effect = TypeError("integer type")
        response = self.client.delete("/api/nsd/v1/ns_descriptors/21", format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(NsDescriptor, 'upload')
    def test_upload_when_catch_exception(self, mock_upload):
        """An exception during upload surfaces as 500."""
        mock_upload.side_effect = TypeError("integer type")
        with open('nsd_content.txt', 'wb') as fp:
            fp.write('test')
        with open('nsd_content.txt', 'rb') as fp:
            response = self.client.put("/api/nsd/v1/ns_descriptors/22/nsd_content", {'file': fp})
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
        os.remove('nsd_content.txt')

    @mock.patch.object(NsDescriptor, 'download')
    def test_download_when_catch_exception(self, mock_download):
        """An exception during download surfaces as 500."""
        mock_download.side_effect = TypeError("integer type")
        response = self.client.get("/api/nsd/v1/ns_descriptors/23/nsd_content", format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/genericparser/packages/tests/test_nsdm_subscription.py b/genericparser/packages/tests/test_nsdm_subscription.py
new file mode 100644 (file)
index 0000000..2d98d7a
--- /dev/null
@@ -0,0 +1,527 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import mock
+import uuid
+from django.test import TestCase
+from rest_framework.test import APIClient
+from rest_framework import status
+
+from genericparser.packages.biz.nsdm_subscription import NsdmSubscription
+from genericparser.pub.database.models import NsdmSubscriptionModel
+
+
+class TestNsdmSubscription(TestCase):
+
    def setUp(self):
        """Build the API client and the subscription fixtures.

        Existing subscriptions are removed first so each test starts from
        an empty NsdmSubscriptionModel table.
        """
        self.client = APIClient()
        NsdmSubscriptionModel.objects.all().delete()
        self.subscription_id = str(uuid.uuid4())
        # Minimal valid subscription request body (BASIC auth).
        self.subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["BASIC"],
                "paramsBasic": {
                    "userName": "username",
                    "password": "password"
                }
            }
        }
        self.links = {
            "self": {
                "href": "/api/v1/subscriptions/" + self.subscription_id
            }
        }
        # Full representation the API is expected to return for the
        # subscription above (all filter lists empty by default).
        self.test_subscription = {
            "callbackUri": "http://callbackuri.com",
            "id": self.subscription_id,
            "filter": {
                "notificationTypes": [
                    "NsdOnBoardingNotification"
                ],
                "nsdInfoId": [],
                "nsdId": [],
                "nsdName": [],
                "nsdVersion": [],
                "nsdInvariantId": [],
                "vnfPkgIds": [],
                "nestedNsdInfoIds": [],
                "nsdOnboardingState": [],
                "nsdOperationalState": [],
                "nsdUsageState": [],
                "pnfdInfoIds": [],
                "pnfdId": [],
                "pnfdName": [],
                "pnfdVersion": [],
                "pnfdProvider": [],
                "pnfdInvariantId": [],
                "pnfdOnboardingState": [],
                "pnfdUsageState": []
            },
            "_links": self.links,
        }
+
    def tearDown(self):
        """No per-test cleanup is required."""
        pass
+
+    @mock.patch("requests.get")
+    @mock.patch.object(uuid, 'uuid4')
+    def test_nsdm_subscribe_notification(self, mock_uuid4, mock_requests):
+        temp_uuid = str(uuid.uuid4())
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.return_value.status_code = 204
+        mock_uuid4.return_value = temp_uuid
+        response = self.client.post("/api/nsd/v1/subscriptions",
+                                    data=self.subscription, format='json')
+        self.assertEqual(201, response.status_code)
+        self.assertEqual(self.subscription["callbackUri"],
+                         response.data["callbackUri"])
+        self.assertEqual(temp_uuid, response.data["id"])
+
+    @mock.patch("requests.get")
+    @mock.patch.object(uuid, 'uuid4')
+    def test_nsdm_subscribe_callbackFailure(self, mock_uuid4, mock_requests):
+        temp_uuid = str(uuid.uuid4())
+        mock_requests.return_value.status_code = 500
+        mock_requests.get.return_value.status_code = 500
+        mock_uuid4.return_value = temp_uuid
+        expected_data = {
+            'status': 500,
+            'detail': "callbackUri http://callbackuri.com didn't"
+                      " return 204 statuscode.",
+            'title': 'Creating Subscription Failed!'
+        }
+        response = self.client.post("/api/nsd/v1/subscriptions",
+                                    data=self.subscription, format='json')
+        self.assertEqual(500, response.status_code)
+        self.assertEqual(expected_data, response.data)
+
    @mock.patch("requests.get")
    def test_nsdm_second_subscription(self, mock_requests):
        """Same callbackUri with a different filter is accepted as a new subscription."""
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=self.subscription, format='json')
        self.assertEqual(201, response.status_code)
        self.assertEqual(self.subscription["callbackUri"],
                         response.data["callbackUri"])
        # Second request differs only by carrying an nsdId filter.
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["BASIC"],
                "paramsBasic": {
                    "userName": "username",
                    "password": "password"
                }
            },
            "filter": {
                "nsdId": ["b632bddc-bccd-4180-bd8d-4e8a9578eff7"],
            }
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(201, response.status_code)
        self.assertEqual(dummy_subscription["callbackUri"],
                         response.data["callbackUri"])
+
+    @mock.patch("requests.get")
+    def test_nsdm_duplicate_subscription(self, mock_requests):
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.return_value.status_code = 204
+        response = self.client.post("/api/nsd/v1/subscriptions",
+                                    data=self.subscription, format='json')
+        self.assertEqual(201, response.status_code)
+        self.assertEqual(self.subscription["callbackUri"],
+                         response.data["callbackUri"])
+        expected_data = {
+            'status': 303,
+            'detail': 'Already Subscription exists with'
+                      ' the same callbackUri and filter',
+            'title': 'Creating Subscription Failed!'
+        }
+        response = self.client.post("/api/nsd/v1/subscriptions",
+                                    data=self.subscription, format='json')
+        self.assertEqual(303, response.status_code)
+        self.assertEqual(expected_data, response.data)
+
+    @mock.patch("requests.get")
+    def test_nsdm_bad_request(self, mock_requests):
+        dummy_subscription = {
+            "callbackUri": "http://callbackuri.com",
+            "authentication": {
+                "authType": ["BASIC"],
+                "paramsBasic": {
+                    "userName": "username",
+                    "password": "password"
+                }
+            },
+            "filter": {
+                "nsdId": "b632bddc-bccd-4180-bd8d-4e8a9578eff7",
+            }
+        }
+        response = self.client.post("/api/nsd/v1/subscriptions",
+                                    data=dummy_subscription, format='json')
+        self.assertEqual(400, response.status_code)
+
    @mock.patch("requests.get")
    def test_nsdm_invalid_authtype_subscription(self, mock_requests):
        """paramsBasic together with a non-BASIC authType is rejected (400)."""
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["OAUTH2_CLIENT_CREDENTIALS"],
                "paramsBasic": {
                    "userName": "username",
                    "password": "password"
                }
            }
        }
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        expected_data = {
            'status': 400,
            'detail': 'Auth type should be BASIC',
            'title': 'Creating Subscription Failed!'
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(400, response.status_code)
        self.assertEqual(expected_data, response.data)
+
    @mock.patch("requests.get")
    def test_nsdm_invalid_authtype_oauthclient_subscription(
            self, mock_requests):
        """paramsOauth2ClientCredentials with BASIC authType is rejected (400)."""
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["BASIC"],
                "paramsOauth2ClientCredentials": {
                    "clientId": "clientId",
                    "clientPassword": "password",
                    "tokenEndpoint": "http://tokenEndpoint"
                }
            }
        }
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        expected_data = {
            'status': 400,
            'detail': 'Auth type should be OAUTH2_CLIENT_CREDENTIALS',
            'title': 'Creating Subscription Failed!'
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(400, response.status_code)
        self.assertEqual(expected_data, response.data)
+
    @mock.patch("requests.get")
    def test_nsdm_invalid_authparams_subscription(self, mock_requests):
        """BASIC auth params without a password are rejected (400)."""
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["BASIC"],
                "paramsBasic": {
                    "userName": "username"
                }
            }
        }
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        expected_data = {
            'status': 400,
            'detail': 'userName and password needed for BASIC',
            'title': 'Creating Subscription Failed!'
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(400, response.status_code)
        self.assertEqual(expected_data, response.data)
+
    @mock.patch("requests.get")
    def test_nsdm_invalid_authparams_oauthclient_subscription(
            self, mock_requests):
        """OAUTH2 credentials missing the clientId are rejected (400)."""
        dummy_subscription = {
            "callbackUri": "http://callbackuri.com",
            "authentication": {
                "authType": ["OAUTH2_CLIENT_CREDENTIALS"],
                "paramsOauth2ClientCredentials": {
                    "clientPassword": "password",
                    "tokenEndpoint": "http://tokenEndpoint"
                }
            }
        }
        mock_requests.return_value.status_code = 204
        mock_requests.get.return_value.status_code = 204
        expected_data = {
            'status': 400,
            'detail': 'clientId, clientPassword and tokenEndpoint'
                      ' required for OAUTH2_CLIENT_CREDENTIALS',
            'title': 'Creating Subscription Failed!'
        }
        response = self.client.post("/api/nsd/v1/subscriptions",
                                    data=dummy_subscription, format='json')
        self.assertEqual(400, response.status_code)
        self.assertEqual(expected_data, response.data)
+
+    @mock.patch("requests.get")
+    def test_nsdm_invalid_filter_subscription(self, mock_requests):
+        """A filter carrying both nsdId and nsdInfoId must be rejected (400)."""
+        dummy_subscription = {
+            "callbackUri": "http://callbackuri.com",
+            "authentication": {
+                "authType": ["BASIC"],
+                "paramsBasic": {
+                    "userName": "username",
+                    "password": "password"
+                }
+            },
+            # nsdId and nsdInfoId are mutually exclusive in the filter.
+            "filter": {
+                "nsdId": ["b632bddc-bccd-4180-bd8d-4e8a9578eff7"],
+                "nsdInfoId": ["d0ea5ec3-0b98-438a-9bea-488230cff174"]
+            }
+        }
+        # Make any outbound GET succeed so only the filter validation can fail.
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.return_value.status_code = 204
+        expected_data = {
+            'status': 400,
+            'detail': 'Notification Filter should contain'
+                      ' either nsdId or nsdInfoId',
+            'title': 'Creating Subscription Failed!'
+        }
+        response = self.client.post("/api/nsd/v1/subscriptions",
+                                    data=dummy_subscription, format='json')
+        self.assertEqual(400, response.status_code)
+        self.assertEqual(expected_data, response.data)
+
+    @mock.patch("requests.get")
+    def test_nsdm_invalid_filter_pnfd_subscription(self, mock_requests):
+        """A filter carrying both pnfdId and pnfdInfoIds must be rejected (400)."""
+        dummy_subscription = {
+            "callbackUri": "http://callbackuri.com",
+            "authentication": {
+                "authType": ["BASIC"],
+                "paramsBasic": {
+                    "userName": "username",
+                    "password": "password"
+                }
+            },
+            # pnfdId and pnfdInfoIds are mutually exclusive in the filter.
+            "filter": {
+                "pnfdId": ["b632bddc-bccd-4180-bd8d-4e8a9578eff7"],
+                "pnfdInfoIds": ["d0ea5ec3-0b98-438a-9bea-488230cff174"]
+            }
+        }
+        # Make any outbound GET succeed so only the filter validation can fail.
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.return_value.status_code = 204
+        expected_data = {
+            'status': 400,
+            'detail': 'Notification Filter should contain'
+                      ' either pnfdId or pnfdInfoIds',
+            'title': 'Creating Subscription Failed!'
+        }
+        response = self.client.post("/api/nsd/v1/subscriptions",
+                                    data=dummy_subscription, format='json')
+        self.assertEqual(400, response.status_code)
+        self.assertEqual(expected_data, response.data)
+
+    @mock.patch.object(NsdmSubscription, 'create')
+    def test_nsdmsubscription_create_when_catch_exception(self, mock_create):
+        """An unexpected exception in the biz layer maps to HTTP 500."""
+        mock_create.side_effect = TypeError("Unicode type")
+        # self.subscription is prepared in setUp (defined outside this chunk).
+        response = self.client.post('/api/nsd/v1/subscriptions',
+                                    data=self.subscription, format='json')
+        self.assertEqual(response.status_code,
+                         status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    def test_nsdm_get_subscriptions(self):
+        """GET on the collection returns every stored subscription."""
+        # Persist one subscription row; all filter columns are left empty so
+        # the record matches an unfiltered query.
+        NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+                              callback_uri="http://callbackuri.com",
+                              auth_info={},
+                              notificationTypes=json.dumps(
+                                  ["NsdOnBoardingNotification"]),
+                              nsdId=[], nsdVersion=[],
+                              nsdInfoId=[], nsdDesigner=[],
+                              nsdName=[], nsdInvariantId=[],
+                              vnfPkgIds=[], pnfdInfoIds=[],
+                              nestedNsdInfoIds=[], nsdOnboardingState=[],
+                              nsdOperationalState=[], nsdUsageState=[],
+                              pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+                              pnfdName=[], pnfdInvariantId=[],
+                              pnfdOnboardingState=[], pnfdUsageState=[],
+                              links=json.dumps(self.links)).save()
+        response = self.client.get("/api/nsd/v1/subscriptions",
+                                   format='json')
+        self.assertEqual(status.HTTP_200_OK, response.status_code)
+        # self.test_subscription is the expected serialized form (from setUp).
+        self.assertEqual([self.test_subscription], response.data)
+
+    def test_nsdm_get_subscriptions_filter(self):
+        """A matching notificationTypes query filter returns the subscription."""
+        # Persist one subscription whose notificationTypes matches the query.
+        NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+                              callback_uri="http://callbackuri.com",
+                              auth_info={},
+                              notificationTypes=json.dumps(
+                                  ["NsdOnBoardingNotification"]),
+                              nsdId=[], nsdVersion=[],
+                              nsdInfoId=[], nsdDesigner=[],
+                              nsdName=[], nsdInvariantId=[],
+                              vnfPkgIds=[], pnfdInfoIds=[],
+                              nestedNsdInfoIds=[], nsdOnboardingState=[],
+                              nsdOperationalState=[], nsdUsageState=[],
+                              pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+                              pnfdName=[], pnfdInvariantId=[],
+                              pnfdOnboardingState=[], pnfdUsageState=[],
+                              links=json.dumps(self.links)).save()
+        response = self.client.get("/api/nsd/v1/subscriptions"
+                                   "?notificationTypes"
+                                   "=NsdOnBoardingNotification",
+                                   format='json')
+        self.assertEqual(status.HTTP_200_OK, response.status_code)
+        self.assertEqual([self.test_subscription], response.data)
+
+    def test_nsdm_get_subscriptions_filter_failure(self):
+        """A valid but non-matching notificationTypes filter yields 404."""
+        # The stored row subscribes to NsdOnBoardingNotification only, so a
+        # query for PnfdOnBoardingFailureNotification finds nothing.
+        NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+                              callback_uri="http://callbackuri.com",
+                              auth_info={},
+                              notificationTypes=json.dumps(
+                                  ["NsdOnBoardingNotification"]),
+                              nsdId=[], nsdVersion=[],
+                              nsdInfoId=[], nsdDesigner=[],
+                              nsdName=[], nsdInvariantId=[],
+                              vnfPkgIds=[], pnfdInfoIds=[],
+                              nestedNsdInfoIds=[], nsdOnboardingState=[],
+                              nsdOperationalState=[], nsdUsageState=[],
+                              pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+                              pnfdName=[], pnfdInvariantId=[],
+                              pnfdOnboardingState=[], pnfdUsageState=[],
+                              links=json.dumps(self.links)).save()
+        response = self.client.get("/api/nsd/v1/subscriptions"
+                                   "?notificationTypes="
+                                   "PnfdOnBoardingFailureNotification",
+                                   format='json')
+        self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)
+
+    def test_nsdm_get_subscriptions_invalid_filter(self):
+        """An unrecognised notificationTypes value is a bad request (400)."""
+        NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+                              callback_uri="http://callbackuri.com",
+                              auth_info={},
+                              notificationTypes=json.dumps(
+                                  ["NsdOnBoardingNotification"]),
+                              nsdId=[], nsdVersion=[],
+                              nsdInfoId=[], nsdDesigner=[],
+                              nsdName=[], nsdInvariantId=[],
+                              vnfPkgIds=[], pnfdInfoIds=[],
+                              nestedNsdInfoIds=[], nsdOnboardingState=[],
+                              nsdOperationalState=[], nsdUsageState=[],
+                              pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+                              pnfdName=[], pnfdInvariantId=[],
+                              pnfdOnboardingState=[], pnfdUsageState=[],
+                              links=json.dumps(self.links)).save()
+        # "PnfdOnBoardingFailureNotificati" is deliberately truncated so it is
+        # not a valid notification type name.
+        response = self.client.get("/api/nsd/v1/subscriptions"
+                                   "?notificationTypes="
+                                   "PnfdOnBoardingFailureNotificati",
+                                   format='json')
+        self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)
+
+    @mock.patch.object(NsdmSubscription, 'query_multi_subscriptions')
+    def test_nsdmsubscription_get_when_catch_exception(self, mock_create):
+        """An unexpected exception while listing subscriptions maps to 500."""
+        mock_create.side_effect = TypeError("Unicode type")
+        response = self.client.get('/api/nsd/v1/subscriptions', format='json')
+        self.assertEqual(response.status_code,
+                         status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    def test_nsdm_get_subscription(self):
+        """GET on a single existing subscription returns it with 200."""
+        NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+                              callback_uri="http://callbackuri.com",
+                              auth_info={},
+                              notificationTypes=json.dumps(
+                                  ["NsdOnBoardingNotification"]),
+                              nsdId=[], nsdVersion=[],
+                              nsdInfoId=[], nsdDesigner=[],
+                              nsdName=[], nsdInvariantId=[],
+                              vnfPkgIds=[], pnfdInfoIds=[],
+                              nestedNsdInfoIds=[], nsdOnboardingState=[],
+                              nsdOperationalState=[], nsdUsageState=[],
+                              pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+                              pnfdName=[], pnfdInvariantId=[],
+                              pnfdOnboardingState=[], pnfdUsageState=[],
+                              links=json.dumps(self.links)).save()
+        response = self.client.get('/api/nsd/v1/'
+                                   'subscriptions/' + self.subscription_id,
+                                   format='json')
+        self.assertEqual(status.HTTP_200_OK, response.status_code)
+        self.assertEqual(self.test_subscription, response.data)
+
+    def test_nsdm_get_subscription_failure(self):
+        """GET on an absent subscription id returns 404 with problem details."""
+        expected_data = {
+            "title": "Query Subscription Failed!",
+            "status": 404,
+            "detail": "Subscription(" + self.subscription_id + ") "
+            "doesn't exists"
+        }
+        # Nothing is persisted before the request, so the lookup must miss.
+        response = self.client.get('/api/nsd/v1/'
+                                   'subscriptions/' + self.subscription_id,
+                                   format='json')
+        self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)
+        self.assertEqual(expected_data, response.data)
+
+    def test_nsdm_get_subscription_failure_bad_request(self):
+        """A malformed (non-UUID, presumably) subscription id yields 400."""
+        response = self.client.get("/api/nsd/v1/subscriptions/123",
+                                   format='json')
+        self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)
+
+    @mock.patch.object(NsdmSubscription, 'query_single_subscription')
+    def test_nsdmsubscription_getsingle_when_catch_exception(
+            self, mock_create):
+        """An unexpected exception on a single-subscription GET maps to 500."""
+        mock_create.side_effect = TypeError("Unicode type")
+        response = self.client.get('/api/nsd/v1/'
+                                   'subscriptions/' + self.subscription_id,
+                                   format='json')
+        self.assertEqual(response.status_code,
+                         status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    # NOTE(review): "ndsm" in the name is a typo for "nsdm" (cf. sibling
+    # tests); left as-is here since renaming is a behavioral change.
+    def test_ndsm_delete_subscription(self):
+        """DELETE on an existing subscription returns 204 No Content."""
+        NsdmSubscriptionModel(subscriptionid=self.subscription_id,
+                              callback_uri="http://callbackuri.com",
+                              auth_info={},
+                              notificationTypes=json.dumps(
+                                  ["NsdOnBoardingNotification"]),
+                              nsdId=[], nsdVersion=[],
+                              nsdInfoId=[], nsdDesigner=[],
+                              nsdName=[], nsdInvariantId=[],
+                              vnfPkgIds=[], pnfdInfoIds=[],
+                              nestedNsdInfoIds=[], nsdOnboardingState=[],
+                              nsdOperationalState=[], nsdUsageState=[],
+                              pnfdId=[], pnfdVersion=[], pnfdProvider=[],
+                              pnfdName=[], pnfdInvariantId=[],
+                              pnfdOnboardingState=[], pnfdUsageState=[],
+                              links=json.dumps(self.links)).save()
+        response = self.client.delete('/api/nsd/v1/'
+                                      'subscriptions/' + self.subscription_id,
+                                      format='json')
+        self.assertEqual(status.HTTP_204_NO_CONTENT, response.status_code)
+
+    # NOTE(review): "ndsm" in the name is a typo for "nsdm" (cf. sibling
+    # tests); left as-is here since renaming is a behavioral change.
+    def test_ndsm_delete_subscription_failure(self):
+        """DELETE on an absent subscription id returns 404."""
+        response = self.client.delete('/api/nsd/v1/'
+                                      'subscriptions/' + self.subscription_id,
+                                      format='json')
+        self.assertEqual(status.HTTP_404_NOT_FOUND, response.status_code)
+
+    def test_nsdm_delete_subscription_failure_bad_request(self):
+        """DELETE with a malformed subscription id yields 400."""
+        response = self.client.delete("/api/nsd/v1/subscriptions/123",
+                                      format='json')
+        self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)
+
+    @mock.patch.object(NsdmSubscription, 'delete_single_subscription')
+    def test_nsdmsubscription_delete_when_catch_exception(self, mock_create):
+        """An unexpected exception during DELETE maps to HTTP 500."""
+        mock_create.side_effect = TypeError("Unicode type")
+        response = self.client.delete('/api/nsd/v1/'
+                                      'subscriptions/' + self.subscription_id,
+                                      format='json')
+        self.assertEqual(response.status_code,
+                         status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/genericparser/packages/tests/test_nspackage.py b/genericparser/packages/tests/test_nspackage.py
new file mode 100644 (file)
index 0000000..f44488e
--- /dev/null
@@ -0,0 +1,466 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import mock
+from rest_framework import status
+from django.test import TestCase
+from django.test import Client
+
+from genericparser.pub.utils import restcall, toscaparsers
+from genericparser.pub.database.models import NSPackageModel, VnfPackageModel, PnfPackageModel
+from genericparser.pub.msapi import sdc
+
+
+class TestNsPackage(TestCase):
+    def setUp(self):
+        """Reset package tables and build the reference parsed-NSD fixture.
+
+        self.nsd_data mirrors the dict shape returned by the NSD parser
+        (vnffgs / inputs / pnfs / fps / vnfs / vls / cps / metadata / ns) —
+        used by tests that mock toscaparsers.parse_nsd.
+        """
+        self.client = Client()
+        # Start every test from empty NS and VNF package tables.
+        NSPackageModel.objects.filter().delete()
+        VnfPackageModel.objects.filter().delete()
+        self.nsd_data = {"vnffgs": [{"vnffg_id": "vnffg1",
+                                     "description": "",
+                                     "members": ["path1",
+                                                 "path2"],
+                                     "properties": {"vendor": "zte",
+                                                    "connection_point": ["m6000_data_in",
+                                                                         "m600_tunnel_cp",
+                                                                         "m6000_data_out"],
+                                                    "version": "1.0",
+                                                    "constituent_vnfs": ["VFW",
+                                                                         "VNAT"],
+                                                    "number_of_endpoints": 3,
+                                                    "dependent_virtual_link": ["sfc_data_network",
+                                                                               "ext_datanet_net",
+                                                                               "ext_mnet_net"]}}],
+                         "inputs": {"sfc_data_network": {"type": "string",
+                                                         "value": "sfc_data_network"},
+                                    "externalDataNetworkName": {"type": "string",
+                                                                "value": "vlan_4004_tunnel_net"},
+                                    "externalManageNetworkName": {"type": "string",
+                                                                  "value": "vlan_4008_mng_net"},
+                                    "NatIpRange": {"type": "string",
+                                                   "value": "192.167.0.10-192.168.0.20"},
+                                    "externalPluginManageNetworkName": {"type": "string",
+                                                                        "value": "vlan_4007_plugin_net"}},
+                         "pnfs": [{"pnf_id": "m6000_s",
+                                   "cps": [],
+                                   "description": "",
+                                   "properties": {"vendor": "zte",
+                                                  "request_reclassification": False,
+                                                  "pnf_type": "m6000s",
+                                                  "version": "1.0",
+                                                  "management_address": "111111",
+                                                  "id": "m6000_s",
+                                                  "nsh_aware": False}}],
+                         "fps": [{"properties": {"symmetric": False,
+                                                 "policy": {"type": "ACL",
+                                                            "criteria": {"dest_port_range": "1-100",
+                                                                         "ip_protocol": "tcp",
+                                                                         "source_ip_range": ["119.1.1.1-119.1.1.10"],
+                                                                         "dest_ip_range": [{"get_input": "NatIpRange"}],
+                                                                         "dscp": 0,
+                                                                         "source_port_range": "1-100"}}},
+                                  "forwarder_list": [{"capability": "",
+                                                      "type": "cp",
+                                                      "node_name": "m6000_data_out"},
+                                                     {"capability": "",
+                                                      "type": "cp",
+                                                      "node_name": "m600_tunnel_cp"},
+                                                     {"capability": "vnat_fw_inout",
+                                                      "type": "vnf",
+                                                      "node_name": "VNAT"}],
+                                  "description": "",
+                                  "fp_id": "path2"},
+                                 {"properties": {"symmetric": True,
+                                                 "policy": {"type": "ACL",
+                                                            "criteria": {"dest_port_range": "1-100",
+                                                                         "ip_protocol": "tcp",
+                                                                         "source_ip_range": ["1-100"],
+                                                                         "dest_ip_range": ["1-100"],
+                                                                         "dscp": 4,
+                                                                         "source_port_range": "1-100"}}},
+                                  "forwarder_list": [{"capability": "",
+                                                      "type": "cp",
+                                                      "node_name": "m6000_data_in"},
+                                                     {"capability": "",
+                                                      "type": "cp",
+                                                      "node_name": "m600_tunnel_cp"},
+                                                     {"capability": "vfw_fw_inout",
+                                                      "type": "vnf",
+                                                      "node_name": "VFW"},
+                                                     {"capability": "vnat_fw_inout",
+                                                      "type": "vnf",
+                                                      "node_name": "VNAT"},
+                                                     {"capability": "",
+                                                      "type": "cp",
+                                                      "node_name": "m600_tunnel_cp"},
+                                                     {"capability": "",
+                                                      "type": "cp",
+                                                      "node_name": "m6000_data_out"}],
+                                  "description": "",
+                                  "fp_id": "path1"}],
+                         "routers": [],
+                         "vnfs": [{"vnf_id": "VFW",
+                                   "description": "",
+                                   "properties": {"plugin_info": "vbrasplugin_1.0",
+                                                  "vendor": "zte",
+                                                  "is_shared": False,
+                                                  "adjust_vnf_capacity": True,
+                                                  "name": "VFW",
+                                                  "vnf_extend_type": "driver",
+                                                  "csarVersion": "v1.0",
+                                                  "csarType": "NFAR",
+                                                  "csarProvider": "ZTE",
+                                                  "version": "1.0",
+                                                  "nsh_aware": True,
+                                                  "cross_dc": False,
+                                                  "vnf_type": "VFW",
+                                                  "vmnumber_overquota_alarm": True,
+                                                  "vnfd_version": "1.0.0",
+                                                  "externalPluginManageNetworkName": "vlan_4007_plugin_net",
+                                                  "id": "vcpe_vfw_zte_1_0",
+                                                  "request_reclassification": False},
+                                   "dependencies": [{"key_name": "vfw_ctrl_by_manager_cp",
+                                                     "vl_id": "ext_mnet_net"},
+                                                    {"key_name": "vfw_data_cp",
+                                                     "vl_id": "sfc_data_network"}],
+                                   "type": "tosca.nodes.nfv.ext.zte.VNF.VFW",
+                                   "networks": []}],
+                         "ns_exposed": {"external_cps": [],
+                                        "forward_cps": []},
+                         "policies": [{"file_url": "policies/abc.drl",
+                                       "name": "aaa"}],
+                         "vls": [{"route_id": "",
+                                  "vl_id": "ext_mnet_net",
+                                  "route_external": False,
+                                  "description": "",
+                                  "properties": {"name": "vlan_4008_mng_net",
+                                                 "mtu": 1500,
+                                                 "location_info": {"tenant": "admin",
+                                                                   "vimid": 2,
+                                                                   "availability_zone": "nova"},
+                                                 "ip_version": 4,
+                                                 "dhcp_enabled": True,
+                                                 "network_name": "vlan_4008_mng_net",
+                                                 "network_type": "vlan"}},
+                                 {"route_id": "",
+                                  "vl_id": "ext_datanet_net",
+                                  "route_external": False,
+                                  "description": "",
+                                  "properties": {"name": "vlan_4004_tunnel_net",
+                                                 "mtu": 1500,
+                                                 "location_info": {"tenant": "admin",
+                                                                   "vimid": 2,
+                                                                   "availability_zone": "nova"},
+                                                 "ip_version": 4,
+                                                 "dhcp_enabled": True,
+                                                 "network_name": "vlan_4004_tunnel_net",
+                                                 "network_type": "vlan"}},
+                                 {"route_id": "",
+                                  "vl_id": "sfc_data_network",
+                                  "route_external": False,
+                                  "description": "",
+                                  "properties": {"name": "sfc_data_network",
+                                                 "dhcp_enabled": True,
+                                                 "is_predefined": False,
+                                                 "location_info": {"tenant": "admin",
+                                                                   "vimid": 2,
+                                                                   "availability_zone": "nova"},
+                                                 "ip_version": 4,
+                                                 "mtu": 1500,
+                                                 "network_name": "sfc_data_network",
+                                                 "network_type": "vlan"}}],
+                         "cps": [{"pnf_id": "m6000_s",
+                                  "vl_id": "path2",
+                                  "description": "",
+                                  "cp_id": "m6000_data_out",
+                                  "properties": {"direction": "bidirectional",
+                                                 "vnic_type": "normal",
+                                                 "bandwidth": 0,
+                                                 "mac_address": "11-22-33-22-11-44",
+                                                 "interface_name": "xgei-0/4/1/5",
+                                                 "ip_address": "176.1.1.2",
+                                                 "order": 0,
+                                                 "sfc_encapsulation": "mac"}},
+                                 {"pnf_id": "m6000_s",
+                                  "vl_id": "ext_datanet_net",
+                                  "description": "",
+                                  "cp_id": "m600_tunnel_cp",
+                                  "properties": {"direction": "bidirectional",
+                                                 "vnic_type": "normal",
+                                                 "bandwidth": 0,
+                                                 "mac_address": "00-11-00-22-33-00",
+                                                 "interface_name": "gei-0/4/0/13",
+                                                 "ip_address": "191.167.100.5",
+                                                 "order": 0,
+                                                 "sfc_encapsulation": "mac"}},
+                                 {"pnf_id": "m6000_s",
+                                  "vl_id": "path2",
+                                  "description": "",
+                                  "cp_id": "m6000_data_in",
+                                  "properties": {"direction": "bidirectional",
+                                                 "vnic_type": "normal",
+                                                 "bandwidth": 0,
+                                                 "mac_address": "11-22-33-22-11-41",
+                                                 "interface_name": "gei-0/4/0/7",
+                                                 "ip_address": "1.1.1.1",
+                                                 "order": 0,
+                                                 "sfc_encapsulation": "mac",
+                                                 "bond": "none"}},
+                                 {"pnf_id": "m6000_s",
+                                  "vl_id": "ext_mnet_net",
+                                  "description": "",
+                                  "cp_id": "m600_mnt_cp",
+                                  "properties": {"direction": "bidirectional",
+                                                 "vnic_type": "normal",
+                                                 "bandwidth": 0,
+                                                 "mac_address": "00-11-00-22-33-11",
+                                                 "interface_name": "gei-0/4/0/1",
+                                                 "ip_address": "10.46.244.51",
+                                                 "order": 0,
+                                                 "sfc_encapsulation": "mac",
+                                                 "bond": "none"}}],
+                         "metadata": {"invariant_id": "vcpe_ns_sff_1",
+                                      "name": "VCPE_NS",
+                                      "csarVersion": "v1.0",
+                                      "csarType": "NSAR",
+                                      "csarProvider": "ZTE",
+                                      "version": 1,
+                                      "vendor": "ZTE",
+                                      "id": "VCPE_NS",
+                                      "description": "vcpe_ns"},
+                         "ns": {
+                             "properties": {
+                                 "descriptor_id": "VCPE_NS",
+                                 "version": 1,
+                                 "name": "VCPE_NS",
+                                 # [sic] "desginer" — presumably the key the
+                                 # parser emits; verify before renaming.
+                                 "desginer": "ZTE",
+                                 "invariant_id": "vcpe_ns_sff_1"
+                             }
+        }
+        }
+
    def tearDown(self):
        # No explicit cleanup required for these tests.
        pass
+
+    def test_ns_pkg_distribute_when_ns_exists(self):
+        NSPackageModel(nsPackageId="1", nsdId="2").save()
+        resp = self.client.post(
+            "/api/genericparser/v1/nspackages", {"csarId": "1"}, format='json')
+        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+        self.assertEqual("failed", resp.data["status"])
+        self.assertEqual(
+            "NS CSAR(1) already exists.",
+            resp.data["statusDescription"])
+
+    @mock.patch.object(restcall, 'call_req')
+    def test_ns_pkg_distribute_when_csar_not_exist(self, mock_call_req):
+        mock_call_req.return_value = [0, "[]", '200']
+        resp = self.client.post(
+            "/api/genericparser/v1/nspackages", {"csarId": "1"}, format='json')
+        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+        self.assertEqual("failed", resp.data["status"])
+        self.assertEqual(
+            "Failed to query artifact(services,1) from sdc.",
+            resp.data["statusDescription"])
+
+    @mock.patch.object(restcall, 'call_req')
+    @mock.patch.object(sdc, 'download_artifacts')
+    @mock.patch.object(toscaparsers, 'parse_nsd')
+    def test_ns_pkg_distribute_when_nsd_already_exists(
+            self, mock_parse_nsd, mock_download_artifacts, mock_call_req):
+        mock_parse_nsd.return_value = json.JSONEncoder().encode(self.nsd_data)
+        mock_download_artifacts.return_value = "/home/vcpe.csar"
+        mock_call_req.return_value = [0, json.JSONEncoder().encode([{
+            "uuid": "1",
+            "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/vcpe.csar",
+            "distributionStatus": "DISTRIBUTED"
+        }]), '200']
+        NSPackageModel(nsPackageId="2", nsdId="VCPE_NS").save()
+        resp = self.client.post(
+            "/api/genericparser/v1/nspackages", {"csarId": "1"}, format='json')
+        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+        self.assertEqual("failed", resp.data["status"])
+        self.assertEqual(
+            "NSD(VCPE_NS) already exists.",
+            resp.data["statusDescription"])
+
+    @mock.patch.object(restcall, 'call_req')
+    @mock.patch.object(sdc, 'download_artifacts')
+    @mock.patch.object(toscaparsers, 'parse_nsd')
+    def test_ns_pkg_distribute_when_nf_not_distributed(
+            self, mock_parse_nsd, mock_download_artifacts, mock_call_req):
+        mock_parse_nsd.return_value = json.JSONEncoder().encode(self.nsd_data)
+        mock_download_artifacts.return_value = "/home/vcpe.csar"
+        mock_call_req.return_value = [0, json.JSONEncoder().encode([{
+            "uuid": "1",
+            "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/vcpe.csar",
+            "distributionStatus": "DISTRIBUTED",
+        }]), '200']
+        resp = self.client.post(
+            "/api/genericparser/v1/nspackages", {"csarId": "1"}, format='json')
+        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+        self.assertEqual("failed", resp.data["status"])
+        self.assertEqual(
+            "VNF package(vcpe_vfw_zte_1_0) is not distributed.",
+            resp.data["statusDescription"])
+
+    @mock.patch.object(restcall, 'call_req')
+    @mock.patch.object(sdc, 'download_artifacts')
+    @mock.patch.object(toscaparsers, 'parse_nsd')
+    def test_ns_pkg_distribute_when_successfully(
+            self, mock_parse_nsd, mock_download_artifacts, mock_call_req):
+        mock_parse_nsd.return_value = json.JSONEncoder().encode(self.nsd_data)
+        mock_download_artifacts.return_value = "/home/vcpe.csar"
+        mock_call_req.return_value = [0, json.JSONEncoder().encode([{
+            "uuid": "1",
+            "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/vcpe.csar",
+            "distributionStatus": "DISTRIBUTED"
+        }]), '200']
+        VnfPackageModel(vnfPackageId="1", vnfdId="vcpe_vfw_zte_1_0").save()
+        PnfPackageModel(pnfPackageId="1", pnfdId="m6000_s").save()
+        resp = self.client.post(
+            "/api/genericparser/v1/nspackages", {"csarId": "1"}, format='json')
+        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+        self.assertEqual("success", resp.data["status"])
+        self.assertEqual(
+            "CSAR(1) distributed successfully.",
+            resp.data["statusDescription"])
+
+    @mock.patch.object(sdc, 'get_artifacts')
+    def test_ns_when_not_distributed_by_sdc(self, mock_get_artifacts):
+        mock_get_artifacts.return_value = [{
+            "uuid": "1",
+            "invariantUUID": "63eaec39-ffbe-411c-a838-448f2c73f7eb",
+            "name": "underlayvpn",
+            "version": "2.0",
+            "toscaModelURL": "/sdc/v1/genericparser/resources/c94490a0-f7ef-48be-b3f8-8d8662a37236/toscaModel",
+            "category": "Volte",
+            "subCategory": "VolteVNF",
+            "resourceType": "VF",
+            "lifecycleState": "CERTIFIED",
+            "distributionStatus": "DISTRIBUTION_APPROVED",
+            "lastUpdaterUserId": "jh0003"
+        }]
+        resp = self.client.post(
+            "/api/genericparser/v1/nspackages", {"csarId": "1"}, format='json')
+        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+        self.assertEqual("failed", resp.data["status"])
+        self.assertEqual(
+            "The artifact (services,1) is not distributed from sdc.",
+            resp.data["statusDescription"])
+
+    ##########################################################################
+
+    def test_ns_pkg_normal_delete(self):
+        NSPackageModel(nsPackageId="8", nsdId="2").save()
+        resp = self.client.delete("/api/genericparser/v1/nspackages/8")
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+        self.assertEqual("success", resp.data["status"])
+        self.assertEqual(
+            "Delete CSAR(8) successfully.",
+            resp.data["statusDescription"])
+
+    def test_ns_pkg_get_all(self):
+        NSPackageModel(
+            nsPackageId="13",
+            nsdId="2",
+            nsdDesginer="2",
+            nsdVersion="2",
+            nsPackageUri="13.csar",
+            nsdModel="").save()
+        NSPackageModel(
+            nsPackageId="14",
+            nsdId="3",
+            nsdDesginer="3",
+            nsdVersion="3",
+            nsPackageUri="14.csar",
+            nsdModel="").save()
+        resp = self.client.get("/api/genericparser/v1/nspackages")
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+        expect_data = [{"csarId": "13",
+                        "packageInfo": {"csarName": "13.csar",
+                                        "nsdProvider": "2",
+                                        "nsdId": "2",
+                                        "nsPackageId": "13",
+                                        "downloadUrl": "http://127.0.0.1:8806/static/genericparser/13/13.csar",
+                                        "nsdModel": "",
+                                        "nsdVersion": "2",
+                                        "nsdInvariantId": None
+                                        }},
+                       {"csarId": "14",
+                        "packageInfo": {"csarName": "14.csar",
+                                        "nsdProvider": "3",
+                                        "nsdId": "3",
+                                        "nsPackageId": "14",
+                                        "downloadUrl": "http://127.0.0.1:8806/static/genericparser/14/14.csar",
+                                        "nsdModel": "",
+                                        "nsdVersion": "3",
+                                        "nsdInvariantId": None}}]
+        self.assertEqual(expect_data, resp.data)
+
+    def test_ns_pkg_get_one(self):
+        NSPackageModel(
+            nsPackageId="14",
+            nsdId="2",
+            nsdDesginer="3",
+            nsdVersion="4",
+            nsPackageUri="14.csar",
+            nsdModel="").save()
+        resp = self.client.get("/api/genericparser/v1/nspackages/14")
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+        expect_data = {
+            "csarId": "14",
+            "packageInfo": {
+                "nsdId": "2",
+                "nsPackageId": "14",
+                "nsdProvider": "3",
+                "nsdVersion": "4",
+                "csarName": "14.csar",
+                "nsdModel": "",
+                "downloadUrl": "http://127.0.0.1:8806/static/genericparser/14/14.csar",
+                "nsdInvariantId": None}}
+        self.assertEqual(expect_data, resp.data)
+
+    def test_ns_pkg_get_one_not_found(self):
+        resp = self.client.get("/api/genericparser/v1/nspackages/22")
+        self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+        self.assertEqual(
+            {"error": "Ns package[22] not Found."},
+            resp.data)
+
+    ##########################################################################
+
+    @mock.patch.object(toscaparsers, 'parse_nsd')
+    def test_nsd_parse_normal(self, mock_parse_nsd):
+        NSPackageModel(nsPackageId="18", nsdId="12").save()
+        mock_parse_nsd.return_value = json.JSONEncoder().encode({"a": "b"})
+        req_data = {"csarId": "18", "inputs": []}
+        resp = self.client.post(
+            "/api/genericparser/v1/parsernsd",
+            req_data,
+            format='json')
+        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+        self.assertEqual({"model": '{"a": "b"}'}, resp.data)
+
+    def test_nsd_parse_when_csar_not_exist(self):
+        req_data = {"csarId": "1", "inputs": []}
+        resp = self.client.post(
+            "/api/genericparser/v1/parsernsd",
+            req_data,
+            format='json')
+        self.assertEqual(
+            resp.status_code,
+            status.HTTP_500_INTERNAL_SERVER_ERROR)
+        self.assertEqual(resp.data, {"error": "NS CSAR(1) does not exist."})
diff --git a/genericparser/packages/tests/test_pnf_descriptor.py b/genericparser/packages/tests/test_pnf_descriptor.py
new file mode 100644 (file)
index 0000000..a6da1db
--- /dev/null
@@ -0,0 +1,282 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import copy
+import json
+import mock
+import os
+
+
+from django.test import TestCase
+from rest_framework import status
+from rest_framework.test import APIClient
+from genericparser.packages.biz.pnf_descriptor import PnfDescriptor
+from genericparser.packages.const import PKG_STATUS
+from genericparser.packages.tests.const import pnfd_data
+from genericparser.pub.database.models import PnfPackageModel, NSPackageModel
+from genericparser.pub.utils import toscaparsers
+
+
class TestPnfDescriptor(TestCase):
    """Tests for the PNF descriptor endpoints under /api/nsd/v1/pnf_descriptors.

    Covers create/query/delete of descriptor resources, upload/download of
    pnfd content, error propagation from the business layer, and parsing
    through /api/genericparser/v1/parserpnfd.

    Fixture files are written in binary mode, so all payloads are bytes
    literals — writing str to a ``'wb'`` file raises TypeError on Python 3.
    """

    def setUp(self):
        self.client = APIClient()
        self.user_defined_data = {
            'key1': 'value1',
            'key2': 'value2',
            'key3': 'value3',
        }
        # Template of a pnfd_info record as returned by the query endpoints.
        self.expected_pnfd_info = {
            'id': None,
            'pnfdId': None,
            'pnfdName': None,
            'pnfdVersion': None,
            'pnfdProvider': None,
            'pnfdInvariantId': None,
            'pnfdOnboardingState': 'CREATED',
            'onboardingFailureDetails': None,
            'pnfdUsageState': 'NOT_IN_USE',
            'userDefinedData': self.user_defined_data,
            '_links': None
        }
        # Minimal NSD model referencing the PNF; used by the delete test.
        self.nsdModel = {
            "pnfs": [{"properties": {"id": "m6000_s"}}]
        }

    def tearDown(self):
        pass

    def test_pnfd_create_normal(self):
        """POST creates a descriptor in CREATED/NOT_IN_USE state."""
        request_data = {'userDefinedData': self.user_defined_data}
        expected_response_data = {
            'pnfdOnboardingState': 'CREATED',
            'pnfdUsageState': 'NOT_IN_USE',
            'userDefinedData': self.user_defined_data,
            '_links': None
        }

        response = self.client.post(
            '/api/nsd/v1/pnf_descriptors',
            data=request_data,
            format='json'
        )
        # The generated id is not predictable; drop it before comparing.
        response.data.pop('id')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(expected_response_data, response.data)

    def test_query_multiple_pnfds_normal(self):
        """GET on the collection returns every stored descriptor."""
        expected_response_data = [
            copy.deepcopy(self.expected_pnfd_info),
            copy.deepcopy(self.expected_pnfd_info)
        ]
        expected_response_data[0]['id'] = '0'
        expected_response_data[1]['id'] = '1'

        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        for i in range(2):
            PnfPackageModel(
                pnfPackageId=str(i),
                onboardingState='CREATED',
                usageState='NOT_IN_USE',
                userDefinedData=user_defined_data
            ).save()
        response = self.client.get('/api/nsd/v1/pnf_descriptors', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(expected_response_data, response.data)

    def test_query_single_pnfd_normal(self):
        """GET on a single descriptor id returns that descriptor."""
        expected_response_data = copy.deepcopy(self.expected_pnfd_info)
        expected_response_data['id'] = '22'

        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        PnfPackageModel(
            pnfPackageId='22',
            onboardingState='CREATED',
            usageState='NOT_IN_USE',
            userDefinedData=user_defined_data
        ).save()

        response = self.client.get('/api/nsd/v1/pnf_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(expected_response_data, response.data)

    def test_query_single_pnfd_failed(self):
        """GET on an unknown descriptor id yields 404."""
        response = self.client.get('/api/nsd/v1/pnf_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_delete_single_pnfd_normal(self):
        """DELETE removes an existing descriptor and returns 204."""
        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        PnfPackageModel(
            pnfPackageId='22',
            usageState=PKG_STATUS.NOT_IN_USE,
            userDefinedData=user_defined_data,
            pnfdModel='test'
        ).save()
        NSPackageModel.objects.create(
            nsPackageId="111",
            nsdModel=json.JSONEncoder().encode(self.nsdModel)
        )
        resp = self.client.delete("/api/nsd/v1/pnf_descriptors/22", format='json')
        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(None, resp.data)

    def test_delete_single_pnfd_when_not_exist(self):
        """DELETE on an unknown descriptor id is idempotent (204)."""
        resp = self.client.delete("/api/nsd/v1/pnf_descriptors/22", format='json')
        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(None, resp.data)

    @mock.patch.object(toscaparsers, "parse_pnfd")
    def test_pnfd_content_upload_normal(self, mock_parse_pnfd):
        """Uploading content parses it and marks the package ONBOARDED."""
        user_defined_data_json = json.JSONEncoder().encode(self.user_defined_data)
        PnfPackageModel(
            pnfPackageId='22',
            usageState=PKG_STATUS.NOT_IN_USE,
            userDefinedData=user_defined_data_json,
        ).save()
        mock_parse_pnfd.return_value = json.JSONEncoder().encode(pnfd_data)
        # Binary mode requires a bytes payload (str raises TypeError on py3).
        with open('pnfd_content.txt', 'wb') as fp:
            fp.write(b'test')

        with open('pnfd_content.txt', 'rb') as fp:
            resp = self.client.put(
                "/api/nsd/v1/pnf_descriptors/22/pnfd_content",
                {'file': fp},
            )
        pnf_pkg = PnfPackageModel.objects.filter(pnfPackageId="22")
        self.assertEqual(pnf_pkg[0].pnfdId, "zte-1.0")
        self.assertEqual(pnf_pkg[0].onboardingState, PKG_STATUS.ONBOARDED)
        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(None, resp.data)
        os.remove('pnfd_content.txt')

    def test_pnfd_content_upload_when_pnf_not_exist(self):
        """Uploading against an unknown descriptor id fails with 500."""
        with open('pnfd_content.txt', 'wb') as fp:
            fp.write(b'test')

        with open('pnfd_content.txt', 'rb') as fp:
            resp = self.client.put(
                "/api/nsd/v1/pnf_descriptors/22/pnfd_content",
                {'file': fp},
            )
        self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
        # Clean up the fixture file (the original test leaked it).
        os.remove('pnfd_content.txt')

    @mock.patch.object(toscaparsers, "parse_pnfd")
    def test_pnfd_content_upload_when_pnfd_exist(self, mock_parse_pnfd):
        """Uploading a pnfd whose id belongs to another package fails (500)."""
        with open('pnfd_content.txt', 'wb') as fp:
            fp.write(b'test')
        PnfPackageModel(
            pnfPackageId='22',
            usageState=PKG_STATUS.NOT_IN_USE,
            pnfdId="zte-1.1"
        ).save()
        # Package 23 already owns the pnfdId the parser will report.
        PnfPackageModel(
            pnfPackageId='23',
            usageState=PKG_STATUS.NOT_IN_USE,
            pnfdId="zte-1.0"
        ).save()
        mock_parse_pnfd.return_value = json.JSONEncoder().encode(pnfd_data)
        with open('pnfd_content.txt', 'rb') as fp:
            resp = self.client.put(
                "/api/nsd/v1/pnf_descriptors/22/pnfd_content",
                {'file': fp},
            )
        self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
        # Clean up the fixture file (the original test leaked it).
        os.remove('pnfd_content.txt')

    def test_pnfd_download_normal(self):
        """Downloading an on-boarded descriptor streams the stored file."""
        with open('pnfd_content.txt', 'wb') as fp:
            fp.write(b'test1')
            fp.write(b'test2')
        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        PnfPackageModel(
            pnfPackageId='22',
            usageState=PKG_STATUS.NOT_IN_USE,
            onboardingState=PKG_STATUS.ONBOARDED,
            userDefinedData=user_defined_data,
            localFilePath="pnfd_content.txt",
            pnfdModel='test'
        ).save()
        resp = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
        # streaming_content yields bytes chunks; join them as bytes so the
        # comparison works on both Python 2 and Python 3.
        file_content = b''.join(resp.streaming_content)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(b'test1test2', file_content)
        os.remove('pnfd_content.txt')

    def test_pnfd_download_failed(self):
        """Downloading an unknown descriptor id yields 404."""
        response = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_pnfd_download_when_not_on_boarded(self):
        """Downloading a descriptor that is not yet on-boarded fails (500)."""
        with open('pnfd_content.txt', 'wb') as fp:
            fp.write(b'test1')
            fp.write(b'test2')
        user_defined_data = json.JSONEncoder().encode(self.user_defined_data)
        PnfPackageModel(
            pnfPackageId='22',
            usageState=PKG_STATUS.NOT_IN_USE,
            onboardingState=PKG_STATUS.CREATED,
            userDefinedData=user_defined_data,
            localFilePath="pnfd_content.txt",
            pnfdModel='test'
        ).save()
        response = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
        os.remove('pnfd_content.txt')

    @mock.patch.object(PnfDescriptor, "create")
    def test_pnfd_create_when_catch_exception(self, mock_create):
        """Unexpected errors in create() surface as 500."""
        request_data = {'userDefinedData': self.user_defined_data}
        mock_create.side_effect = TypeError('integer type')
        response = self.client.post('/api/nsd/v1/pnf_descriptors', data=request_data, format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(PnfDescriptor, "delete_single")
    def test_delete_single_when_catch_exception(self, mock_delete_single):
        """Unexpected errors in delete_single() surface as 500."""
        mock_delete_single.side_effect = TypeError("integer type")
        response = self.client.delete("/api/nsd/v1/pnf_descriptors/22", format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(PnfDescriptor, "query_single")
    def test_query_single_when_catch_exception(self, mock_query_single):
        """Unexpected errors in query_single() surface as 500."""
        mock_query_single.side_effect = TypeError("integer type")
        response = self.client.get('/api/nsd/v1/pnf_descriptors/22', format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(PnfDescriptor, "query_multiple")
    def test_query_multiple_when_catch_exception(self, mock_query_multiple):
        """Unexpected errors in query_multiple() surface as 500."""
        mock_query_multiple.side_effect = TypeError("integer type")
        response = self.client.get('/api/nsd/v1/pnf_descriptors', format='json')
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(PnfDescriptor, "upload")
    def test_upload_when_catch_exception(self, mock_upload):
        """Unexpected errors in upload() surface as 500."""
        mock_upload.side_effect = TypeError("integer type")
        response = self.client.put("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(PnfDescriptor, "download")
    def test_download_when_catch_exception(self, mock_download):
        """Unexpected errors in download() surface as 500."""
        mock_download.side_effect = TypeError("integer type")
        response = self.client.get("/api/nsd/v1/pnf_descriptors/22/pnfd_content")
        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)

    @mock.patch.object(toscaparsers, 'parse_pnfd')
    def test_pnfd_parse_normal(self, mock_parse_pnfd):
        """Parsing an on-boarded PNF CSAR returns the parsed model."""
        PnfPackageModel(pnfPackageId="8", pnfdId="10").save()
        mock_parse_pnfd.return_value = json.JSONEncoder().encode({"c": "d"})
        req_data = {"csarId": "8", "inputs": []}
        resp = self.client.post("/api/genericparser/v1/parserpnfd", req_data, format='json')
        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual({"model": '{"c": "d"}'}, resp.data)
diff --git a/genericparser/packages/tests/test_service_descriptor.py b/genericparser/packages/tests/test_service_descriptor.py
new file mode 100644 (file)
index 0000000..30eafe0
--- /dev/null
@@ -0,0 +1,189 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import logging
+
+from django.test import TestCase
+from mock import mock
+
+from genericparser.packages.biz.service_descriptor import ServiceDescriptor
+from genericparser.packages.const import PKG_STATUS
+from genericparser.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from genericparser.pub.exceptions import PackageNotFoundException
+from genericparser.pub.utils import toscaparsers
+
+logger = logging.getLogger(__name__)
+
+
class TestServiceDescription(TestCase):
    """Tests for the ServiceDescriptor business logic (create/parse/delete)."""

    def setUp(self):
        self.user_defined_data = {
            'key1': 'value1',
            'key2': 'value2',
            'key3': 'value3',
        }
        self.data = {
            'userDefinedData': self.user_defined_data,
        }
        # Canned parser output fed to the parse_sd mock; mirrors what
        # toscaparsers.parse_sd produces for an SD CSAR.
        self.sd_data = {
            "inputs": {
                "sdwanvpnresource_list": [
                    {
                        "sdwanvpn_topology": "",
                        "required": True,
                        "type": "string"
                    }
                ]
            },
            "pnfs": [
                {
                    "pnf_id": "m6000_s",
                    "cps": [],
                    "description": "",
                    "properties": {
                        "vendor": "zte",
                        "request_reclassification": False,
                        "pnf_type": "m6000s",
                        "version": "1.0",
                        "management_address": "111111",
                        "id": "m6000_s",
                        "nsh_aware": False
                    }
                }
            ],
            "description": "",
            "vnfs": [
                {
                    "vnf_id": "sdwansiteresource",
                    "description": "",
                    "properties": {
                        "sdwandevice_type": "",
                        "sdwandevice_class": "PNF",
                        "multi_stage_design": "false",
                        "min_instances": "1",
                        "sdwansite_controlPoint": "",
                        "id": "cd557883-ac4b-462d-aa01-421b5fa606b1",
                        "sdwansite_longitude": "",
                        "sdwansite_latitude": "",
                        "sdwansite_postcode": "",
                        "sdwansite_type": "",
                        "nf_naming": {
                            "ecomp_generated_naming": True
                        },
                        "sdwansite_emails": "",
                        "sdwansite_role": "",
                        "vnfm_info": "",
                        "sdwansite_address": "",
                        "sdwansite_description": "",
                        "availability_zone_max_count": "1",
                        "sdwansite_name": ""
                    }
                }
            ],
            "service": {
                "type": "org.openecomp.service.EnhanceService",
                "properties": {
                    "descriptor_id": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
                    "designer": "",
                    "invariant_id": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
                    "name": "Enhance_Service",
                    "verison": ""
                },
                "metadata": {
                    "category": "E2E Service",
                    "serviceType": "",
                    "description": "Enhance_Service",
                    "instantiationType": "A-la-carte",
                    "type": "Service",
                    "environmentContext": "General_Revenue-Bearing",
                    "serviceEcompNaming": True,
                    "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
                    "ecompGeneratedNaming": True,
                    "serviceRole": "",
                    "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
                    "namingPolicy": "",
                    "name": "Enhance_Service"
                }
            },
            "metadata": {
                "category": "E2E Service",
                "serviceType": "",
                "description": "Enhance_Service",
                "instantiationType": "A-la-carte",
                "type": "Service",
                "environmentContext": "General_Revenue-Bearing",
                "serviceEcompNaming": True,
                "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
                "ecompGeneratedNaming": True,
                "serviceRole": "",
                "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
                "namingPolicy": "",
                "name": "Enhance_Service"
            }
        }
        # Start each test from an empty service package table.
        ServicePackageModel.objects.filter().delete()

    def tearDown(self):
        pass

    def test_create(self):
        """create() allocates an id and stores the package in initial states."""
        result_data = ServiceDescriptor().create(self.data)
        self.assertIsNotNone(result_data['id'])
        service_package = ServicePackageModel.objects.filter(servicePackageId=result_data['id'])[0]
        self.assertIsNotNone(service_package)
        self.assertEqual(PKG_STATUS.DISABLED, service_package.operationalState)
        self.assertEqual(PKG_STATUS.CREATED, service_package.onboardingState)
        self.assertEqual(PKG_STATUS.NOT_IN_USE, service_package.usageState)

    def test_create_with_csarid(self):
        """create() honours a caller-supplied csar id."""
        csar_id = '0b667470-e6b3-4ee8-8f08-186317a04dc2'
        result_data = ServiceDescriptor().create(self.data, csar_id)
        self.assertEqual(csar_id, result_data['id'])
        service_package = ServicePackageModel.objects.filter(servicePackageId=csar_id)[0]
        self.assertIsNotNone(service_package)
        self.assertEqual(PKG_STATUS.DISABLED, service_package.operationalState)
        self.assertEqual(PKG_STATUS.CREATED, service_package.onboardingState)
        self.assertEqual(PKG_STATUS.NOT_IN_USE, service_package.usageState)

    @mock.patch.object(toscaparsers, 'parse_sd')
    def test_parse_serviced_and_save(self, mock_parse_sd):
        """parse_serviced_and_save() parses the CSAR and keeps the package."""
        mock_parse_sd.return_value = json.JSONEncoder().encode(self.sd_data)
        service_desc = ServiceDescriptor()
        csar_id = '0b667470-e6b3-4ee8-8f08-186317a04dc2'
        service_desc.create(self.data, csar_id)
        # The referenced VNF/PNF packages must exist for parsing to succeed.
        VnfPackageModel(vnfPackageId="1", vnfdId="cd557883-ac4b-462d-aa01-421b5fa606b1").save()
        PnfPackageModel(pnfPackageId="1", pnfdId="m6000_s").save()
        local_file_name = "/test.csar"
        service_desc.parse_serviced_and_save(csar_id, local_file_name)

        service_package = ServicePackageModel.objects.filter(servicePackageId=csar_id)[0]
        self.assertIsNotNone(service_package)

    def test_delete_single(self):
        """delete_single() removes the stored package."""
        service_desc = ServiceDescriptor()
        csar_id = '0b667470-e6b3-4ee8-8f08-186317a04dc2'
        service_desc.create(self.data, csar_id)

        service_desc.delete_single(csar_id)
        # One existence check suffices (the original asserted it twice).
        self.assertFalse(ServicePackageModel.objects.filter(servicePackageId=csar_id).exists())

    def test_delete_single_not_exists(self):
        """delete_single() must raise PackageNotFoundException for unknown ids.

        assertRaises makes the expectation explicit: the original
        try/except version passed silently when no exception was raised.
        """
        csar_id = "8000"
        with self.assertRaises(PackageNotFoundException) as ctx:
            ServiceDescriptor().delete_single(csar_id)
        self.assertEqual("Service package[8000] not Found.", ctx.exception.message)
diff --git a/genericparser/packages/tests/test_servicepackage.py b/genericparser/packages/tests/test_servicepackage.py
new file mode 100644 (file)
index 0000000..28fdcdb
--- /dev/null
@@ -0,0 +1,459 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+
+from django.test import TestCase, Client
+from mock import mock
+from rest_framework import status
+
+from genericparser.packages.biz.sdc_service_package import ServicePackage
+from genericparser.packages.const import PKG_STATUS
+from genericparser.pub.database.models import ServicePackageModel, VnfPackageModel, PnfPackageModel
+from genericparser.pub.exceptions import PackageNotFoundException, PackageHasExistsException, GenericparserException
+from genericparser.pub.msapi import sdc
+from genericparser.pub.utils import toscaparsers
+
+PARSER_BASE_URL = "/api/parser/v1"
+
+
+class TestServicePackage(TestCase):
+    """ Test case for Service Package operations"""
+
+    def setUp(self):
+        self.client = Client()
+        ServicePackageModel.objects.filter().delete()
+        self.sd_data = {
+            "inputs": {
+                "sdwanvpnresource_list": [
+                    {
+                        "sdwanvpn_topology": "",
+                        "required": True,
+                        "type": "string"
+                    },
+                    {
+                        "sdwansitelan_list": [
+                            {
+                                "deviceName": "",
+                                "required": True,
+                                "type": "string",
+                                "description": "The device name in the site"
+                            }
+                        ]
+                    }
+                ],
+                "sdwansiteresource_list": [
+                    {
+                        "sdwansite_controlPoint": "",
+                        "required": False,
+                        "type": "string",
+                        "description": "The control point of the site,only for sd-wan-edge"
+                    },
+                    {
+                        "sdwandevice_list": [
+                            {
+                                "systemIp": "",
+                                "required": False,
+                                "type": "string",
+                                "description": "The system ip of the device"
+                            }
+                        ]
+                    }
+                ]
+            },
+            "pnfs": [
+                {
+                    "pnf_id": "m6000_s",
+                    "cps": [],
+                    "description": "",
+                    "properties": {
+                        "vendor": "zte",
+                        "request_reclassification": False,
+                        "pnf_type": "m6000s",
+                        "version": "1.0",
+                        "management_address": "111111",
+                        "id": "m6000_s",
+                        "nsh_aware": False
+                    }
+                }
+            ],
+            "description": "",
+            "graph": {
+                "sdwansiteresource": [
+                    "sdwanvpnresource"
+                ],
+                "sdwanvpnresource": []
+            },
+            "basepath": "c:\\users\\cmcc\\appdata\\local\\temp\\tmpn79jwc\\Definitions",
+            "vnfs": [
+                {
+                    "vnf_id": "sdwansiteresource",
+                    "description": "",
+                    "properties": {
+                        "sdwandevice_type": "",
+                        "sdwandevice_class": "PNF",
+                        "multi_stage_design": "False",
+                        "min_instances": "1",
+                        "sdwansite_controlPoint": "",
+                        "id": "cd557883-ac4b-462d-aa01-421b5fa606b1",
+                        "sdwansite_longitude": "",
+                        "sdwansite_latitude": "",
+                        "sdwansite_postcode": "",
+                        "sdwansite_type": "",
+                        "nf_naming": {
+                            "ecomp_generated_naming": True
+                        },
+                        "sdwansite_emails": "",
+                        "sdwansite_role": "",
+                        "vnfm_info": "",
+                        "sdwansite_address": "",
+                        "sdwansite_description": "",
+                        "availability_zone_max_count": "1",
+                        "sdwansite_name": ""
+                    },
+                    "dependencies": [],
+                    "networks": [],
+                    "metadata": {
+                        "category": "Configuration",
+                        "subcategory": "Configuration",
+                        "UUID": "cd557883-ac4b-462d-aa01-421b5fa606b1",
+                        "invariantUUID": "c83b621e-e267-4910-a75a-a2a5957296e4",
+                        "name": "sdwansiteresource",
+                        "customizationUUID": "673dd6b3-3a06-4ef0-8ad0-8c26224b08f7",
+                        "resourceVendorRelease": "1.0",
+                        "version": "1.0",
+                        "resourceVendor": "onap",
+                        "resourceVendorModelNumber": "",
+                        "type": "VF",
+                        "description": "sdwansiteresource"
+                    }
+                }
+            ],
+            "vls": [],
+            "service": {
+                "type": "org.openecomp.service.EnhanceService",
+                "requirements": {
+                    "sdwanvpnresource.sdwanvpn.dependency": [
+                        "sdwanvpnresource",
+                        "sdwanvpn.dependency"
+                    ],
+                    "sdwansiteresource.sdwansitewan.dependency": [
+                        "sdwansiteresource",
+                        "sdwansitewan.dependency"
+                    ],
+                    "sdwansiteresource.sdwandevice.dependency": [
+                        "sdwansiteresource",
+                        "sdwandevice.dependency"
+                    ],
+                    "sdwanvpnresource.sdwansitelan.dependency": [
+                        "sdwanvpnresource",
+                        "sdwansitelan.dependency"
+                    ],
+                    "sdwanvpnresource.sdwanvpn.device": [
+                        "sdwanvpnresource",
+                        "sdwanvpn.device"
+                    ],
+                    "sdwansiteresource.sdwansite.device": [
+                        "sdwansiteresource",
+                        "sdwansite.device"
+                    ],
+                    "sdwansiteresource.sdwansite.dependency": [
+                        "sdwansiteresource",
+                        "sdwansite.dependency"
+                    ],
+                    "sdwanvpnresource.sdwansitelan.device": [
+                        "sdwanvpnresource",
+                        "sdwansitelan.device"
+                    ],
+                    "sdwansiteresource.sdwansitewan.device": [
+                        "sdwansiteresource",
+                        "sdwansitewan.device"
+                    ],
+                    "sdwansiteresource.sdwandevice.device": [
+                        "sdwansiteresource",
+                        "sdwandevice.device"
+                    ]
+                },
+                "properties": {
+                    "descriptor_id": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
+                    "designer": "",
+                    "invariant_id": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
+                    "name": "Enhance_Service",
+                    "verison": ""
+                },
+                "capabilities": {
+                    "sdwansiteresource.sdwandevice.feature": [
+                        "sdwansiteresource",
+                        "sdwandevice.feature"
+                    ],
+                    "sdwanvpnresource.sdwanvpn.feature": [
+                        "sdwanvpnresource",
+                        "sdwanvpn.feature"
+                    ],
+                    "sdwanvpnresource.sdwanvpn.link": [
+                        "sdwanvpnresource",
+                        "sdwanvpn.link"
+                    ],
+                    "sdwansiteresource.sdwansite.feature": [
+                        "sdwansiteresource",
+                        "sdwansite.feature"
+                    ],
+                    "sdwansiteresource.sdwansitewan.feature": [
+                        "sdwansiteresource",
+                        "sdwansitewan.feature"
+                    ],
+                    "sdwanvpnresource.sdwansitelan.feature": [
+                        "sdwanvpnresource",
+                        "sdwansitelan.feature"
+                    ]
+                },
+                "metadata": {
+                    "category": "E2E Service",
+                    "serviceType": "",
+                    "description": "Enhance_Service",
+                    "instantiationType": "A-la-carte",
+                    "type": "Service",
+                    "environmentContext": "General_Revenue-Bearing",
+                    "serviceEcompNaming": True,
+                    "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
+                    "ecompGeneratedNaming": True,
+                    "serviceRole": "",
+                    "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
+                    "namingPolicy": "",
+                    "name": "Enhance_Service"
+                }
+            },
+            "metadata": {
+                "category": "E2E Service",
+                "serviceType": "",
+                "description": "Enhance_Service",
+                "instantiationType": "A-la-carte",
+                "type": "Service",
+                "environmentContext": "General_Revenue-Bearing",
+                "serviceEcompNaming": True,
+                "UUID": "49ee73f4-1e31-4054-b871-eb9b1c29999b",
+                "ecompGeneratedNaming": True,
+                "serviceRole": "",
+                "invariantUUID": "5de07996-7ff0-4ec1-b93c-e3a00bb3f207",
+                "namingPolicy": "",
+                "name": "Enhance_Service"
+            }
+        }
+
+    def tearDown(self):
+        pass
+
+    ###############################################################
+
+    def test_service_pkg_distribute_when_pkg_exists(self):
+        ServicePackageModel(servicePackageId="1", servicedId="2").save()
+        csar_id = "1"
+        try:
+            ServicePackage().on_distribute(csar_id)
+        except PackageHasExistsException as e:
+            self.assertEqual("Service CSAR(1) already exists.", e.message)
+
+    @mock.patch.object(sdc, 'get_artifact')
+    def test_service_pkg_distribute_when_fail_get_artifacts(self, mock_get_artifact):
+        mock_get_artifact.side_effect = GenericparserException("Failed to query artifact(services,1) from sdc.")
+        csar_id = "1"
+        try:
+            ServicePackage().on_distribute(csar_id)
+        except Exception as e:
+            self.assertTrue(isinstance(e, GenericparserException))
+            self.assertEqual("Failed to query artifact(services,1) from sdc.", e.message)
+
+    @mock.patch.object(sdc, 'get_artifact')
+    @mock.patch.object(sdc, 'download_artifacts')
+    def test_service_pkg_distribute_when_fail_download_artifacts(self, mock_get_artifact, mock_download_artifacts):
+        mock_get_artifact.return_value = {
+            "uuid": "1",
+            "invariantUUID": "63eaec39-ffbe-411c-a838-448f2c73f7eb",
+            "name": "underlayvpn",
+            "version": "2.0",
+            "toscaModelURL": "/sdc/v1/genericparser/resources/c94490a0-f7ef-48be-b3f8-8d8662a37236/toscaModel",
+            "category": "Volte",
+            "subCategory": "VolteVNF",
+            "resourceType": "VF",
+            "lifecycleState": "CERTIFIED",
+            "distributionStatus": "DISTRIBUTION_APPROVED",
+            "lastUpdaterUserId": "jh0003"
+        }
+        mock_download_artifacts.side_effect = GenericparserException("Failed to download 1 from sdc.")
+        csar_id = "1"
+        try:
+            ServicePackage().on_distribute(csar_id)
+        except Exception as e:
+            self.assertTrue(isinstance(e, GenericparserException))
+            self.assertEqual("Failed to download 1 from sdc.", e.message)
+
+    @mock.patch.object(sdc, 'get_artifact')
+    @mock.patch.object(sdc, 'download_artifacts')
+    @mock.patch.object(toscaparsers, 'parse_sd')
+    def test_service_pkg_distribute(self, mock_parse_sd, mock_download_artifacts, mock_get_artifact):
+        """Happy path: distribution parses the CSAR and stores an ONBOARDED package."""
+        # Mocks are injected bottom-up, so the parameter order (parse_sd,
+        # download_artifacts, get_artifact) correctly mirrors the decorators.
+        mock_parse_sd.return_value = json.JSONEncoder().encode(self.sd_data)
+        mock_download_artifacts.return_value = "/test.csar"
+        mock_get_artifact.return_value = {
+            "uuid": "1",
+            "invariantUUID": "63eaec39-ffbe-411c-a838-448f2c73f7eb",
+            "name": "underlayvpn",
+            "version": "2.0",
+            "toscaModelURL": "/sdc/v1/genericparser/resources/c94490a0-f7ef-48be-b3f8-8d8662a37236/toscaModel",
+            "category": "Volte",
+            "subCategory": "VolteVNF",
+            "resourceType": "VF",
+            "lifecycleState": "CERTIFIED",
+            "distributionStatus": "DISTRIBUTION_APPROVED",
+            "lastUpdaterUserId": "jh0003"
+        }
+        # The VNF/PNF packages referenced by sd_data must pre-exist.
+        VnfPackageModel(vnfPackageId="1", vnfdId="cd557883-ac4b-462d-aa01-421b5fa606b1").save()
+        PnfPackageModel(pnfPackageId="1", pnfdId="m6000_s").save()
+        ServicePackage().on_distribute(csar_id="1")
+
+        service_package = ServicePackageModel.objects.filter(servicePackageId="1").first()
+        self.assertEqual("5de07996-7ff0-4ec1-b93c-e3a00bb3f207", service_package.invariantId)
+        self.assertEqual("Enhance_Service", service_package.servicedName)
+        self.assertEqual(PKG_STATUS.ONBOARDED, service_package.onboardingState)
+        self.assertEqual(PKG_STATUS.ENABLED, service_package.operationalState)
+        self.assertEqual(PKG_STATUS.NOT_IN_USE, service_package.usageState)
+
+    def test_api_service_pkg_distribute_when_pkg_exists(self):
+        ServicePackageModel(servicePackageId="1", servicedId="2").save()
+        resp = self.client.post(
+            PARSER_BASE_URL + "/service_packages", {"csarId": "1"}, format='json')
+        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertEqual("Service CSAR(1) already exists.", resp.data["errorMessage"])
+
+    ###############################################################
+
+    def test_service_pkg_get_all(self):
+        """get_csars returns one entry per stored service package."""
+        ServicePackageModel(
+            servicePackageId="13",
+            servicedId="2",
+            servicedDesigner="2",
+            servicedVersion="2",
+            servicePackageUri="13.csar",
+            servicedModel="").save()
+        ServicePackageModel(
+            servicePackageId="14",
+            servicedId="3",
+            servicedDesigner="3",
+            servicedVersion="3",
+            servicePackageUri="14.csar",
+            servicedModel="").save()
+        csars = ServicePackage().get_csars()
+        self.assertEqual(2, len(csars))
+
+    def test_api_service_pkg_get_all(self):
+        """GET /service_packages returns 200 when packages exist."""
+        ServicePackageModel(
+            servicePackageId="13",
+            servicedId="2",
+            servicedDesigner="2",
+            servicedVersion="2",
+            servicePackageUri="13.csar",
+            servicedModel="").save()
+        ServicePackageModel(
+            servicePackageId="14",
+            servicedId="3",
+            servicedDesigner="3",
+            servicedVersion="3",
+            servicePackageUri="14.csar",
+            servicedModel="").save()
+        resp = self.client.get(PARSER_BASE_URL + "/service_packages")
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+    ###############################################################
+
+    def test_service_pkg_get_one(self):
+        """get_csar returns a dict describing a single stored package."""
+        ServicePackageModel(
+            servicePackageId="14",
+            servicedId="2",
+            servicedDesigner="3",
+            servicedVersion="4",
+            servicePackageUri="14.csar",
+            servicedModel="").save()
+        # NOTE(review): the id is stored as the string "14" but queried and
+        # compared as the int 14 — presumably get_csar/Django coerce the
+        # types; confirm against the implementation.
+        csar = ServicePackage().get_csar(14)
+        self.assertEqual(14, csar['csarId'])
+
+    def test_service_pkg_get_one_not_found(self):
+        try:
+            ServicePackage().get_csar(1000)
+        except PackageNotFoundException as e:
+            self.assertEqual("Service package[1000] not Found.", e.message)
+
+    def test_api_service_pkg_get_one(self):
+        """GET /service_packages/<id> returns 200 for an existing package."""
+        ServicePackageModel(
+            servicePackageId="14",
+            servicedId="2",
+            servicedDesigner="3",
+            servicedVersion="4",
+            servicePackageUri="14.csar",
+            servicedModel="").save()
+        resp = self.client.get(PARSER_BASE_URL + "/service_packages/14")
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+    def test_api_service_pkg_get_one_not_found(self):
+        """GET for an unknown id returns 404 with a structured error body."""
+        resp = self.client.get(PARSER_BASE_URL + "/service_packages/22")
+        self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
+        self.assertEqual(
+            {"errorMessage": "Service package[22] not Found.", 'error': 404},
+            resp.data)
+
+    ###############################################################
+
+    def test_service_pkg_normal_delete(self):
+        ServicePackageModel(servicePackageId="8", servicedId="2").save()
+        sp = ServicePackageModel.objects.filter(servicePackageId=8)
+        self.assertEqual(1, len(sp))
+        ServicePackage().delete_csar("8")
+        sp = ServicePackageModel.objects.filter(servicePackageId=8)
+        self.assertEqual(0, len(sp))
+
+    def test_service_pkg_normal_delete_not_found(self):
+        try:
+            ServicePackage().delete_csar("8000")
+        except PackageNotFoundException as e:
+            self.assertEqual("Service package[8000] not Found.", e.message)
+
+    def test_api_service_pkg_normal_delete(self):
+        ServicePackageModel(servicePackageId="8", servicedId="2").save()
+        resp = self.client.delete(PARSER_BASE_URL + "/service_packages/8")
+        self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT)
+
+    ###############################################################
+
+    @mock.patch.object(toscaparsers, 'parse_sd')
+    def test_service_pkg_parser(self, mock_parse_sd):
+        ServicePackageModel(servicePackageId="8", servicedId="2").save()
+        mock_parse_sd.return_value = json.JSONEncoder().encode({"a": "b"})
+
+        inputs = []
+        ret = ServicePackage().parse_serviced("8", inputs)
+        self.assertTrue({"model": '{"c": "d"}'}, ret)
+
+    def test_service_pkg_parser_not_found(self):
+        try:
+            csar_id = "8000"
+            inputs = []
+            ServicePackage().parse_serviced(csar_id, inputs)
+        except PackageNotFoundException as e:
+            self.assertEqual("Service CSAR(8000) does not exist.", e.message)
+
+    def test_api_service_pkg_parser_not_found(self):
+        query_data = {
+            "csarId": "1",
+            "packageType": "Service",
+            "inputs": "string"
+        }
+        resp = self.client.post(PARSER_BASE_URL + "/parser", query_data, format='json')
+        self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
diff --git a/genericparser/packages/tests/test_vnf_package.py b/genericparser/packages/tests/test_vnf_package.py
new file mode 100644 (file)
index 0000000..22488ba
--- /dev/null
@@ -0,0 +1,363 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import urllib2
+import mock
+
+from django.test import TestCase
+from rest_framework import status
+from rest_framework.test import APIClient
+
+from genericparser.packages.biz.vnf_package import VnfPackage, VnfPkgUploadThread
+from genericparser.packages.const import PKG_STATUS
+from genericparser.packages.tests.const import vnfd_data
+from genericparser.pub.config.config import GENERICPARSER_ROOT_PATH
+from genericparser.pub.database.models import VnfPackageModel
+from genericparser.pub.utils import toscaparsers
+
+
+class MockReq():
+    """Minimal stand-in for the response object returned by urllib2.urlopen."""
+
+    def read(self):
+        # Fixed payload standing in for downloaded package content.
+        return "1"
+
+    def close(self):
+        pass
+
+
+class TestVnfPackage(TestCase):
+    def setUp(self):
+        """Create a fresh DRF APIClient for each test."""
+        self.client = APIClient()
+
+    def tearDown(self):
+        pass
+
+    @mock.patch.object(toscaparsers, 'parse_vnfd')
+    def test_upload_vnf_pkg(self, mock_parse_vnfd):
+        """Uploading content onto a CREATED package parses it and marks it ONBOARDED."""
+        # NOTE(review): the file handle opened here is never closed; consider
+        # a `with open(...)` context manager around the request.
+        data = {'file': open(os.path.join(GENERICPARSER_ROOT_PATH, "empty.txt"), "rb")}
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            onboardingState="CREATED"
+        )
+        mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+        response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+        vnf_pkg = VnfPackageModel.objects.filter(vnfPackageId="222")
+        self.assertEqual("zte-hss-1.0", vnf_pkg[0].vnfdId)
+        self.assertEqual(PKG_STATUS.ONBOARDED, vnf_pkg[0].onboardingState)
+        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+
+    def test_upload_vnf_pkg_failed(self):
+        """Upload without a CREATED onboarding state fails with HTTP 500."""
+        # NOTE(review): file handle also left unclosed here — see above.
+        data = {'file': open(os.path.join(GENERICPARSER_ROOT_PATH, "empty.txt"), "rb")}
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+        )
+        response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @mock.patch.object(toscaparsers, 'parse_vnfd')
+    @mock.patch.object(urllib2, 'urlopen')
+    def test_upload_nf_pkg_from_uri(self, mock_urlopen, mock_parse_vnfd):
+        """Uploading from a URI downloads, parses and stores the VNFD id."""
+        # Parameter order correctly mirrors the bottom-up decorator
+        # injection: urlopen (innermost) first, then parse_vnfd.
+        vnf_pkg = VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            onboardingState="CREATED"
+        )
+        mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+        req_data = {"addressInformation": "https://127.0.0.1:1234/sdc/v1/hss.csar"}
+        mock_urlopen.return_value = MockReq()
+        vnf_pkg_id = vnf_pkg.vnfPackageId
+        # run() (not start()) executes the upload synchronously in this thread,
+        # so the assertions below see its side effects immediately.
+        VnfPkgUploadThread(req_data, vnf_pkg_id).run()
+        vnf_pkg1 = VnfPackageModel.objects.filter(vnfPackageId="222")
+        self.assertEqual("zte-hss-1.0", vnf_pkg1[0].vnfdId)
+
+    def test_upload_from_uri_failed(self):
+        """A request body without addressInformation fails with HTTP 500."""
+        req_data = {"username": "123"}
+        response = self.client.post("/api/vnfpkgm/v1/vnf_packages/111/package_content/upload_from_uri", data=req_data)
+        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    def test_create_vnf_pkg(self):
+        """POST /vnf_packages creates a CREATED/DISABLED/NOT_IN_USE package."""
+        req_data = {
+            "userDefinedData": {"a": "A"}
+        }
+        response = self.client.post("/api/vnfpkgm/v1/vnf_packages", data=req_data, format="json")
+        resp_data = json.loads(response.content)
+        # The id is server-generated, so it is taken from the response itself.
+        expect_resp_data = {
+            "id": resp_data.get("id"),
+            "onboardingState": "CREATED",
+            "operationalState": "DISABLED",
+            "usageState": "NOT_IN_USE",
+            "userDefinedData": {"a": "A"},
+            "_links": None  # TODO
+        }
+        self.assertEqual(expect_resp_data, resp_data)
+        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+
+    def test_query_single_vnf(self):
+        """GET /vnf_packages/<id> serializes all stored fields of one package."""
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            vnfdId="zte-hss-1.0",
+            vnfVendor="zte",
+            vnfdProductName="hss",
+            vnfSoftwareVersion="1.0.0",
+            vnfdVersion="1.0.0",
+            checksum='{"algorithm":"111", "hash": "11"}',
+            onboardingState="CREATED",
+            operationalState="DISABLED",
+            usageState="NOT_IN_USE",
+            userDefinedData='{"a": "A"}'
+        )
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222")
+        expect_data = {
+            "id": "222",
+            "vnfdId": "zte-hss-1.0",
+            "vnfProductName": "hss",
+            "vnfSoftwareVersion": "1.0.0",
+            "vnfdVersion": "1.0.0",
+            "checksum": {"algorithm": "111", "hash": "11"},
+            "softwareImages": None,
+            "additionalArtifacts": None,
+            "onboardingState": "CREATED",
+            "operationalState": "DISABLED",
+            "usageState": "NOT_IN_USE",
+            "userDefinedData": {"a": "A"},
+            "_links": None
+        }
+        self.assertEqual(response.data, expect_data)
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+    def test_query_single_vnf_failed(self):
+        """GET for an unknown package id returns 404."""
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222")
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+    def test_query_multiple_vnf(self):
+        """GET /vnf_packages lists every stored package."""
+        # NOTE(review): the assertion assumes the API returns packages in
+        # insertion order ("111" then "222"); confirm the queryset ordering
+        # is deterministic.
+        VnfPackageModel.objects.create(
+            vnfPackageId="111",
+            vnfdId="zte-hss-1.0",
+            vnfVendor="zte",
+            vnfdProductName="hss",
+            vnfSoftwareVersion="1.0.0",
+            vnfdVersion="1.0.0",
+            checksum='{"algorithm":"111", "hash": "11"}',
+            onboardingState="CREATED",
+            operationalState="DISABLED",
+            usageState="NOT_IN_USE",
+            userDefinedData='{"a": "A"}'
+        )
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            vnfdId="zte-hss-1.0",
+            vnfVendor="zte",
+            vnfdProductName="hss",
+            vnfSoftwareVersion="1.0.0",
+            vnfdVersion="1.0.0",
+            checksum='{"algorithm":"111", "hash": "11"}',
+            onboardingState="CREATED",
+            operationalState="DISABLED",
+            usageState="NOT_IN_USE",
+            userDefinedData='{"a": "A"}'
+        )
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages")
+        expect_data = [
+            {
+                "id": "111",
+                "vnfdId": "zte-hss-1.0",
+                "vnfProductName": "hss",
+                "vnfSoftwareVersion": "1.0.0",
+                "vnfdVersion": "1.0.0",
+                "checksum": {"algorithm": "111", "hash": "11"},
+                "softwareImages": None,
+                "additionalArtifacts": None,
+                "onboardingState": "CREATED",
+                "operationalState": "DISABLED",
+                "usageState": "NOT_IN_USE",
+                "userDefinedData": {"a": "A"},
+                "_links": None
+            },
+            {
+                "id": "222",
+                "vnfdId": "zte-hss-1.0",
+                "vnfProductName": "hss",
+                "vnfSoftwareVersion": "1.0.0",
+                "vnfdVersion": "1.0.0",
+                "checksum": {"algorithm": "111", "hash": "11"},
+                "softwareImages": None,
+                "additionalArtifacts": None,
+                "onboardingState": "CREATED",
+                "operationalState": "DISABLED",
+                "usageState": "NOT_IN_USE",
+                "userDefinedData": {"a": "A"},
+                "_links": None
+            }
+        ]
+        self.assertEqual(response.data, expect_data)
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+    def test_delete_single_vnf_pkg(self):
+        """DELETE /vnf_packages/<id> returns 204 with an empty body."""
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            vnfdId="zte-hss-1.0",
+            vnfVendor="zte",
+            vnfdProductName="hss",
+            vnfSoftwareVersion="1.0.0",
+            vnfdVersion="1.0.0",
+            checksum='{"algorithm":"111", "hash": "11"}',
+            onboardingState="CREATED",
+            operationalState="DISABLED",
+            usageState="NOT_IN_USE",
+            userDefinedData='{"a": "A"}'
+        )
+        response = self.client.delete("/api/vnfpkgm/v1/vnf_packages/222")
+        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+        self.assertEqual(response.data, None)
+
+    def test_delete_when_vnf_pkg_not_exist(self):
+        """Delete is idempotent: a missing package still yields 204."""
+        response = self.client.delete("/api/vnfpkgm/v1/vnf_packages/222")
+        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+        self.assertEqual(response.data, None)
+
+    def test_fetch_vnf_pkg(self):
+        with open("vnfPackage.csar", "wb") as fp:
+            fp.writelines("AAAABBBBCCCCDDDD")
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            onboardingState="ONBOARDED",
+            localFilePath="vnfPackage.csar"
+        )
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+        file_content = ''
+        for data in response.streaming_content:
+            file_content = file_content + data
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual('AAAABBBBCCCCDDDD', file_content)
+        os.remove("vnfPackage.csar")
+
+    def test_fetch_partical_vnf_pkg(self):
+        with open("vnfPackage.csar", "wb") as fp:
+            fp.writelines("AAAABBBBCCCCDDDD")
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            onboardingState="ONBOARDED",
+            localFilePath="vnfPackage.csar"
+        )
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content", RANGE="4-7")
+        partial_file_content = ''
+        for data in response.streaming_content:
+            partial_file_content = partial_file_content + data
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual('BBB', partial_file_content)
+        os.remove("vnfPackage.csar")
+
+    def test_fetch_vnf_pkg_when_pkg_not_exist(self):
+        """Fetching content of an unknown package returns 404."""
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+    def test_fetch_vnf_pkg_when_catch_cataloge_exception(self):
+        """Fetching content of a package that is not ONBOARDED yet returns 500."""
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            onboardingState="CREATED",
+            localFilePath="vnfPackage.csar"
+        )
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @mock.patch.object(VnfPackage, "create_vnf_pkg")
+    def test_create_vnf_pkg_when_catch_exception(self, mock_create_vnf_pkg):
+        mock_create_vnf_pkg.side_effect = TypeError('integer type')
+        req_data = {
+            "userDefinedData": {"a": "A"}
+        }
+        response = self.client.post("/api/vnfpkgm/v1/vnf_packages", data=req_data, format="json")
+        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @mock.patch.object(VnfPackage, "delete_vnf_pkg")
+    def test_delete_single_when_catch_exception(self, mock_delete_vnf_pkg):
+        # An unexpected error during delete is surfaced as HTTP 500.
+        mock_delete_vnf_pkg.side_effect = TypeError("integer type")
+        response = self.client.delete("/api/vnfpkgm/v1/vnf_packages/222")
+        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @mock.patch.object(VnfPackage, "query_single")
+    def test_query_single_when_catch_exception(self, mock_query_single):
+        # An unexpected error during a single-package query yields HTTP 500.
+        mock_query_single.side_effect = TypeError("integer type")
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222")
+        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @mock.patch.object(VnfPackage, "query_multiple")
+    def test_query_multiple_when_catch_exception(self, mock_query_muitiple):
+        mock_query_muitiple.side_effect = TypeError("integer type")
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages")
+        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @mock.patch.object(toscaparsers, 'parse_vnfd')
+    def test_upload_when_catch_exception(self, mock_parse_vnfd):
+        data = {'file': open(os.path.join(GENERICPARSER_ROOT_PATH, "empty.txt"), "rb")}
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            onboardingState="CREATED"
+        )
+        mock_parse_vnfd.side_effect = TypeError("integer type")
+        response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @mock.patch.object(VnfPkgUploadThread, 'start')
+    def test_upload_from_uri_when_catch_exception(self, mock_start):
+        # Failure to launch the upload thread is surfaced as HTTP 500.
+        req_data = {"addressInformation": "https://127.0.0.1:1234/sdc/v1/hss.csar"}
+        mock_start.side_effect = TypeError("integer type")
+        response = self.client.post("/api/vnfpkgm/v1/vnf_packages/111/package_content/upload_from_uri", data=req_data)
+        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @mock.patch.object(VnfPackage, 'download')
+    def test_fetch_vnf_pkg_when_catch_exception(self, mock_download):
+        # An unexpected error during download is surfaced as HTTP 500.
+        mock_download.side_effect = TypeError("integer type")
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/package_content")
+        self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @mock.patch.object(toscaparsers, 'parse_vnfd')
+    def test_fetch_vnf_artifact(self, mock_parse_vnfd):
+        data = {'file': open(os.path.join(GENERICPARSER_ROOT_PATH, "resource_test.csar"), "rb")}
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            onboardingState="CREATED"
+        )
+        mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+        response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/artifacts/image")
+        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(response.getvalue(), "ubuntu_16.04\n")
+
+    @mock.patch.object(toscaparsers, 'parse_vnfd')
+    def test_fetch_vnf_artifact_not_exists(self, mock_parse_vnfd):
+        data = {'file': open(os.path.join(GENERICPARSER_ROOT_PATH, "resource_test.csar"), "rb")}
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            onboardingState="CREATED"
+        )
+        mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+        response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages/1451/artifacts/image")
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+
+    @mock.patch.object(toscaparsers, 'parse_vnfd')
+    def test_fetch_vnf_artifact_vnf_not_exists(self, mock_parse_vnfd):
+        data = {'file': open(os.path.join(GENERICPARSER_ROOT_PATH, "resource_test.csar"), "rb")}
+        VnfPackageModel.objects.create(
+            vnfPackageId="222",
+            onboardingState="CREATED"
+        )
+        mock_parse_vnfd.return_value = json.JSONEncoder().encode(vnfd_data)
+        response = self.client.put("/api/vnfpkgm/v1/vnf_packages/222/package_content", data=data)
+        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+        response = self.client.get("/api/vnfpkgm/v1/vnf_packages/222/artifacts/image1")
+        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
diff --git a/genericparser/packages/tests/test_vnf_pkg_subscription.py b/genericparser/packages/tests/test_vnf_pkg_subscription.py
new file mode 100644 (file)
index 0000000..25e8c5d
--- /dev/null
@@ -0,0 +1,177 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import uuid
+import mock
+from rest_framework.test import APIClient
+from django.test import TestCase
+from genericparser.pub.database.models import VnfPkgSubscriptionModel
+
+
+class TestNfPackageSubscription(TestCase):
+    def setUp(self):
+        self.client = APIClient()
+        VnfPkgSubscriptionModel.objects.filter().delete()
+        self.vnf_subscription_data = {
+            "filters": {
+                "notificationTypes": [
+                    "VnfPackageOnboardingNotification"
+                ],
+                "vnfProductsFromProviders": {
+                    "vnfProvider": "string",
+                    "vnfProducts": {
+                        "vnfProductName": "string",
+                        "versions": {
+                            "vnfSoftwareVersion": "string",
+                            "vnfdVersions": [
+                                "string"
+                            ]
+                        }
+                    }
+                },
+                "vnfdId": [
+                    "3fa85f64-5717-4562-b3fc-2c963f66afa6"
+                ],
+                "vnfPkgId": [
+                    "3fa85f64-5717-4562-b3fc-2c963f66afa6"
+                ],
+                "operationalState": [
+                    "ENABLED"
+                ],
+                "usageState": [
+                    "IN_USE"
+                ]
+            },
+            "callbackUri": "http://www.vnf1.com/notification",
+            "authentication": {
+                "authType": [
+                    "BASIC"
+                ],
+                "paramsBasic": {
+                    "userName": "string",
+                    "password": "string"
+                }
+            }
+        }
+
+    def tearDown(self):
+        pass
+
+    @mock.patch("requests.get")
+    @mock.patch.object(uuid, 'uuid4')
+    def test_create_vnf_subscription(self, mock_uuid4, mock_requests):
+        temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.status_code = 204
+        mock_uuid4.return_value = temp_uuid
+        response = self.client.post("/api/vnfpkgm/v1/subscriptions", data=self.vnf_subscription_data, format='json')
+        self.assertEqual(201, response.status_code)
+        self.assertEqual(self.vnf_subscription_data["callbackUri"], response.data["callbackUri"])
+        self.assertEqual(temp_uuid, response.data["id"])
+
+    @mock.patch("requests.get")
+    @mock.patch.object(uuid, 'uuid4')
+    def test_duplicate_subscriptions(self, mock_uuid4, mock_requests):
+        temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+        temp1_uuid = "00342b18-a5c7-11e8-998c-bf1755941f12"
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.status_code = 204
+        mock_uuid4.side_effect = [temp_uuid, temp1_uuid]
+        response = self.client.post("/api/vnfpkgm/v1/subscriptions", data=self.vnf_subscription_data, format='json')
+        self.assertEqual(201, response.status_code)
+        self.assertEqual(self.vnf_subscription_data["callbackUri"], response.data["callbackUri"])
+        self.assertEqual(temp_uuid, response.data["id"])
+        temp_uuid = "00442b18-a5c7-11e8-998c-bf1755941f12"
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.status_code = 204
+        mock_uuid4.return_value = temp_uuid
+        response = self.client.post("/api/vnfpkgm/v1/subscriptions", data=self.vnf_subscription_data, format='json')
+        self.assertEqual(303, response.status_code)
+
+    @mock.patch("requests.get")
+    @mock.patch.object(uuid, 'uuid4')
+    def test_get_subscriptions(self, mock_uuid4, mock_requests):
+        temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.status_code = 204
+        mock_uuid4.return_value = temp_uuid
+        self.client.post("/api/vnfpkgm/v1/subscriptions",
+                         data=self.vnf_subscription_data, format='json')
+        response = self.client.get("/api/vnfpkgm/v1/subscriptions?usageState=IN_USE",
+                                   format='json')
+        self.assertEqual(200, response.status_code)
+        self.assertEqual(1, len(response.data))
+
+    @mock.patch("requests.get")
+    @mock.patch.object(uuid, 'uuid4')
+    def test_get_subscriptions_with_invalid_params(self, mock_uuid4, mock_requests):
+        temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.status_code = 204
+        mock_uuid4.return_value = temp_uuid
+        self.client.post("/api/vnfpkgm/v1/subscriptions",
+                         data=self.vnf_subscription_data, format='json')
+        response = self.client.get("/api/vnfpkgm/v1/subscriptions?dummy=dummy",
+                                   format='json')
+        self.assertEqual(400, response.status_code)
+
+    @mock.patch("requests.get")
+    @mock.patch.object(uuid, 'uuid4')
+    def test_get_subscription_with_id(self, mock_uuid4, mock_requests):
+        temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.status_code = 204
+        mock_uuid4.return_value = temp_uuid
+        self.client.post("/api/vnfpkgm/v1/subscriptions",
+                         data=self.vnf_subscription_data, format='json')
+        response = self.client.get("/api/vnfpkgm/v1/subscriptions/" + temp_uuid,
+                                   format='json')
+        self.assertEqual(200, response.status_code)
+        self.assertEqual(temp_uuid, response.data["id"])
+
+    @mock.patch("requests.get")
+    @mock.patch.object(uuid, 'uuid4')
+    def test_get_subscription_with_id_not_exists(self, mock_uuid4, mock_requests):
+        temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+        dummy_uuid = str(uuid.uuid4())
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.status_code = 204
+        mock_uuid4.return_value = temp_uuid
+        self.client.post("/api/vnfpkgm/v1/subscriptions",
+                         data=self.vnf_subscription_data, format='json')
+        response = self.client.get("/api/vnfpkgm/v1/subscriptions/" + dummy_uuid,
+                                   format='json')
+        self.assertEqual(404, response.status_code)
+
+    @mock.patch("requests.get")
+    @mock.patch.object(uuid, 'uuid4')
+    def test_delete_subscription_with_id(self, mock_uuid4, mock_requests):
+        temp_uuid = "99442b18-a5c7-11e8-998c-bf1755941f13"
+        dummy_uuid = str(uuid.uuid4())
+        mock_requests.return_value.status_code = 204
+        mock_requests.get.status_code = 204
+        mock_uuid4.return_value = temp_uuid
+        self.client.post("/api/vnfpkgm/v1/subscriptions",
+                         data=self.vnf_subscription_data, format='json')
+        self.client.get("/api/vnfpkgm/v1/subscriptions/" + dummy_uuid,
+                        format='json')
+        response = self.client.delete("/api/vnfpkgm/v1/subscriptions/" + temp_uuid)
+        self.assertEqual(204, response.status_code)
+
+    @mock.patch("requests.get")
+    @mock.patch.object(uuid, 'uuid4')
+    def test_delete_subscription_with_id_not_exists(self, mock_uuid4, mock_requests):
+        dummy_uuid = str(uuid.uuid4())
+        response = self.client.delete("/api/vnfpkgm/v1/subscriptions/" + dummy_uuid)
+        self.assertEqual(404, response.status_code)
diff --git a/genericparser/packages/tests/test_vnfpackage.py b/genericparser/packages/tests/test_vnfpackage.py
new file mode 100644 (file)
index 0000000..b969b4f
--- /dev/null
@@ -0,0 +1,405 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import mock
+from rest_framework.test import APIClient
+from django.test import TestCase
+from rest_framework import status
+from genericparser.packages.biz.sdc_vnf_package import NfDistributeThread, NfPkgDeleteThread
+from genericparser.pub.database.models import JobStatusModel, JobModel
+from genericparser.pub.database.models import VnfPackageModel
+from genericparser.pub.msapi import sdc
+from genericparser.pub.utils import restcall, toscaparsers
+
+
+class TestNfPackage(TestCase):
+    """Tests for SDC VNF package distribution/deletion and VNFD parsing APIs."""
+
+    def setUp(self):
+        # Fresh client and clean tables so job/package assertions are isolated.
+        self.client = APIClient()
+        VnfPackageModel.objects.filter().delete()
+        JobModel.objects.filter().delete()
+        JobStatusModel.objects.filter().delete()
+        # Representative parsed-VNFD structure returned by the mocked
+        # toscaparsers.parse_vnfd in the distribution tests below.
+        self.vnfd_data = {
+            "volume_storages": [
+                {
+                    "properties": {
+                        "size_of_storage": {
+                            "factor": 10,
+                            "value": 10000000000,
+                            "unit": "GB",
+                            "unit_size": 1000000000
+                        },
+                        "type_of_storage": "volume",
+                        "rdma_enabled": False,
+                        "size": "10 GB"
+                    },
+                    "volume_storage_id": "vNAT_Storage_6wdgwzedlb6sq18uzrr41sof7",
+                    "description": ""
+                }
+            ],
+            "inputs": {},
+            "vdus": [
+                {
+                    "volume_storages": [
+                        "vNAT_Storage_6wdgwzedlb6sq18uzrr41sof7"
+                    ],
+                    "description": "",
+                    "dependencies": [],
+                    "vls": [],
+                    "properties": {
+                        "name": "vNat",
+                        "configurable_properties": {
+                            "test": {
+                                "additional_vnfc_configurable_properties": {
+                                    "aaa": "1",
+                                    "bbb": "2",
+                                    "ccc": "3"
+                                }
+                            }
+                        },
+                        "description": "the virtual machine of vNat",
+                        "nfvi_constraints": [
+                            "test"
+                        ],
+                        "boot_order": [
+                            "vNAT_Storage"
+                        ]
+                    },
+                    "vdu_id": "vdu_vNat",
+                    "artifacts": [
+                        {
+                            "artifact_name": "vNatVNFImage",
+                            "type": "tosca.artifacts.nfv.SwImage",
+                            "properties": {
+                                "operating_system": "linux",
+                                "sw_image": "/swimages/vRouterVNF_ControlPlane.qcow2",
+                                "name": "vNatVNFImage",
+                                "container_format": "bare",
+                                "min_ram": "1 GB",
+                                "disk_format": "qcow2",
+                                "supported_virtualisation_environments": [
+                                    "test_0"
+                                ],
+                                "version": "1.0",
+                                "checksum": "5000",
+                                "min_disk": "10 GB",
+                                "size": "10 GB"
+                            },
+                            "file": "/swimages/vRouterVNF_ControlPlane.qcow2"
+                        }
+                    ],
+                    "nfv_compute": {
+                        "flavor_extra_specs": {
+                            "hw:cpu_sockets": "2",
+                            "sw:ovs_dpdk": "true",
+                            "hw:cpu_threads": "2",
+                            "hw:numa_mem.1": "3072",
+                            "hw:numa_mem.0": "1024",
+                            "hw:numa_nodes": "2",
+                            "hw:numa_cpus.0": "0,1",
+                            "hw:numa_cpus.1": "2,3,4,5",
+                            "hw:cpu_cores": "2",
+                            "hw:cpu_threads_policy": "isolate"
+                        },
+                        "cpu_frequency": "2.4 GHz",
+                        "num_cpus": 2,
+                        "mem_size": "10 GB"
+                    },
+                    "local_storages": [],
+                    "image_file": "vNatVNFImage",
+                    "cps": []
+                }
+            ],
+            "image_files": [
+                {
+                    "properties": {
+                        "operating_system": "linux",
+                        "sw_image": "/swimages/vRouterVNF_ControlPlane.qcow2",
+                        "name": "vNatVNFImage",
+                        "container_format": "bare",
+                        "min_ram": "1 GB",
+                        "disk_format": "qcow2",
+                        "supported_virtualisation_environments": [
+                            "test_0"
+                        ],
+                        "version": "1.0",
+                        "checksum": "5000",
+                        "min_disk": "10 GB",
+                        "size": "10 GB"
+                    },
+                    "image_file_id": "vNatVNFImage",
+                    "description": ""
+                }
+            ],
+            "routers": [],
+            "local_storages": [],
+            "vnf_exposed": {
+                "external_cps": [
+                    {
+                        "key_name": "sriov_plane",
+                        "cp_id": "SRIOV_Port"
+                    }
+                ],
+                "forward_cps": []
+            },
+            "vls": [
+                {
+                    "route_id": "",
+                    "vl_id": "sriov_link",
+                    "route_external": False,
+                    "description": "",
+                    "properties": {
+                        "vl_flavours": {
+                            "vl_id": "aaaa"
+                        },
+                        "connectivity_type": {
+                            "layer_protocol": "ipv4",
+                            "flow_pattern": "flat"
+                        },
+                        "description": "sriov_link",
+                        "test_access": [
+                            "test"
+                        ]
+                    }
+                }
+            ],
+            "cps": [
+                {
+                    "vl_id": "sriov_link",
+                    "vdu_id": "vdu_vNat",
+                    "description": "",
+                    "cp_id": "SRIOV_Port",
+                    "properties": {
+                        "address_data": [
+                            {
+                                "address_type": "ip_address",
+                                "l3_address_data": {
+                                    "ip_address_type": "ipv4",
+                                    "floating_ip_activated": False,
+                                    "number_of_ip_address": 1,
+                                    "ip_address_assignment": True
+                                }
+                            }
+                        ],
+                        "description": "sriov port",
+                        "layer_protocol": "ipv4",
+                        "virtual_network_interface_requirements": [
+                            {
+                                "requirement": {
+                                    "SRIOV": "true"
+                                },
+                                "support_mandatory": False,
+                                "name": "sriov",
+                                "description": "sriov"
+                            },
+                            {
+                                "requirement": {
+                                    "SRIOV": "False"
+                                },
+                                "support_mandatory": False,
+                                "name": "normal",
+                                "description": "normal"
+                            }
+                        ],
+                        "role": "root",
+                        "bitrate_requirement": 10
+                    }
+                }
+            ],
+            "metadata": {
+                "vnfSoftwareVersion": "1.0.0",
+                "vnfProductName": "zte",
+                "localizationLanguage": [
+                    "english",
+                    "chinese"
+                ],
+                "vnfProvider": "zte",
+                "vnfmInfo": "zte",
+                "defaultLocalizationLanguage": "english",
+                "vnfdId": "zte-hss-1.0",
+                "id": "zte-hss-1.0",
+                "vnfProductInfoDescription": "hss",
+                "vnfdVersion": "1.0.0",
+                "vnfProductInfoName": "hss"
+            },
+            "vnf": {
+                "properties": {
+                    "descriptor_id": "zte-hss-1.0",
+                    # NOTE(review): "descriptor_verison" is misspelled — confirm
+                    # it mirrors the actual key emitted by the parser.
+                    "descriptor_verison": "1.0.0",
+                    "software_version": "1.0.0",
+                    "provider": "zte"
+                },
+                "metadata": {
+                }
+            }
+        }
+
+    def tearDown(self):
+        pass
+
+    def assert_job_result(self, job_id, job_progress, job_detail):
+        # Assert that exactly one job-status row matches the expected
+        # id/progress/description triple.
+        jobs = JobStatusModel.objects.filter(
+            jobid=job_id,
+            progress=job_progress,
+            descp=job_detail)
+        self.assertEqual(1, len(jobs))
+
+    @mock.patch.object(NfDistributeThread, 'run')
+    def test_nf_pkg_distribute_normal(self, mock_run):
+        # Distribution request is accepted (202) while the real work runs
+        # in the (mocked) background thread.
+        resp = self.client.post("/api/genericparser/v1/vnfpackages", {
+            "csarId": "1",
+            "vimIds": ["1"]
+        }, format='json')
+        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+
+    def test_nf_pkg_distribute_when_csar_already_exist(self):
+        # Distributing a CSAR id that already has a package row fails the job.
+        VnfPackageModel(vnfPackageId="1", vnfdId="vcpe_vfw_zte_1_0").save()
+        NfDistributeThread(csar_id="1",
+                           vim_ids=["1"],
+                           lab_vim_id="",
+                           job_id="2").run()
+        self.assert_job_result("2", 255, "NF CSAR(1) already exists.")
+
+    @mock.patch.object(restcall, 'call_req')
+    @mock.patch.object(sdc, 'download_artifacts')
+    @mock.patch.object(toscaparsers, 'parse_vnfd')
+    def test_nf_pkg_distribute_when_vnfd_already_exist(self,
+                                                       mock_parse_vnfd, mock_download_artifacts, mock_call_req):
+        # A different CSAR id carrying an already-registered vnfdId fails the job.
+        mock_parse_vnfd.return_value = json.JSONEncoder().encode(self.vnfd_data)
+        mock_download_artifacts.return_value = "/home/hss.csar"
+        mock_call_req.return_value = [0, json.JSONEncoder().encode([{
+            "uuid": "1",
+            "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/hss.csar"
+        }]), '200']
+        VnfPackageModel(vnfPackageId="2", vnfdId="zte-hss-1.0").save()
+        NfDistributeThread(csar_id="1",
+                           vim_ids=["1"],
+                           lab_vim_id="",
+                           job_id="2").run()
+        self.assert_job_result("2", 255, "VNF package(zte-hss-1.0) already exists.")
+
+    @mock.patch.object(restcall, 'call_req')
+    @mock.patch.object(sdc, 'download_artifacts')
+    @mock.patch.object(toscaparsers, 'parse_vnfd')
+    def test_nf_pkg_distribute_successfully(self,
+                                            mock_parse_vnfd, mock_download_artifacts, mock_call_req):
+        # Happy path: SDC lookup, artifact download and VNFD parsing are all
+        # mocked, so the job must complete at 100%.
+        mock_parse_vnfd.return_value = json.JSONEncoder().encode(self.vnfd_data)
+        mock_download_artifacts.return_value = "/home/hss.csar"
+        mock_call_req.return_value = [0, json.JSONEncoder().encode([{
+            "uuid": "1",
+            "toscaModelURL": "https://127.0.0.1:1234/sdc/v1/hss.csar"
+        }]), '200']
+        NfDistributeThread(csar_id="1",
+                           vim_ids=["1"],
+                           lab_vim_id="",
+                           job_id="4").run()
+        self.assert_job_result("4", 100, "CSAR(1) distribute successfully.")
+
+    ###############################################################################################################
+
+    @mock.patch.object(NfPkgDeleteThread, 'run')
+    def test_nf_pkg_delete_normal(self, mock_run):
+        # Delete request is accepted (202); the worker thread is mocked.
+        resp = self.client.delete("/api/genericparser/v1/vnfpackages/1")
+        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+
+    def test_nf_pkg_normal_delete(self):
+        # Running the delete thread synchronously completes the job at 100%.
+        VnfPackageModel(vnfPackageId="2", vnfdId="vcpe_vfw_zte_1_0").save()
+        NfPkgDeleteThread(csar_id="2", job_id="2").run()
+        self.assert_job_result("2", 100, "Delete CSAR(2) successfully.")
+
+    def test_nf_pkg_get_all(self):
+        # Listing returns one entry per stored package, in creation order.
+        VnfPackageModel(vnfPackageId="3", vnfdId="3", vnfVendor='3', vnfdVersion='3',
+                        vnfSoftwareVersion='', vnfPackageUri='', vnfdModel='').save()
+        VnfPackageModel(vnfPackageId="4", vnfdId="4", vnfVendor='4', vnfdVersion='4',
+                        vnfSoftwareVersion='', vnfPackageUri='', vnfdModel='').save()
+        resp = self.client.get("/api/genericparser/v1/vnfpackages")
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+        expect_data = [
+            {
+                "imageInfo": [],
+                "csarId": "3",
+                "packageInfo": {
+                    "csarName": "",
+                    "vnfdModel": "",
+                    "vnfdProvider": "3",
+                    "vnfdId": "3",
+                    "downloadUrl": "http://127.0.0.1:8806/static/genericparser/3/",
+                    "vnfVersion": "",
+                    "vnfdVersion": "3",
+                    "vnfPackageId": "3"
+                }
+            },
+            {
+                "imageInfo": [],
+                "csarId": "4",
+                "packageInfo": {
+                    "csarName": "",
+                    "vnfdModel": "",
+                    "vnfdProvider": "4",
+                    "vnfdId": "4",
+                    "downloadUrl": "http://127.0.0.1:8806/static/genericparser/4/",
+                    "vnfVersion": "",
+                    "vnfdVersion": "4",
+                    "vnfPackageId": "4"
+                }
+            }
+        ]
+        self.assertEqual(expect_data, resp.data)
+
+    def test_nf_pkg_get_one(self):
+        # A single package is returned with the same projection as the list view.
+        VnfPackageModel(vnfPackageId="4", vnfdId="4", vnfVendor='4', vnfdVersion='4',
+                        vnfSoftwareVersion='', vnfPackageUri='', vnfdModel='').save()
+
+        resp = self.client.get("/api/genericparser/v1/vnfpackages/4")
+        self.assertEqual(resp.status_code, status.HTTP_200_OK)
+        expect_data = {
+            "imageInfo": [],
+            "csarId": "4",
+            "packageInfo": {
+                "csarName": "",
+                "vnfdModel": "",
+                "vnfdProvider": "4",
+                "vnfdId": "4",
+                "downloadUrl": "http://127.0.0.1:8806/static/genericparser/4/",
+                "vnfVersion": "",
+                "vnfdVersion": "4",
+                "vnfPackageId": "4"
+            }
+        }
+        self.assertEqual(expect_data, resp.data)
+
+    def test_nf_pkg_get_one_failed(self):
+        # Requesting an unknown id is reported as a 500 with an error body
+        # (this API maps "not found" to 500, not 404).
+        VnfPackageModel(vnfPackageId="4", vnfdId="4", vnfVendor='4', vnfdVersion='4',
+                        vnfSoftwareVersion='', vnfPackageUri='', vnfdModel='').save()
+
+        resp = self.client.get("/api/genericparser/v1/vnfpackages/2")
+        self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+        self.assertEqual({'error': 'Vnf package[2] not Found.'}, resp.data)
+
+    ###############################################################################################################
+
+    @mock.patch.object(toscaparsers, 'parse_vnfd')
+    def test_vnfd_parse_normal(self, mock_parse_vnfd):
+        # Parsing an existing CSAR returns the parser output as-is under "model".
+        VnfPackageModel(vnfPackageId="8", vnfdId="10").save()
+        mock_parse_vnfd.return_value = json.JSONEncoder().encode({"c": "d"})
+        req_data = {"csarId": "8", "inputs": []}
+        resp = self.client.post("/api/genericparser/v1/parservnfd", req_data, format='json')
+        self.assertEqual(resp.status_code, status.HTTP_202_ACCEPTED)
+        self.assertEqual({"model": '{"c": "d"}'}, resp.data)
+
+    def test_vnfd_parse_when_csar_not_exist(self):
+        # Parsing an unknown CSAR id is reported as 500 with an error body.
+        req_data = {"csarId": "1", "inputs": []}
+        resp = self.client.post("/api/genericparser/v1/parservnfd", req_data, format='json')
+        self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
+        self.assertEqual(resp.data, {"error": "VNF CSAR(1) does not exist."})
diff --git a/genericparser/packages/urls.py b/genericparser/packages/urls.py
new file mode 100644 (file)
index 0000000..bf2eb11
--- /dev/null
@@ -0,0 +1,76 @@
+# Copyright 2017-2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.conf.urls import url
+
+from genericparser.packages.views import vnf_package_views
+from genericparser.packages.views.vnf_package_subscription_views import CreateQuerySubscriptionView,\
+    QueryTerminateSubscriptionView
+from genericparser.packages.views.vnf_package_artifact_views import FetchVnfPkgmArtifactsView
+from genericparser.packages.views import catalog_views, ns_descriptor_views, pnf_descriptor_views, nsdm_subscription_views
+from genericparser.packages.views.health_check_views import HealthCheckView
+
+
# URL routing for the packages app: SDC package sync, legacy NFV model
# parsing, the ETSI SOL005/SOL003 NSD/PNFD/VNF-package APIs, and health checks.
#
# NOTE(review): the same route *name* is registered under both the legacy
# '/api/parser' and the '/api/genericparser' prefixes (e.g. 'nsmodelparser_rc',
# 'vnfmodelparser_rc', 'pnfmodelparser_rc'). Django keeps only the last
# registration for reverse() lookups -- confirm nothing reverses the shadowed
# entries before renaming.
urlpatterns = [

    # Sync package from SDC
    url(r'^api/genericparser/v1/nspackages$', catalog_views.nspackages_rc, name='nspackages_rc'),
    url(r'^api/genericparser/v1/nspackages/(?P<csarId>[0-9a-zA-Z\-\_]+)$', catalog_views.ns_rd_csar, name='nspackage_rd'),
    url(r'^api/genericparser/v1/vnfpackages$', catalog_views.nfpackages_rc, name='nfpackages_rc'),
    url(r'^api/genericparser/v1/vnfpackages/(?P<csarId>[0-9a-zA-Z\-\_]+)$', catalog_views.nf_rd_csar, name='nfpackage_rd'),
    url(r'^api/parser/v1/service_packages$', catalog_views.servicepackages_rc, name='servicepackages_rc'),
    url(r'^api/parser/v1/service_packages/(?P<csarId>[0-9a-zA-Z\-\_]+)$', catalog_views.service_rd_csar, name='servicepackage_rd'),

    # NFV Model Parser
    url(r'^api/parser/v1/parser$', catalog_views.model_parser, name='modelparser_rc'),
    url(r'^api/parser/v1/parsernsd$', catalog_views.ns_model_parser, name='nsmodelparser_rc'),
    url(r'^api/parser/v1/parservnfd$', catalog_views.vnf_model_parser, name='vnfmodelparser_rc'),
    url(r'^api/parser/v1/parserpnfd$', pnf_descriptor_views.pnf_model_parser, name='pnfmodelparser_rc'),
    url(r'^api/genericparser/v1/parsernsd$', catalog_views.ns_model_parser, name='nsmodelparser_rc'),
    url(r'^api/genericparser/v1/parservnfd$', catalog_views.vnf_model_parser, name='vnfmodelparser_rc'),
    url(r'^api/genericparser/v1/parserpnfd$', pnf_descriptor_views.pnf_model_parser, name='pnfmodelparser_rc'),

    # ETSI SOL005 NSD API
    url(r'^api/nsd/v1/ns_descriptors$', ns_descriptor_views.ns_descriptors_rc, name='ns_descriptors_rc'),
    url(r'^api/nsd/v1/ns_descriptors/(?P<nsdInfoId>[0-9a-zA-Z\-\_]+)$', ns_descriptor_views.ns_info_rd, name='ns_info_rd'),
    url(r'^api/nsd/v1/ns_descriptors/(?P<nsdInfoId>[0-9a-zA-Z\-\_]+)/nsd_content$', ns_descriptor_views.nsd_content_ru, name='nsd_content_ru'),
    url(r'^api/nsd/v1/subscriptions$', nsdm_subscription_views.nsd_subscription_rc, name='nsd_subscription_rc'),
    url(r'^api/nsd/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', nsdm_subscription_views.nsd_subscription_rd, name='nsd_subscription_rd'),

    #  ETSI SOL005 PNFD
    url(r'^api/nsd/v1/pnf_descriptors$', pnf_descriptor_views.pnf_descriptors_rc, name='pnf_descriptors_rc'),
    url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)$', pnf_descriptor_views.pnfd_info_rd, name='pnfd_info_rd'),
    url(r'^api/nsd/v1/pnf_descriptors/(?P<pnfdInfoId>[0-9a-zA-Z\-\_]+)/pnfd_content$', pnf_descriptor_views.pnfd_content_ru, name='pnfd_content_ru'),

    #  ETSI SOL005&SOL003 VNF Package
    url(r'^api/vnfpkgm/v1/vnf_packages$', vnf_package_views.vnf_packages_rc, name='vnf_packages_rc'),
    url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)$', vnf_package_views.vnf_package_rd, name='vnf_package_rd'),
    url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/package_content$', vnf_package_views.package_content_ru, name='package_content_ru'),
    url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/package_content/upload_from_uri$', vnf_package_views.upload_from_uri_c, name='upload_from_uri_c'),

    # ETSI SOL 005 VNF Package Management Subscription APIs
    url(r'^api/vnfpkgm/v1/subscriptions$', CreateQuerySubscriptionView.as_view(), name='subscriptions_create_query'),
    url(r'^api/vnfpkgm/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', QueryTerminateSubscriptionView.as_view(), name='subscriptions_query_terminate'),
    url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/artifacts/(?P<artifactPath>[0-9a-zA-Z\-\_]+)$', FetchVnfPkgmArtifactsView.as_view(), name="fetch_vnf_artifacts"),
    # url(r'^api/vnfpkgm/v1/subscriptions/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)$', vnf_package_subscription_views.vnf_package_subscriptions_rc, name='subscriptions_rc'),
    # url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/vnfd$', vnfd.as_view(), name='vnfd_r'),# url(r'^api/vnfpkgm/v1/vnf_packages/(?P<vnfPkgId>[0-9a-zA-Z\-\_]+)/artifacts/artifactPath$', artifacts.as_view(), name='artifacts_r'),

    # url(r'^api/vnfpkgm/v1/subscriptions/(?P<subscriptionId>[0-9a-zA-Z\-\_]+)$', vnfpkg_subscription.as_view(), name='subscription_rd'),

    # health check
    url(r'^api/vnfpkgm/v1/health_check$', HealthCheckView.as_view()),
    url(r'^api/nsd/v1/health_check$', HealthCheckView.as_view()),
    url(r'^api/genericparser/v1/health_check$', HealthCheckView.as_view()),
    url(r'^api/parser/v1/health_check$', HealthCheckView.as_view()),
]
diff --git a/genericparser/packages/views/__init__.py b/genericparser/packages/views/__init__.py
new file mode 100644 (file)
index 0000000..342c2a8
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/packages/views/catalog_views.py b/genericparser/packages/views/catalog_views.py
new file mode 100644 (file)
index 0000000..ed10e68
--- /dev/null
@@ -0,0 +1,535 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import uuid
+
+from drf_yasg import openapi
+from drf_yasg.utils import no_body, swagger_auto_schema
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+from genericparser.packages.biz import sdc_vnf_package, sdc_ns_package
+from genericparser.packages.biz.pnf_descriptor import PnfDescriptor
+from genericparser.packages.biz.sdc_service_package import ServicePackage
+from genericparser.packages.serializers.genericparser_serializers import InternalErrorRequestSerializer, \
+    ServicePackageDistributeRequestSerializer, ServicePackagesSerializer, ServicePackageSerializer
+from genericparser.packages.serializers.genericparser_serializers import NfPackageDistributeRequestSerializer
+from genericparser.packages.serializers.genericparser_serializers import NfPackageSerializer
+from genericparser.packages.serializers.genericparser_serializers import NfPackagesSerializer
+from genericparser.packages.serializers.genericparser_serializers import NsPackageDistributeRequestSerializer
+from genericparser.packages.serializers.genericparser_serializers import NsPackageDistributeResponseSerializer
+from genericparser.packages.serializers.genericparser_serializers import NsPackageSerializer
+from genericparser.packages.serializers.genericparser_serializers import NsPackagesSerializer
+from genericparser.packages.serializers.genericparser_serializers import ParseModelRequestSerializer
+from genericparser.packages.serializers.genericparser_serializers import ParseModelResponseSerializer
+from genericparser.packages.serializers.genericparser_serializers import PostJobResponseSerializer
+from genericparser.packages.views.common import fmt_error_rsp
+from genericparser.pub.exceptions import PackageNotFoundException, PackageHasExistsException
+from genericparser.pub.utils.syscomm import fun_name
+from genericparser.pub.utils.values import ignore_case_get
+
+logger = logging.getLogger(__name__)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="On distribute NS package",
    request_body=NsPackageDistributeRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: NsPackageDistributeResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@swagger_auto_schema(
    method='GET',
    operation_description="Query NS packages",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NsPackagesSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST', 'GET'])
def nspackages_rc(request, *args, **kwargs):
    """List NS packages (GET) or distribute one from SDC (POST).

    GET returns the catalog's NS package list with 200; the payload is
    validated against NsPackagesSerializer before being returned.
    POST expects a body with ``csarId`` and triggers a synchronous
    distribution, answering 202. Any backend failure (result code != 0)
    is reported as a 500 with an ``error`` message.
    """
    logger.debug("Enter %s, method is %s", fun_name(), request.method)

    if request.method == 'GET':
        # Fetch the ns package list from the catalog backend.
        result = sdc_ns_package.ns_get_csars()
        ok_status = status.HTTP_200_OK
        if result[0] == 0:
            out_serializer = NsPackagesSerializer(data=result[1])
            invalid = handleValidatonError(out_serializer, False)
            if invalid:
                return invalid
    else:  # POST -- @api_view only admits GET/POST here
        in_serializer = NsPackageDistributeRequestSerializer(data=request.data)
        invalid = handleValidatonError(in_serializer, True)
        if invalid:
            return invalid

        csar_id = ignore_case_get(request.data, "csarId")
        logger.debug("csar_id is %s", csar_id)
        result = sdc_ns_package.ns_on_distribute(csar_id)
        ok_status = status.HTTP_202_ACCEPTED

    logger.debug("Leave %s, Return value is %s", fun_name(), result)
    if result[0] != 0:
        return Response(
            data={'error': result[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    return Response(data=result[1], status=ok_status)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="On distribute Nf package",
    request_body=NfPackageDistributeRequestSerializer(),
    responses={
        status.HTTP_202_ACCEPTED: PostJobResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@swagger_auto_schema(
    method='GET',
    operation_description="Query Nf packages",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NfPackagesSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST', 'GET'])
def nfpackages_rc(request, *args, **kwargs):
    """List NF packages (GET) or start an asynchronous distribution (POST).

    GET returns the catalog's NF package list (200).
    POST validates the request body, spawns an NfDistributeThread for the
    given ``csarId``/``vimIds``/``labVimId`` and immediately answers 202
    with the ``jobId`` to poll. Backend failures map to 500.
    """
    logger.debug(
        "Enter %s%s, method is %s",
        fun_name(),
        request.data,
        request.method)

    if request.method == 'GET':
        result = sdc_vnf_package.nf_get_csars()
        ok_status = status.HTTP_200_OK
        out_serializer = NfPackagesSerializer(data=result[1])
    else:  # POST -- @api_view only admits GET/POST here
        in_serializer = NfPackageDistributeRequestSerializer(data=request.data)
        invalid = handleValidatonError(in_serializer, True)
        if invalid:
            return invalid

        csar_id = ignore_case_get(in_serializer.data, "csarId")
        vim_ids = ignore_case_get(in_serializer.data, "vimIds")
        lab_vim_id = ignore_case_get(in_serializer.data, "labVimId")
        # Distribution runs in a background thread; the caller polls the job.
        job_id = str(uuid.uuid4())
        sdc_vnf_package.NfDistributeThread(
            csar_id, vim_ids, lab_vim_id, job_id).start()
        result = [0, {"jobId": job_id}]
        ok_status = status.HTTP_202_ACCEPTED
        out_serializer = PostJobResponseSerializer(data=result[1])

    logger.debug("Leave %s, Return value is %s", fun_name(), result)
    if result[0] != 0:
        return Response(
            data={'error': result[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    invalid = handleValidatonError(out_serializer, False)
    if invalid:
        return invalid

    return Response(data=out_serializer.data, status=ok_status)
+
+
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete one NS package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_200_OK: NsPackageDistributeResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
            'error message',
            openapi.Schema(
                type=openapi.TYPE_STRING))})
@swagger_auto_schema(
    method='GET',
    operation_description="Query one NS package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_200_OK: NsPackageSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
            'error message',
            openapi.Schema(
                type=openapi.TYPE_STRING))})
@api_view(http_method_names=['DELETE', 'GET'])
def ns_rd_csar(request, *args, **kwargs):
    """Read (GET) or delete (DELETE) a single NS package identified by csarId.

    Both verbs answer 200 on success; a non-zero backend result code is
    reported as a 500 with an ``error`` message. GET payloads are validated
    against NsPackageSerializer before being returned.
    """
    csar_id = ignore_case_get(kwargs, "csarId")
    logger.info("Enter %s, method is %s, csar_id is %s",
                fun_name(), request.method, csar_id)

    if request.method == 'GET':
        result = sdc_ns_package.ns_get_csar(csar_id)
        ok_status = status.HTTP_200_OK
        if result[0] == 0:
            out_serializer = NsPackageSerializer(data=result[1])
            invalid = handleValidatonError(out_serializer, False)
            if invalid:
                return invalid
    else:  # DELETE -- @api_view only admits GET/DELETE here
        result = sdc_ns_package.ns_delete_csar(csar_id)
        ok_status = status.HTTP_200_OK

    logger.info("Leave %s, Return value is %s", fun_name(), result)
    if result[0] != 0:
        return Response(
            data={'error': result[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    return Response(data=result[1], status=ok_status)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="On distribute Service package",
    request_body=ServicePackageDistributeRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: "",
        status.HTTP_400_BAD_REQUEST: InternalErrorRequestSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@swagger_auto_schema(
    method='GET',
    operation_description="Query Service packages",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: ServicePackagesSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST', 'GET'])
def servicepackages_rc(request, *args, **kwargs):
    """List service packages (GET) or distribute one from SDC (POST).

    GET returns the catalog's service package list (200).
    POST carries the SDC ``csarId`` to distribute; success is an empty 202,
    an already-distributed csarId yields 400, any other failure 500.
    """
    logger.debug("Enter %s, method is %s", fun_name(), request.method)

    if request.method == 'GET':
        # Gets service package list
        try:
            csar_list = ServicePackage().get_csars()
            response_serializer = ServicePackagesSerializer(data=csar_list)
            validation_error = handleValidatonError(response_serializer, False)
            if validation_error:
                return validation_error
            return Response(data=csar_list, status=status.HTTP_200_OK)
        except Exception as e:
            # Fix: builtin exceptions have no .message attribute on Python 3,
            # so a bare `e.message` would itself raise AttributeError and
            # mask the original error. Fall back to str(e).
            error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
            return Response(data=fmt_error_rsp(getattr(e, 'message', str(e)), error_status), status=error_status)

    # POST: distributes the package according to the given csarId
    request_serializer = ServicePackageDistributeRequestSerializer(data=request.data)
    validation_error = handleValidatonError(request_serializer, True)
    if validation_error:
        return validation_error

    csar_id = ignore_case_get(request.data, "csarId")
    logger.debug("csar_id is %s", csar_id)
    try:
        ServicePackage().on_distribute(csar_id)
        return Response(status=status.HTTP_202_ACCEPTED)
    except PackageHasExistsException as e:
        error_status = status.HTTP_400_BAD_REQUEST
        return Response(data=fmt_error_rsp(getattr(e, 'message', str(e)), error_status), status=error_status)
    except Exception as e:
        error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
        return Response(data=fmt_error_rsp(getattr(e, 'message', str(e)), error_status), status=error_status)
+
+
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete one Service package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_204_NO_CONTENT: "",
        status.HTTP_404_NOT_FOUND: InternalErrorRequestSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@swagger_auto_schema(
    method='GET',
    operation_description="Query one Service package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_200_OK: ServicePackageSerializer,
        status.HTTP_404_NOT_FOUND: InternalErrorRequestSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['DELETE', 'GET'])
def service_rd_csar(request, *args, **kwargs):
    """Read (GET) or delete (DELETE) one service package identified by csarId.

    GET answers 200 with the package record; DELETE answers an empty 204.
    An unknown csarId maps to 404, any other failure to 500.
    """
    csar_id = ignore_case_get(kwargs, "csarId")
    logger.info("Enter %s, method is %s, csar_id is %s", fun_name(), request.method, csar_id)

    # Fix throughout: builtin exceptions have no .message on Python 3, so a
    # bare `e.message` would raise AttributeError inside the handler. Use the
    # attribute when the (project) exception provides it, else str(e).
    if request.method == 'GET':
        try:
            ret = ServicePackage().get_csar(csar_id)
            response_serializer = ServicePackageSerializer(data=ret)
            validation_error = handleValidatonError(response_serializer, False)
            if validation_error:
                return validation_error
            return Response(data=ret, status=status.HTTP_200_OK)
        except PackageNotFoundException as e:
            error_status = status.HTTP_404_NOT_FOUND
            return Response(data=fmt_error_rsp(getattr(e, 'message', str(e)), error_status), status=error_status)
        except Exception as e:
            error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
            return Response(data=fmt_error_rsp(getattr(e, 'message', str(e)), error_status), status=error_status)

    elif request.method == 'DELETE':
        try:
            ServicePackage().delete_csar(csar_id)
            return Response(status=status.HTTP_204_NO_CONTENT)
        except PackageNotFoundException as e:
            error_status = status.HTTP_404_NOT_FOUND
            return Response(data=fmt_error_rsp(getattr(e, 'message', str(e)), error_status), status=error_status)
        except Exception as e:
            error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
            return Response(data=fmt_error_rsp(getattr(e, 'message', str(e)), error_status), status=error_status)
+
+
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete one Nf package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_202_ACCEPTED: PostJobResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
            'error message',
            openapi.Schema(
                type=openapi.TYPE_STRING))})
@swagger_auto_schema(
    method='GET',
    operation_description="Query one Nf package",
    request_body=no_body,
    manual_parameters=[
        openapi.Parameter(
            'csarId',
            openapi.IN_QUERY,
            "csarId",
            type=openapi.TYPE_STRING)],
    responses={
        status.HTTP_200_OK: NfPackageSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: openapi.Response(
            'error message',
            openapi.Schema(
                type=openapi.TYPE_STRING))})
@api_view(http_method_names=['DELETE', 'GET'])
def nf_rd_csar(request, *args, **kwargs):
    """Read (GET) or asynchronously delete (DELETE) one NF package by csarId.

    GET answers 200 with the package record. DELETE spawns an
    NfPkgDeleteThread and answers 202 with the ``jobId`` to poll.
    Non-zero backend result codes map to 500.
    """
    csar_id = ignore_case_get(kwargs, "csarId")
    logger.info("Enter %s, method is %s, csar_id is %s",
                fun_name(), request.method, csar_id)

    if request.method == 'GET':
        result = sdc_vnf_package.nf_get_csar(csar_id)
        ok_status = status.HTTP_200_OK
        out_serializer = NfPackageSerializer(data=result[1])
    else:  # DELETE -- @api_view only admits GET/DELETE here
        # Deletion runs in a background thread; the caller polls the job.
        job_id = str(uuid.uuid4())
        sdc_vnf_package.NfPkgDeleteThread(csar_id, job_id).start()
        result = [0, {"jobId": job_id}]
        ok_status = status.HTTP_202_ACCEPTED
        out_serializer = PostJobResponseSerializer(data=result[1])

    logger.info("Leave %s, Return value is %s", fun_name(), result)
    if result[0] != 0:
        return Response(
            data={'error': result[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    invalid = handleValidatonError(out_serializer, False)
    if invalid:
        return invalid

    return Response(data=out_serializer.data, status=ok_status)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Parse model(NS, Service, VNF, PNF)",
    request_body=ParseModelRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST'])
def model_parser(request, *args, **kwargs):
    """Parse the TOSCA model of a distributed package.

    The request body selects the package (``csarId``), its kind
    (``packageType``: Service | NS | VNF | PNF, case-insensitive) and the
    parser ``inputs``. Success answers 202 with the parsed model; an
    unknown packageType yields 400, a missing Service package 404, and
    any other failure 500.
    """
    csar_id = ignore_case_get(request.data, "csarId")
    package_type = ignore_case_get(request.data, "packageType")
    inputs = ignore_case_get(request.data, "inputs")
    logger.debug(
        "Enter %s, csar_id=%s, package_type=%s, inputs=%s",
        fun_name(),
        csar_id,
        package_type,
        inputs)

    # Use plain '==' instead of the original `.lower().__eq__(...)` dunder
    # calls, and lowercase once. Assumes packageType arrives as a string --
    # TODO confirm ignore_case_get's default for a missing key.
    package_type = package_type.lower()
    if package_type == "service":
        try:
            ret = ServicePackage().parse_serviced(csar_id, inputs)
            response_serializer = ParseModelResponseSerializer(data=ret)
            validation_error = handleValidatonError(
                response_serializer, False)
            if validation_error:
                return validation_error
            return Response(data=response_serializer.data, status=status.HTTP_202_ACCEPTED)
        except PackageNotFoundException as e:
            error_status = status.HTTP_404_NOT_FOUND
            return Response(data=fmt_error_rsp(getattr(e, 'message', str(e)), error_status), status=error_status)
        except Exception as e:
            # Fix: builtin exceptions have no .message on Python 3; a bare
            # `e.message` would raise AttributeError and mask the real error.
            error_status = status.HTTP_500_INTERNAL_SERVER_ERROR
            return Response(data=fmt_error_rsp(getattr(e, 'message', str(e)), error_status), status=error_status)
    elif package_type == "ns":
        ret = sdc_ns_package.parse_nsd(csar_id, inputs)
    elif package_type == "vnf":
        ret = sdc_vnf_package.parse_vnfd(csar_id, inputs)
    elif package_type == "pnf":
        ret = PnfDescriptor().parse_pnfd(csar_id, inputs)
    else:
        error_status = status.HTTP_400_BAD_REQUEST
        error_message = "Invalid package type, it should be one of [VNF, PNF, NS, Service]"
        return Response(data=fmt_error_rsp(error_message, error_status), status=error_status)

    if ret[0] != 0:
        return Response(
            data={
                'error': ret[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    response_serializer = ParseModelResponseSerializer(data=ret[1])
    validation_error = handleValidatonError(
        response_serializer, False)
    if validation_error:
        return validation_error

    return Response(data=response_serializer.data, status=status.HTTP_202_ACCEPTED)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Parse NS model",
    request_body=ParseModelRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST'])
def ns_model_parser(request, *args, **kwargs):
    """Parse an NS descriptor model for the given csarId/inputs.

    Answers 202 with the validated parse result; a non-zero backend result
    code maps to a 500 with an ``error`` message.
    """
    csar_id = ignore_case_get(request.data, "csarId")
    inputs = ignore_case_get(request.data, "inputs")
    logger.debug(
        "Enter %s, csar_id=%s, inputs=%s",
        fun_name(),
        csar_id,
        inputs)

    result = sdc_ns_package.parse_nsd(csar_id, inputs)
    logger.info("Leave %s, Return value is %s", fun_name(), result)
    if result[0] != 0:
        return Response(
            data={'error': result[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    out_serializer = ParseModelResponseSerializer(data=result[1])
    invalid = handleValidatonError(out_serializer, False)
    if invalid:
        return invalid

    return Response(data=out_serializer.data, status=status.HTTP_202_ACCEPTED)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Parse NF model",
    request_body=ParseModelRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST'])
def vnf_model_parser(request, *args, **kwargs):
    """Parse a VNF descriptor model for the given csarId/inputs.

    Answers 202 with the validated parse result; a non-zero backend result
    code maps to a 500 with an ``error`` message.
    """
    csar_id = ignore_case_get(request.data, "csarId")
    inputs = ignore_case_get(request.data, "inputs")
    logger.debug(
        "Enter %s, csar_id=%s, inputs=%s",
        fun_name(),
        csar_id,
        inputs)

    result = sdc_vnf_package.parse_vnfd(csar_id, inputs)
    logger.info("Leave %s, Return value is %s", fun_name(), result)
    if result[0] != 0:
        return Response(
            data={'error': result[1]},
            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    out_serializer = ParseModelResponseSerializer(data=result[1])
    invalid = handleValidatonError(out_serializer, False)
    if invalid:
        return invalid

    return Response(data=out_serializer.data, status=status.HTTP_202_ACCEPTED)
+
+
def handleValidatonError(base_serializer, is_request):
    """Validate a DRF serializer and report failures as a 500 Response.

    Returns None when ``base_serializer`` is valid; otherwise logs the
    serializer errors (tagged as an invalid request or response depending
    on ``is_request``) and returns a 500 Response carrying them under
    the ``error`` key.
    """
    if base_serializer.is_valid():
        return None

    errors = base_serializer.errors
    logger.error(errors)
    logger.error('Invalid request' if is_request else 'Invalid response')
    return Response(
        data={'error': errors},
        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/genericparser/packages/views/common.py b/genericparser/packages/views/common.py
new file mode 100644 (file)
index 0000000..70637a9
--- /dev/null
@@ -0,0 +1,95 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import traceback
+import logging
+
+from rest_framework import status
+from rest_framework.response import Response
+
+from genericparser.pub.exceptions import GenericparserException
+from genericparser.pub.exceptions import NsdmBadRequestException
+from genericparser.pub.exceptions import PackageNotFoundException
+from genericparser.pub.exceptions import ResourceNotFoundException
+from genericparser.pub.exceptions import ArtifactNotFoundException
+
+logger = logging.getLogger(__name__)
+
+
def validate_data(data, serializer):
    """Instantiate ``serializer`` with ``data`` and return it once valid.

    Raises GenericparserException carrying the serializer errors when
    validation fails.
    """
    instance = serializer(data=data)
    if instance.is_valid():
        return instance
    logger.error('Data validation failed.')
    raise GenericparserException(instance.errors)
+
+
def fmt_error_rsp(error_message, status):
    """Build the legacy error payload: message under 'errorMessage', HTTP code under 'error'."""
    return dict(errorMessage=error_message, error=status)
+
+
def make_error_resp(status, detail):
    """Wrap an error in a DRF Response whose body repeats the status code and detail."""
    payload = {
        'status': status,
        'detail': detail,
    }
    return Response(data=payload, status=status)
+
+
def view_safe_call_with_log(logger):
    """Decorator factory: run a view and translate known exceptions to HTTP errors.

    Maps the not-found family (PackageNotFoundException,
    ResourceNotFoundException, ArtifactNotFoundException) to 404,
    NsdmBadRequestException to 400, GenericparserException to 500, and any
    other exception to a generic 500 with its traceback logged via the
    supplied ``logger``.
    """
    # Local import keeps this module's top-level import block untouched.
    import functools

    def view_safe_call(func):
        # Fix: without functools.wraps the wrapper hid the view's
        # __name__/__doc__, which breaks introspection of decorated views.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            # The three not-found exceptions previously had three identical
            # handlers; they share one 404 mapping.
            except (PackageNotFoundException,
                    ResourceNotFoundException,
                    ArtifactNotFoundException) as e:
                detail = getattr(e, 'message', str(e))
                logger.error(detail)
                return make_error_resp(
                    detail=detail,
                    status=status.HTTP_404_NOT_FOUND
                )
            except NsdmBadRequestException as e:
                detail = getattr(e, 'message', str(e))
                logger.error(detail)
                return make_error_resp(
                    detail=detail,
                    status=status.HTTP_400_BAD_REQUEST
                )
            except GenericparserException as e:
                detail = getattr(e, 'message', str(e))
                logger.error(detail)
                return make_error_resp(
                    detail=detail,
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR
                )
            except Exception as e:
                # Fix: builtin exceptions have no .message on Python 3, so
                # the old `logger.error(e.message)` raised AttributeError here.
                logger.error(str(e))
                logger.error(traceback.format_exc())
                return make_error_resp(
                    detail='Unexpected exception',
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR
                )
        return wrapper
    return view_safe_call
diff --git a/genericparser/packages/views/health_check_views.py b/genericparser/packages/views/health_check_views.py
new file mode 100644 (file)
index 0000000..cc1a379
--- /dev/null
@@ -0,0 +1,31 @@
+# Copyright (c) 2019, CMCC Technologies Co., Ltd.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework import status
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+logger = logging.getLogger(__name__)
+
+
class HealthCheckView(APIView):
    """Liveness-probe endpoint reporting that the service is up."""

    @swagger_auto_schema(
        responses={
            status.HTTP_200_OK: 'Active'})
    def get(self, request, format=None):
        """Return a fixed payload showing the service is active."""
        logger.debug("Health check.")
        return Response({"status": "active"})
diff --git a/genericparser/packages/views/ns_descriptor_views.py b/genericparser/packages/views/ns_descriptor_views.py
new file mode 100644 (file)
index 0000000..86a3e9e
--- /dev/null
@@ -0,0 +1,139 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.http import StreamingHttpResponse
+from drf_yasg.utils import no_body, swagger_auto_schema
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from genericparser.packages.biz.ns_descriptor import NsDescriptor
+from genericparser.packages.serializers.create_nsd_info_request import CreateNsdInfoRequestSerializer
+from genericparser.packages.serializers.nsd_info import NsdInfoSerializer
+from genericparser.packages.serializers.nsd_infos import NsdInfosSerializer
+from genericparser.packages.views.common import validate_data
+from genericparser.pub.exceptions import GenericparserException
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+
@swagger_auto_schema(
    method='GET',
    operation_description="Query a NSD",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NsdInfoSerializer(),
        status.HTTP_404_NOT_FOUND: 'NSDs do not exist',
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete a NSD",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: "No content",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=['GET', 'DELETE'])
@view_safe_call_with_log(logger=logger)
def ns_info_rd(request, **kwargs):
    """Read (GET) or delete (DELETE) one NSD resource by its nsdInfoId."""
    nsd_info_id = kwargs.get("nsdInfoId")
    if request.method == 'GET':
        raw = NsDescriptor().query_single(nsd_info_id)
        serialized = validate_data(raw, NsdInfoSerializer)
        return Response(data=serialized.data, status=status.HTTP_200_OK)
    if request.method == 'DELETE':
        NsDescriptor().delete_single(nsd_info_id)
        return Response(status=status.HTTP_204_NO_CONTENT)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Create a NSD",
    request_body=CreateNsdInfoRequestSerializer(),
    responses={
        status.HTTP_201_CREATED: NsdInfoSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method='GET',
    operation_description="Query multiple NSDs",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NsdInfosSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=['POST', 'GET'])
@view_safe_call_with_log(logger=logger)
def ns_descriptors_rc(request):
    """Create a new NSD resource (POST) or list NSDs (GET)."""
    if request.method == 'POST':
        body = validate_data(request.data, CreateNsdInfoRequestSerializer)
        created = NsDescriptor().create(body.data)
        serialized = validate_data(created, NsdInfoSerializer)
        return Response(data=serialized.data, status=status.HTTP_201_CREATED)

    if request.method == 'GET':
        # Optional nsdId filter; None means "return everything".
        nsd_id = request.query_params.get("nsdId", None)
        listed = NsDescriptor().query_multiple(nsd_id)
        serialized = validate_data(listed, NsdInfosSerializer)
        return Response(data=serialized.data, status=status.HTTP_200_OK)
+
+
@swagger_auto_schema(
    method='PUT',
    operation_description="Upload NSD content",
    request_body=no_body,
    responses={
        # Fixed copy-paste: 204 description said 'PNFD file' on an NSD upload.
        status.HTTP_204_NO_CONTENT: "No content",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method='GET',
    operation_description="Download NSD content",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: "No content",
        status.HTTP_404_NOT_FOUND: 'NSD does not exist.',
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=['PUT', 'GET'])
@view_safe_call_with_log(logger=logger)
def nsd_content_ru(request, **kwargs):
    """Upload (PUT) or download (GET) the content of an on-boarded NSD."""
    nsd_info_id = kwargs.get("nsdInfoId")
    if request.method == 'PUT':
        files = request.FILES.getlist('file')
        try:
            local_file_name = NsDescriptor().upload(nsd_info_id, files[0])
            NsDescriptor().parse_nsd_and_save(nsd_info_id, local_file_name)
            return Response(data=None, status=status.HTTP_204_NO_CONTENT)
        except Exception:
            # Single handler replaces two duplicated branches that both did
            # the same cleanup; bare `raise` preserves the original traceback
            # for the safe-call decorator to classify and log.
            NsDescriptor().handle_upload_failed(nsd_info_id)
            raise

    if request.method == 'GET':
        # Honor an HTTP Range header, if the client supplied one.
        file_range = request.META.get('RANGE')
        file_iterator = NsDescriptor().download(nsd_info_id, file_range)
        return StreamingHttpResponse(file_iterator, status=status.HTTP_200_OK)
diff --git a/genericparser/packages/views/nsdm_subscription_views.py b/genericparser/packages/views/nsdm_subscription_views.py
new file mode 100644 (file)
index 0000000..865ece4
--- /dev/null
@@ -0,0 +1,259 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import traceback
+
+from drf_yasg.utils import swagger_auto_schema, no_body
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from genericparser.packages.serializers.nsdm_filter_data \
+    import NsdmNotificationsFilter
+from genericparser.packages.serializers.nsdm_subscription import \
+    NsdmSubscriptionsSerializer, \
+    NsdmSubscriptionIdSerializer, \
+    NsdmSubscriptionSerializer, \
+    NsdmSubscriptionRequestSerializer
+from genericparser.packages.serializers.response \
+    import ProblemDetailsSerializer
+from genericparser.pub.exceptions import \
+    ResourceNotFoundException, \
+    NsdmBadRequestException, NsdmDuplicateSubscriptionException
+from genericparser.packages.biz.nsdm_subscription import NsdmSubscription
+
+
+logger = logging.getLogger(__name__)
+
+
def validate_data(data, serializer):
    """Deserialize *data* with *serializer*, raising on validation failure.

    Returns the bound serializer instance when valid; otherwise logs and
    raises NsdmBadRequestException carrying the serializer errors.
    NOTE(review): looks like a local twin of the shared validate_data
    helper, differing only in the exception type — confirm if intended.
    """
    bound = serializer(data=data)
    if bound.is_valid():
        return bound
    logger.error('Data validation failed.')
    raise NsdmBadRequestException(bound.errors)
+
+
def get_problem_details_serializer(title, status_code, error_message):
    """Build a ProblemDetails serializer describing one error response."""
    serializer = ProblemDetailsSerializer(data={
        "title": title,
        "status": status_code,
        "detail": error_message,
    })
    # Validation result is deliberately ignored: building an error body
    # must never itself fail while an error is being reported.
    serializer.is_valid()
    return serializer
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Create Subscription for NSD Management",
    request_body=NsdmSubscriptionRequestSerializer(),
    responses={
        status.HTTP_201_CREATED: NsdmSubscriptionSerializer,
        status.HTTP_303_SEE_OTHER: ProblemDetailsSerializer(),
        status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@swagger_auto_schema(
    method='GET',
    operation_description="Query subscriptions for Nsd Management",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NsdmSubscriptionsSerializer(),
        status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
        status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer(),
    }
)
@api_view(http_method_names=['POST', 'GET'])
def nsd_subscription_rc(request):
    """Create (POST) or query (GET) NSD-management subscriptions.

    POST: validates the body, creates the subscription and returns it with
    201.  A duplicate subscription maps to 303, invalid input to 400 and
    anything unexpected to 500.

    GET: validates the optional query parameters as a notification filter
    and returns the matching subscriptions with 200; 400/404/500 on error.
    Every error response carries a ProblemDetails body.
    """
    if request.method == 'POST':
        logger.debug("SubscribeNotification--post::> %s" % request.data)
        try:
            # `title` is referenced by every except-branch below.
            title = 'Creating Subscription Failed!'
            nsdm_subscription_request = \
                validate_data(request.data,
                              NsdmSubscriptionRequestSerializer)
            subscription = NsdmSubscription().create(
                nsdm_subscription_request.data)
            # Re-validate the business-layer result against the response
            # schema before returning it.
            subscription_resp = validate_data(subscription,
                                              NsdmSubscriptionSerializer)
            return Response(data=subscription_resp.data,
                            status=status.HTTP_201_CREATED)
        except NsdmDuplicateSubscriptionException as e:
            # An equivalent subscription already exists -> 303 See Other.
            logger.error(e.message)
            problem_details_serializer = \
                get_problem_details_serializer(title,
                                               status.HTTP_303_SEE_OTHER,
                                               e.message)
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_303_SEE_OTHER)
        except NsdmBadRequestException as e:
            problem_details_serializer = \
                get_problem_details_serializer(title,
                                               status.HTTP_400_BAD_REQUEST,
                                               e.message)
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            logger.error(e.message)
            logger.error(traceback.format_exc())
            problem_details_serializer = \
                get_problem_details_serializer(
                    title,
                    status.HTTP_500_INTERNAL_SERVER_ERROR,
                    e.message)
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    if request.method == 'GET':
        logger.debug("Subscription Notification GET %s" % request.query_params)
        try:
            title = 'Query Subscription Failed!'
            # Only validate filters when the client actually supplied some;
            # an empty dict means "return all subscriptions".
            request_query_params = {}
            if request.query_params:
                request_query_params = \
                    validate_data(request.query_params,
                                  NsdmNotificationsFilter).data
            subscription_data = \
                NsdmSubscription().query_multi_subscriptions(
                    request_query_params)
            subscriptions = validate_data(subscription_data,
                                          NsdmSubscriptionsSerializer)
            return Response(data=subscriptions.data, status=status.HTTP_200_OK)
        except NsdmBadRequestException as e:
            logger.error(e.message)
            problem_details_serializer = \
                get_problem_details_serializer(title,
                                               status.HTTP_400_BAD_REQUEST,
                                               e.message)
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_400_BAD_REQUEST)
        except ResourceNotFoundException as e:
            problem_details_serializer = \
                get_problem_details_serializer(title,
                                               status.HTTP_404_NOT_FOUND,
                                               e.message)
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_404_NOT_FOUND)
        except Exception as e:
            logger.error(e.message)
            # NOTE(review): unlike the POST branch, the formatted traceback
            # is placed in the response detail here — confirm this is
            # intended, as it exposes internals to the client.
            problem_details_serializer = \
                get_problem_details_serializer(
                    title,
                    status.HTTP_500_INTERNAL_SERVER_ERROR,
                    traceback.format_exc())
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+
@swagger_auto_schema(
    method='GET',
    operation_description="Query subscriptions for Nsd Management",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: NsdmSubscriptionSerializer(),
        status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
        status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete subscription for Nsd Management",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: 'No_Content',
        status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
        status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
    }
)
@api_view(http_method_names=['GET', 'DELETE'])
def nsd_subscription_rd(request, **kwargs):
    """Query (GET) or delete (DELETE) one NSD-management subscription.

    The subscription is identified by the ``subscriptionId`` URL kwarg,
    which is validated before the business layer is invoked.  Errors map
    to ProblemDetails bodies: 400 for a malformed id, 404 when the
    subscription does not exist, 500 for anything unexpected.
    """
    subscription_id = kwargs.get("subscriptionId")
    if request.method == 'GET':
        try:
            title = 'Query Subscription Failed!'
            validate_data({'subscription_id': subscription_id},
                          NsdmSubscriptionIdSerializer)
            subscription_data = \
                NsdmSubscription().query_single_subscription(subscription_id)
            subscription = validate_data(subscription_data,
                                         NsdmSubscriptionSerializer)
            return Response(data=subscription.data, status=status.HTTP_200_OK)
        except NsdmBadRequestException as e:
            logger.error(e.message)
            problem_details_serializer = \
                get_problem_details_serializer(title,
                                               status.HTTP_400_BAD_REQUEST,
                                               e.message)
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_400_BAD_REQUEST)
        except ResourceNotFoundException as e:
            logger.error(e.message)
            problem_details_serializer = \
                get_problem_details_serializer(title,
                                               status.HTTP_404_NOT_FOUND,
                                               e.message)
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_404_NOT_FOUND)
        except Exception as e:
            logger.error(e.message)
            logger.error(traceback.format_exc())
            problem_details_serializer = \
                get_problem_details_serializer(
                    title,
                    status.HTTP_500_INTERNAL_SERVER_ERROR,
                    # Fixed user-facing typo: was "subscriptioni".
                    "Query of subscription(%s) Failed"
                    % subscription_id)
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    elif request.method == 'DELETE':
        try:
            title = 'Delete Subscription Failed!'
            validate_data({'subscription_id': subscription_id},
                          NsdmSubscriptionIdSerializer)
            # Called for its side effect only; the previous version bound
            # the return value to an unused variable.
            NsdmSubscription().delete_single_subscription(subscription_id)
            return Response(status=status.HTTP_204_NO_CONTENT)
        except NsdmBadRequestException as e:
            logger.error(e.message)
            problem_details_serializer = \
                get_problem_details_serializer(title,
                                               status.HTTP_400_BAD_REQUEST,
                                               e.message)
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_400_BAD_REQUEST)
        except ResourceNotFoundException as e:
            logger.error(e.message)
            problem_details_serializer = \
                get_problem_details_serializer(title,
                                               status.HTTP_404_NOT_FOUND,
                                               e.message)
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_404_NOT_FOUND)
        except Exception as e:
            logger.error(e.message)
            logger.error(traceback.format_exc())
            problem_details_serializer = \
                get_problem_details_serializer(
                    title,
                    status.HTTP_500_INTERNAL_SERVER_ERROR,
                    "Delete of subscription(%s) Failed"
                    % subscription_id)
            return Response(data=problem_details_serializer.data,
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/genericparser/packages/views/pnf_descriptor_views.py b/genericparser/packages/views/pnf_descriptor_views.py
new file mode 100644 (file)
index 0000000..e82374f
--- /dev/null
@@ -0,0 +1,173 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.http import StreamingHttpResponse
+from drf_yasg.utils import no_body, swagger_auto_schema
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from genericparser.packages.biz.pnf_descriptor import PnfDescriptor
+from genericparser.packages.serializers.create_pnfd_info_request import CreatePnfdInfoRequestSerializer
+from genericparser.packages.serializers.pnfd_info import PnfdInfoSerializer
+from genericparser.packages.serializers.pnfd_infos import PnfdInfosSerializer
+from genericparser.packages.views.common import validate_data
+from genericparser.pub.exceptions import GenericparserException
+from genericparser.packages.serializers.genericparser_serializers import ParseModelRequestSerializer
+from genericparser.packages.serializers.genericparser_serializers import ParseModelResponseSerializer
+from genericparser.packages.serializers.genericparser_serializers import InternalErrorRequestSerializer
+from genericparser.pub.utils.syscomm import fun_name
+from genericparser.pub.utils.values import ignore_case_get
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+
@swagger_auto_schema(
    method='GET',
    operation_description="Query a PNFD",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: PnfdInfoSerializer(),
        status.HTTP_404_NOT_FOUND: "PNFD does not exist",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method='DELETE',
    operation_description="Delete a PNFD",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: "No content",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=['GET', 'DELETE'])
@view_safe_call_with_log(logger=logger)
def pnfd_info_rd(request, **kwargs):
    """Query (GET) or delete (DELETE) one PNF descriptor by pnfdInfoId."""
    pnfd_info_id = kwargs.get('pnfdInfoId')
    if request.method == 'GET':
        logger.debug("Query an individual PNF descriptor> %s" % request.data)
        descriptor = PnfDescriptor().query_single(pnfd_info_id)
        serialized = validate_data(descriptor, PnfdInfoSerializer)
        return Response(data=serialized.data, status=status.HTTP_200_OK)

    if request.method == 'DELETE':
        logger.debug("Delete an individual PNFD resource> %s" % request.data)
        PnfDescriptor().delete_single(pnfd_info_id)
        return Response(data=None, status=status.HTTP_204_NO_CONTENT)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Create a  PNFD",
    request_body=CreatePnfdInfoRequestSerializer(),
    responses={
        status.HTTP_201_CREATED: PnfdInfoSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method='GET',
    operation_description="Query multiple PNFDs",
    request_body=no_body,
    responses={
        status.HTTP_200_OK: PnfdInfosSerializer(),
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=['POST', 'GET'])
@view_safe_call_with_log(logger=logger)
def pnf_descriptors_rc(request):
    """Create a PNFD resource (POST) or list PNF descriptors (GET)."""
    if request.method == 'POST':
        body = validate_data(request.data, CreatePnfdInfoRequestSerializer)
        created = PnfDescriptor().create(body.data)
        serialized = validate_data(created, PnfdInfoSerializer)
        return Response(data=serialized.data, status=status.HTTP_201_CREATED)

    if request.method == 'GET':
        pnfd_id = request.query_params.get('pnfdId', None)
        # Pass the filter only when one was supplied, matching the
        # original optional-argument call pattern.
        if pnfd_id:
            listed = PnfDescriptor().query_multiple(pnfd_id)
        else:
            listed = PnfDescriptor().query_multiple()
        serialized = validate_data(listed, PnfdInfosSerializer)
        return Response(data=serialized.data, status=status.HTTP_200_OK)
+
+
@swagger_auto_schema(
    method='PUT',
    operation_description="Upload PNFD content",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: "No content",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@swagger_auto_schema(
    method='GET',
    operation_description="Fetch PNFD content",
    request_body=no_body,
    responses={
        status.HTTP_204_NO_CONTENT: 'PNFD file',
        status.HTTP_404_NOT_FOUND: "PNFD does not exist",
        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
    }
)
@api_view(http_method_names=['PUT', 'GET'])
@view_safe_call_with_log(logger=logger)
def pnfd_content_ru(request, **kwargs):
    """Upload (PUT) or download (GET) the content of a PNF descriptor."""
    pnfd_info_id = kwargs.get("pnfdInfoId")
    if request.method == 'PUT':
        files = request.FILES.getlist('file')
        try:
            local_file_name = PnfDescriptor().upload(files[0], pnfd_info_id)
            PnfDescriptor().parse_pnfd_and_save(pnfd_info_id, local_file_name)
            return Response(data=None, status=status.HTTP_204_NO_CONTENT)
        except Exception:
            # Single handler replaces two duplicated branches that both did
            # the same cleanup; bare `raise` preserves the original traceback
            # for the safe-call decorator to classify and log.
            PnfDescriptor().handle_upload_failed(pnfd_info_id)
            raise

    if request.method == 'GET':
        file_iterator = PnfDescriptor().download(pnfd_info_id)
        return StreamingHttpResponse(file_iterator, status=status.HTTP_200_OK)
+
+
@swagger_auto_schema(
    method='POST',
    operation_description="Parse PNF model",
    request_body=ParseModelRequestSerializer,
    responses={
        status.HTTP_202_ACCEPTED: ParseModelResponseSerializer,
        status.HTTP_500_INTERNAL_SERVER_ERROR: InternalErrorRequestSerializer})
@api_view(http_method_names=['POST'])
def pnf_model_parser(request, *args, **kwargs):
    """Parse the PNF model of a CSAR package and return the parsed result."""
    csar_id = ignore_case_get(request.data, "csarId")
    inputs = ignore_case_get(request.data, "inputs")
    logger.debug(
        "Enter %s, csar_id=%s, inputs=%s", fun_name(), csar_id, inputs)
    ret = PnfDescriptor().parse_pnfd(csar_id, inputs)
    logger.info("Leave %s, Return value is %s", fun_name(), ret)
    if ret[0] != 0:
        # A non-zero status code from the parser carries the error text
        # in ret[1].
        return Response(data={'error': ret[1]},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    parsed = validate_data(ret[1], ParseModelResponseSerializer)
    return Response(data=parsed.data, status=status.HTTP_202_ACCEPTED)
diff --git a/genericparser/packages/views/vnf_package_artifact_views.py b/genericparser/packages/views/vnf_package_artifact_views.py
new file mode 100644 (file)
index 0000000..a6d57f8
--- /dev/null
@@ -0,0 +1,54 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework import status
+from rest_framework.views import APIView
+from django.http import FileResponse
+
+from genericparser.packages.serializers.response import ProblemDetailsSerializer
+from genericparser.packages.biz.vnf_pkg_artifacts import FetchVnfPkgArtifact
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
# Query-parameter names accepted when filtering package subscriptions.
# NOTE(review): not referenced anywhere else in this module as far as is
# visible here — confirm whether it is still needed.
VALID_FILTERS = [
    "callbackUri",
    "notificationTypes",
    "vnfdId",
    "vnfPkgId",
    "operationalState",
    "usageState"
]
+
+
class FetchVnfPkgmArtifactsView(APIView):
    """Fetch an individual artifact contained in an on-boarded VNF package."""

    @swagger_auto_schema(
        responses={
            status.HTTP_200_OK: None,
            status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
            status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
        }
    )
    @view_safe_call_with_log(logger=logger)
    def get(self, request, vnfPkgId, artifactPath):
        """Stream the artifact at *artifactPath* from package *vnfPkgId*."""
        logger.debug("FetchVnfPkgmArtifactsView--get::> ")
        payload = FetchVnfPkgArtifact().fetch(vnfPkgId, artifactPath)
        return FileResponse(payload)
diff --git a/genericparser/packages/views/vnf_package_subscription_views.py b/genericparser/packages/views/vnf_package_subscription_views.py
new file mode 100644 (file)
index 0000000..897b43c
--- /dev/null
@@ -0,0 +1,161 @@
+# Copyright (C) 2019 Verizon. All Rights Reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import traceback
+import logging
+
+from drf_yasg.utils import swagger_auto_schema
+from rest_framework import status
+from rest_framework.views import APIView
+from rest_framework.response import Response
+
+from genericparser.packages.serializers.vnf_pkg_subscription import PkgmSubscriptionRequestSerializer, \
+    PkgmSubscriptionSerializer, PkgmSubscriptionsSerializer
+from genericparser.packages.serializers.response import ProblemDetailsSerializer
+from genericparser.packages.biz.vnf_pkg_subscription import CreateSubscription, QuerySubscription, TerminateSubscription
+from genericparser.packages.views.common import validate_data
+from genericparser.pub.exceptions import VnfPkgDuplicateSubscriptionException, VnfPkgSubscriptionException, \
+    SubscriptionDoesNotExistsException
+
+logger = logging.getLogger(__name__)
+# Query parameters accepted by the subscription list endpoint (see get below).
+VALID_FILTERS = ["callbackUri", "notificationTypes", "vnfdId", "vnfPkgId", "operationalState", "usageState"]
+
+
+def get_problem_details_serializer(status_code, error_message):
+    """Build a validated ProblemDetailsSerializer for the given HTTP status and detail text."""
+    problem_details = {
+        "status": status_code,
+        "detail": error_message
+    }
+    problem_details_serializer = ProblemDetailsSerializer(data=problem_details)
+    # is_valid() must run before .data can be read; the result is ignored
+    # because the payload is built locally and always validates.
+    problem_details_serializer.is_valid()
+    return problem_details_serializer
+
+
+class CreateQuerySubscriptionView(APIView):
+
+    @swagger_auto_schema(
+        request_body=PkgmSubscriptionRequestSerializer,
+        responses={
+            status.HTTP_201_CREATED: PkgmSubscriptionSerializer(),
+            status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+        }
+    )
+    def post(self, request):
+        logger.debug("Create VNF package Subscription> %s" % request.data)
+        try:
+            vnf_pkg_subscription_request = validate_data(request.data, PkgmSubscriptionRequestSerializer)
+            data = CreateSubscription(vnf_pkg_subscription_request.data).do_biz()
+            subscription_info = validate_data(data, PkgmSubscriptionSerializer)
+            return Response(data=subscription_info.data, status=status.HTTP_201_CREATED)
+        except VnfPkgDuplicateSubscriptionException as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            problem_details_serializer = get_problem_details_serializer(status.HTTP_303_SEE_OTHER,
+                                                                        traceback.format_exc())
+            return Response(data=problem_details_serializer.data, status=status.HTTP_303_SEE_OTHER)
+        except Exception as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            problem_details_serializer = get_problem_details_serializer(status.HTTP_500_INTERNAL_SERVER_ERROR,
+                                                                        traceback.format_exc())
+            return Response(data=problem_details_serializer.data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @swagger_auto_schema(
+        responses={
+            status.HTTP_200_OK: PkgmSubscriptionSerializer(),
+            status.HTTP_400_BAD_REQUEST: ProblemDetailsSerializer(),
+            status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+        }
+    )
+    def get(self, request):
+        logger.debug("SubscribeNotification--get::> %s" % request.query_params)
+        try:
+            if request.query_params and not set(request.query_params).issubset(set(VALID_FILTERS)):
+                problem_details_serializer = get_problem_details_serializer(status.HTTP_400_BAD_REQUEST,
+                                                                            "Not a valid filter")
+                return Response(data=problem_details_serializer.data, status=status.HTTP_400_BAD_REQUEST)
+            resp_data = QuerySubscription().query_multi_subscriptions(request.query_params)
+
+            subscriptions_serializer = PkgmSubscriptionsSerializer(data=resp_data)
+            if not subscriptions_serializer.is_valid():
+                raise VnfPkgSubscriptionException(subscriptions_serializer.errors)
+
+            return Response(data=subscriptions_serializer.data, status=status.HTTP_200_OK)
+
+        except Exception as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            problem_details_serializer = get_problem_details_serializer(status.HTTP_500_INTERNAL_SERVER_ERROR,
+                                                                        traceback.format_exc())
+            return Response(data=problem_details_serializer.data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+
+class QueryTerminateSubscriptionView(APIView):
+
+    @swagger_auto_schema(
+        responses={
+            status.HTTP_200_OK: PkgmSubscriptionSerializer(),
+            status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
+            status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+        }
+    )
+    def get(self, request, subscriptionId):
+        logger.debug("SubscribeNotification--get::> %s" % subscriptionId)
+        try:
+
+            resp_data = QuerySubscription().query_single_subscription(subscriptionId)
+
+            subscription_serializer = PkgmSubscriptionSerializer(data=resp_data)
+            if not subscription_serializer.is_valid():
+                raise VnfPkgSubscriptionException(subscription_serializer.errors)
+
+            return Response(data=subscription_serializer.data, status=status.HTTP_200_OK)
+        except SubscriptionDoesNotExistsException as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            problem_details_serializer = get_problem_details_serializer(status.HTTP_404_NOT_FOUND,
+                                                                        traceback.format_exc())
+            return Response(data=problem_details_serializer.data, status=status.HTTP_404_NOT_FOUND)
+        except Exception as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            problem_details_serializer = get_problem_details_serializer(status.HTTP_500_INTERNAL_SERVER_ERROR,
+                                                                        traceback.format_exc())
+            return Response(data=problem_details_serializer.data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+    @swagger_auto_schema(
+        responses={
+            status.HTTP_204_NO_CONTENT: "",
+            status.HTTP_404_NOT_FOUND: ProblemDetailsSerializer(),
+            status.HTTP_500_INTERNAL_SERVER_ERROR: ProblemDetailsSerializer()
+        }
+    )
+    def delete(self, request, subscriptionId):
+        logger.debug("SubscribeNotification--get::> %s" % subscriptionId)
+        try:
+            TerminateSubscription().terminate(subscriptionId)
+            return Response(status=status.HTTP_204_NO_CONTENT)
+        except SubscriptionDoesNotExistsException as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            problem_details_serializer = get_problem_details_serializer(status.HTTP_404_NOT_FOUND,
+                                                                        traceback.format_exc())
+            return Response(data=problem_details_serializer.data, status=status.HTTP_404_NOT_FOUND)
+        except Exception as e:
+            logger.error(e.message)
+            logger.error(traceback.format_exc())
+            problem_details_serializer = get_problem_details_serializer(status.HTTP_500_INTERNAL_SERVER_ERROR,
+                                                                        traceback.format_exc())
+            return Response(data=problem_details_serializer.data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
diff --git a/genericparser/packages/views/vnf_package_views.py b/genericparser/packages/views/vnf_package_views.py
new file mode 100644 (file)
index 0000000..91238f9
--- /dev/null
@@ -0,0 +1,169 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+
+from django.http import StreamingHttpResponse
+from drf_yasg.utils import swagger_auto_schema, no_body
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+from genericparser.pub.exceptions import GenericparserException
+from genericparser.packages.serializers.upload_vnf_pkg_from_uri_req import UploadVnfPackageFromUriRequestSerializer
+from genericparser.packages.serializers.create_vnf_pkg_info_req import CreateVnfPkgInfoRequestSerializer
+from genericparser.packages.serializers.vnf_pkg_info import VnfPkgInfoSerializer
+from genericparser.packages.serializers.vnf_pkg_infos import VnfPkgInfosSerializer
+from genericparser.packages.biz.vnf_package import VnfPackage, VnfPkgUploadThread, parse_vnfd_and_save, handle_upload_failed
+from genericparser.packages.views.common import validate_data
+from .common import view_safe_call_with_log
+
+logger = logging.getLogger(__name__)
+
+
+@swagger_auto_schema(
+    method="GET",
+    operation_description="Query multiple VNF package resource",
+    request_body=no_body,
+    responses={
+        status.HTTP_200_OK: VnfPkgInfosSerializer(),
+        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+    }
+)
+@swagger_auto_schema(
+    method="POST",
+    operation_description="Create an individual VNF package resource",
+    request_body=CreateVnfPkgInfoRequestSerializer,
+    responses={
+        status.HTTP_201_CREATED: VnfPkgInfoSerializer(),
+        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+    }
+)
+@api_view(http_method_names=["GET", "POST"])
+@view_safe_call_with_log(logger=logger)
+def vnf_packages_rc(request):
+    """List all VNF packages (GET) or create a new VNF package resource (POST).
+
+    Errors are converted to problem-details responses by the
+    view_safe_call_with_log decorator.
+    """
+    if request.method == 'GET':
+        logger.debug("Query VNF packages> %s" % request.data)
+        data = VnfPackage().query_multiple()
+        vnf_pkg_infos = validate_data(data, VnfPkgInfosSerializer)
+        return Response(data=vnf_pkg_infos.data, status=status.HTTP_200_OK)
+
+    if request.method == 'POST':
+        logger.debug("Create VNF package> %s" % request.data)
+        create_vnf_pkg_info_request = validate_data(request.data, CreateVnfPkgInfoRequestSerializer)
+        data = VnfPackage().create_vnf_pkg(create_vnf_pkg_info_request.data)
+        vnf_pkg_info = validate_data(data, VnfPkgInfoSerializer)
+        return Response(data=vnf_pkg_info.data, status=status.HTTP_201_CREATED)
+
+
+@swagger_auto_schema(
+    method='PUT',
+    operation_description="Upload VNF package content",
+    request_body=no_body,
+    responses={
+        status.HTTP_202_ACCEPTED: "Successfully",
+        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+    }
+)
+@swagger_auto_schema(
+    method="GET",
+    operation_description="Fetch VNF package content",
+    request_body=no_body,
+    responses={
+        status.HTTP_200_OK: VnfPkgInfosSerializer(),
+        status.HTTP_404_NOT_FOUND: "VNF package does not exist",
+        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+    }
+)
+@api_view(http_method_names=["PUT", "GET"])
+@view_safe_call_with_log(logger=logger)
+def package_content_ru(request, **kwargs):
+    vnf_pkg_id = kwargs.get("vnfPkgId")
+    if request.method == "PUT":
+        logger.debug("Upload VNF package %s" % vnf_pkg_id)
+        files = request.FILES.getlist('file')
+        try:
+            local_file_name = VnfPackage().upload(vnf_pkg_id, files[0])
+            parse_vnfd_and_save(vnf_pkg_id, local_file_name)
+            return Response(None, status=status.HTTP_202_ACCEPTED)
+        except GenericparserException as e:
+            handle_upload_failed(vnf_pkg_id)
+            raise e
+        except Exception as e:
+            handle_upload_failed(vnf_pkg_id)
+            raise e
+
+    if request.method == "GET":
+        file_range = request.META.get('RANGE')
+        file_iterator = VnfPackage().download(vnf_pkg_id, file_range)
+        return StreamingHttpResponse(file_iterator, status=status.HTTP_200_OK)
+
+
+@swagger_auto_schema(
+    method='POST',
+    operation_description="Upload VNF package content from uri",
+    request_body=UploadVnfPackageFromUriRequestSerializer,
+    responses={
+        status.HTTP_202_ACCEPTED: "Successfully",
+        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+    }
+)
+@api_view(http_method_names=['POST'])
+@view_safe_call_with_log(logger=logger)
+def upload_from_uri_c(request, **kwargs):
+    vnf_pkg_id = kwargs.get("vnfPkgId")
+    try:
+        upload_vnf_from_uri_request = validate_data(request.data, UploadVnfPackageFromUriRequestSerializer)
+        VnfPkgUploadThread(upload_vnf_from_uri_request.data, vnf_pkg_id).start()
+        return Response(None, status=status.HTTP_202_ACCEPTED)
+    except GenericparserException as e:
+        handle_upload_failed(vnf_pkg_id)
+        raise e
+    except Exception as e:
+        handle_upload_failed(vnf_pkg_id)
+        raise e
+
+
+@swagger_auto_schema(
+    method='GET',
+    operation_description="Query an individual VNF package resource",
+    request_body=no_body,
+    responses={
+        status.HTTP_200_OK: VnfPkgInfoSerializer(),
+        status.HTTP_404_NOT_FOUND: "VNF package does not exist",
+        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+    }
+)
+@swagger_auto_schema(
+    method='DELETE',
+    operation_description="Delete an individual VNF package resource",
+    request_body=no_body,
+    responses={
+        status.HTTP_204_NO_CONTENT: "No content",
+        status.HTTP_500_INTERNAL_SERVER_ERROR: "Internal error"
+    }
+)
+@api_view(http_method_names=['GET', 'DELETE'])
+@view_safe_call_with_log(logger=logger)
+def vnf_package_rd(request, **kwargs):
+    """Read (GET) or delete (DELETE) the individual VNF package kwargs['vnfPkgId']."""
+    vnf_pkg_id = kwargs.get("vnfPkgId")
+    if request.method == 'GET':
+        logger.debug("Query an individual VNF package> %s" % request.data)
+        data = VnfPackage().query_single(vnf_pkg_id)
+        vnf_pkg_info = validate_data(data, VnfPkgInfoSerializer)
+        return Response(data=vnf_pkg_info.data, status=status.HTTP_200_OK)
+
+    if request.method == 'DELETE':
+        logger.debug("Delete an individual VNF package> %s" % request.data)
+        VnfPackage().delete_vnf_pkg(vnf_pkg_id)
+        return Response(data=None, status=status.HTTP_204_NO_CONTENT)
diff --git a/genericparser/pub/__init__.py b/genericparser/pub/__init__.py
new file mode 100644 (file)
index 0000000..c7b6818
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/pub/config/__init__.py b/genericparser/pub/config/__init__.py
new file mode 100644 (file)
index 0000000..c7b6818
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/pub/config/config.py b/genericparser/pub/config/config.py
new file mode 100644 (file)
index 0000000..abecd79
--- /dev/null
@@ -0,0 +1,83 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# [MSB] location of the Microservice Bus this service registers with
+MSB_SERVICE_IP = '127.0.0.1'
+MSB_SERVICE_PORT = '80'
+
+# [REDIS] cache connection settings
+REDIS_HOST = '127.0.0.1'
+REDIS_PORT = '6379'
+REDIS_PASSWD = ''
+
+# [mysql] backing database connection settings
+# NOTE(review): "genericpaser" looks like a typo of "genericparser"; it is
+# used consistently for name/user/password here, so confirm against the DB
+# provisioning scripts before renaming.
+DB_IP = "127.0.0.1"
+DB_PORT = 3306
+DB_NAME = "genericpaser"
+DB_USER = "genericpaser"
+DB_PASSWD = "genericpaser"
+
+# [MDC] logging context: service name and forwarded-header fields used to
+# resolve the originating client address
+SERVICE_NAME = "genericparser"
+FORWARDED_FOR_FIELDS = ["HTTP_X_FORWARDED_FOR", "HTTP_X_FORWARDED_HOST",
+                        "HTTP_X_FORWARDED_SERVER"]
+
+# [register] service entries (genericparser, nsd, vnfpkgm) announced to the
+# MSB registration endpoint at startup
+REG_TO_MSB_WHEN_START = True
+REG_TO_MSB_REG_URL = "/api/microservices/v1/services"
+REG_TO_MSB_REG_PARAM = [{
+    "serviceName": "genericparser",
+    "version": "v1",
+    "url": "/api/genericparser/v1",
+    "protocol": "REST",
+    "visualRange": "1",
+    "nodes": [{
+        "ip": "127.0.0.1",
+        "port": "8806",
+        "ttl": 0
+    }]
+}, {
+    "serviceName": "nsd",
+    "version": "v1",
+    "url": "/api/nsd/v1",
+    "protocol": "REST",
+    "visualRange": "1",
+    "nodes": [{
+        "ip": "127.0.0.1",
+        "port": "8806",
+        "ttl": 0
+    }]
+}, {
+    "serviceName": "vnfpkgm",
+    "version": "v1",
+    "url": "/api/vnfpkgm/v1",
+    "protocol": "REST",
+    "visualRange": "1",
+    "nodes": [{
+        "ip": "127.0.0.1",
+        "port": "8806",
+        "ttl": 0
+    }]
+}]
+
+# genericparser paths (actual values are assigned in settings.py)
+# CATALOG_ROOT_PATH = None
+# CATALOG_URL_PATH = None
+GENERICPARSER_ROOT_PATH = None
+GENERICPARSER_URL_PATH = None
+
+# [sdc config]
+# NOTE(review): hard-coded SDC credentials committed to source control --
+# consider moving them to environment/instance configuration.
+SDC_BASE_URL = "http://msb-iag/api"
+SDC_USER = "aai"
+SDC_PASSWD = "Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"
diff --git a/genericparser/pub/database/__init__.py b/genericparser/pub/database/__init__.py
new file mode 100644 (file)
index 0000000..c7b6818
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/pub/database/migrations/0001_initial.py b/genericparser/pub/database/migrations/0001_initial.py
new file mode 100644 (file)
index 0000000..8446b6e
--- /dev/null
@@ -0,0 +1,229 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Generated by Django 1.11.9 on 2019-04-16 03:53
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    initial = True
+
+    dependencies = [
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='JobModel',
+            fields=[
+                ('jobid', models.CharField(db_column=b'JOBID', max_length=255, primary_key=True, serialize=False)),
+                ('jobtype', models.CharField(db_column=b'JOBTYPE', max_length=255)),
+                ('jobaction', models.CharField(db_column=b'JOBACTION', max_length=255)),
+                ('resid', models.CharField(db_column=b'RESID', max_length=255)),
+                ('status', models.IntegerField(blank=True, db_column=b'STATUS', null=True)),
+                ('starttime', models.CharField(blank=True, db_column=b'STARTTIME', max_length=255, null=True)),
+                ('endtime', models.CharField(blank=True, db_column=b'ENDTIME', max_length=255, null=True)),
+                ('progress', models.IntegerField(blank=True, db_column=b'PROGRESS', null=True)),
+                ('user', models.CharField(blank=True, db_column=b'USER', max_length=255, null=True)),
+                ('parentjobid', models.CharField(blank=True, db_column=b'PARENTJOBID', max_length=255, null=True)),
+                ('resname', models.CharField(blank=True, db_column=b'RESNAME', max_length=255, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_JOB',
+            },
+        ),
+        migrations.CreateModel(
+            name='JobStatusModel',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('indexid', models.IntegerField(db_column=b'INDEXID')),
+                ('jobid', models.CharField(db_column=b'JOBID', max_length=255)),
+                ('status', models.CharField(db_column=b'STATUS', max_length=255)),
+                ('progress', models.IntegerField(blank=True, db_column=b'PROGRESS', null=True)),
+                ('descp', models.TextField(db_column=b'DESCP', max_length=65535)),
+                ('errcode', models.CharField(blank=True, db_column=b'ERRCODE', max_length=255, null=True)),
+                ('addtime', models.CharField(blank=True, db_column=b'ADDTIME', max_length=255, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_JOB_STATUS',
+            },
+        ),
+        migrations.CreateModel(
+            name='NsdmSubscriptionModel',
+            fields=[
+                ('subscriptionid', models.CharField(db_column=b'SUBSCRIPTIONID', max_length=255, primary_key=True, serialize=False)),
+                ('notificationTypes', models.TextField(db_column=b'NOTIFICATIONTYPES', null=True)),
+                ('auth_info', models.TextField(db_column=b'AUTHINFO', null=True)),
+                ('callback_uri', models.CharField(db_column=b'CALLBACKURI', max_length=255)),
+                ('nsdInfoId', models.TextField(db_column=b'NSDINFOID', null=True)),
+                ('nsdId', models.TextField(db_column=b'NSDID', null=True)),
+                ('nsdName', models.TextField(db_column=b'NSDNAME', null=True)),
+                ('nsdVersion', models.TextField(db_column=b'NSDVERSION', null=True)),
+                ('nsdDesigner', models.TextField(db_column=b'NSDDESIGNER', null=True)),
+                ('nsdInvariantId', models.TextField(db_column=b'NSDINVARIANTID', null=True)),
+                ('vnfPkgIds', models.TextField(db_column=b'VNFPKGIDS', null=True)),
+                ('pnfdInfoIds', models.TextField(db_column=b'PNFDINFOIDS', null=True)),
+                ('nestedNsdInfoIds', models.TextField(db_column=b'NESTEDNSDINFOIDS', null=True)),
+                ('nsdOnboardingState', models.TextField(db_column=b'NSDONBOARDINGSTATE', null=True)),
+                ('nsdOperationalState', models.TextField(db_column=b'NSDOPERATIONALSTATE', null=True)),
+                ('nsdUsageState', models.TextField(db_column=b'NSDUSAGESTATE', null=True)),
+                ('pnfdId', models.TextField(db_column=b'PNFDID', null=True)),
+                ('pnfdName', models.TextField(db_column=b'PNFDNAME', null=True)),
+                ('pnfdVersion', models.TextField(db_column=b'PNFDVERSION', null=True)),
+                ('pnfdProvider', models.TextField(db_column=b'PNFDPROVIDER', null=True)),
+                ('pnfdInvariantId', models.TextField(db_column=b'PNFDINVARIANTID', null=True)),
+                ('pnfdOnboardingState', models.TextField(db_column=b'PNFDONBOARDINGSTATE', null=True)),
+                ('pnfdUsageState', models.TextField(db_column=b'PNFDUSAGESTATE', null=True)),
+                ('links', models.TextField(db_column=b'LINKS')),
+            ],
+            options={
+                'db_table': 'CATALOG_NSDM_SUBSCRIPTION',
+            },
+        ),
+        migrations.CreateModel(
+            name='NSPackageModel',
+            fields=[
+                ('nsPackageId', models.CharField(db_column=b'NSPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+                ('nsPackageUri', models.CharField(blank=True, db_column=b'NSPACKAGEURI', max_length=300, null=True)),
+                ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
+                ('sdcCsarId', models.CharField(blank=True, db_column=b'SDCCSARID', max_length=50, null=True)),
+                ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
+                ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
+                ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
+                ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
+                ('nsdId', models.CharField(blank=True, db_column=b'NSDID', max_length=50, null=True)),
+                ('invariantId', models.CharField(blank=True, db_column=b'INVARIANTID', max_length=50, null=True)),
+                ('nsdName', models.CharField(blank=True, db_column=b'NSDNAME', max_length=50, null=True)),
+                ('nsdDesginer', models.CharField(blank=True, db_column=b'NSDDESIGNER', max_length=50, null=True)),
+                ('nsdDescription', models.CharField(blank=True, db_column=b'NSDDESCRIPTION', max_length=100, null=True)),
+                ('nsdVersion', models.CharField(blank=True, db_column=b'NSDVERSION', max_length=20, null=True)),
+                ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
+                ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
+                ('nsdModel', models.TextField(blank=True, db_column=b'NSDMODEL', max_length=65535, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_NSPACKAGE',
+            },
+        ),
+        migrations.CreateModel(
+            name='PnfPackageModel',
+            fields=[
+                ('pnfPackageId', models.CharField(db_column=b'PNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+                ('pnfPackageUri', models.CharField(blank=True, db_column=b'PNFPACKAGEURI', max_length=300, null=True)),
+                ('sdcCSARUri', models.CharField(blank=True, db_column=b'SDCCSARURI', max_length=300, null=True)),
+                ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
+                ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
+                ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
+                ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
+                ('pnfdId', models.CharField(blank=True, db_column=b'PNFDID', max_length=50, null=True)),
+                ('pnfVendor', models.CharField(blank=True, db_column=b'VENDOR', max_length=50, null=True)),
+                ('pnfdProductName', models.CharField(blank=True, db_column=b'PNFDPRODUCTNAME', max_length=50, null=True)),
+                ('pnfdVersion', models.CharField(blank=True, db_column=b'PNFDVERSION', max_length=20, null=True)),
+                ('pnfSoftwareVersion', models.CharField(blank=True, db_column=b'PNFSOFTWAREVERSION', max_length=20, null=True)),
+                ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
+                ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
+                ('pnfdModel', models.TextField(blank=True, db_column=b'PNFDMODEL', max_length=65535, null=True)),
+                ('pnfdName', models.TextField(blank=True, db_column=b'PNFDNAME', max_length=65535, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_PNFPACKAGE',
+            },
+        ),
+        migrations.CreateModel(
+            name='ServicePackageModel',
+            fields=[
+                ('servicePackageId', models.CharField(db_column=b'SERVICEPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+                ('servicePackageUri', models.CharField(blank=True, db_column=b'SERVICEPACKAGEURI', max_length=300, null=True)),
+                ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
+                ('sdcCsarId', models.CharField(blank=True, db_column=b'SDCCSARID', max_length=50, null=True)),
+                ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
+                ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
+                ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
+                ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
+                ('servicedId', models.CharField(blank=True, db_column=b'SERVICEDID', max_length=50, null=True)),
+                ('invariantId', models.CharField(blank=True, db_column=b'INVARIANTID', max_length=50, null=True)),
+                ('servicedName', models.CharField(blank=True, db_column=b'SERVICEDNAME', max_length=50, null=True)),
+                ('servicedDesigner', models.CharField(blank=True, db_column=b'SERVICEDDESIGNER', max_length=50, null=True)),
+                ('servicedDescription', models.CharField(blank=True, db_column=b'SERVICEDDESCRIPTION', max_length=100, null=True)),
+                ('servicedVersion', models.CharField(blank=True, db_column=b'SERVICEDVERSION', max_length=20, null=True)),
+                ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
+                ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
+                ('servicedModel', models.TextField(blank=True, db_column=b'SERVICEDMODEL', max_length=65535, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_SERVICEPACKAGE',
+            },
+        ),
+        migrations.CreateModel(
+            name='SoftwareImageModel',
+            fields=[
+                ('imageid', models.CharField(db_column=b'IMAGEID', max_length=50, primary_key=True, serialize=False)),
+                ('containerFormat', models.CharField(db_column=b'CONTAINERFORMAT', max_length=20)),
+                ('diskFormat', models.CharField(db_column=b'DISKFORMAT', max_length=20)),
+                ('mindisk', models.CharField(db_column=b'MINDISK', max_length=20)),
+                ('minram', models.CharField(db_column=b'MINRAM', max_length=20)),
+                ('usermetadata', models.CharField(db_column=b'USAERMETADATA', max_length=1024)),
+                ('vnfPackageId', models.CharField(db_column=b'VNFPACKAGEID', max_length=50)),
+                ('filePath', models.CharField(db_column=b'FILEPATH', max_length=300)),
+                ('status', models.CharField(db_column=b'STATUS', max_length=10)),
+                ('vimid', models.CharField(db_column=b'VIMID', max_length=50)),
+            ],
+            options={
+                'db_table': 'CATALOG_SOFTWAREIMAGEMODEL',
+            },
+        ),
+        migrations.CreateModel(
+            name='VnfPackageModel',
+            fields=[
+                ('vnfPackageId', models.CharField(db_column=b'VNFPACKAGEID', max_length=50, primary_key=True, serialize=False)),
+                ('vnfPackageUri', models.CharField(blank=True, db_column=b'VNFPACKAGEURI', max_length=300, null=True)),
+                ('SdcCSARUri', models.CharField(blank=True, db_column=b'SDCCSARURI', max_length=300, null=True)),
+                ('checksum', models.CharField(blank=True, db_column=b'CHECKSUM', max_length=50, null=True)),
+                ('onboardingState', models.CharField(blank=True, db_column=b'ONBOARDINGSTATE', max_length=20, null=True)),
+                ('operationalState', models.CharField(blank=True, db_column=b'OPERATIONALSTATE', max_length=20, null=True)),
+                ('usageState', models.CharField(blank=True, db_column=b'USAGESTATE', max_length=20, null=True)),
+                ('deletionPending', models.CharField(blank=True, db_column=b'DELETIONPENDING', max_length=20, null=True)),
+                ('vnfdId', models.CharField(blank=True, db_column=b'VNFDID', max_length=50, null=True)),
+                ('vnfVendor', models.CharField(blank=True, db_column=b'VENDOR', max_length=50, null=True)),
+                ('vnfdProductName', models.CharField(blank=True, db_column=b'VNFDPRODUCTNAME', max_length=50, null=True)),
+                ('vnfdVersion', models.CharField(blank=True, db_column=b'VNFDVERSION', max_length=20, null=True)),
+                ('vnfSoftwareVersion', models.CharField(blank=True, db_column=b'VNFSOFTWAREVERSION', max_length=20, null=True)),
+                ('userDefinedData', models.TextField(blank=True, db_column=b'USERDEFINEDDATA', max_length=1024, null=True)),
+                ('localFilePath', models.CharField(blank=True, db_column=b'LOCALFILEPATH', max_length=300, null=True)),
+                ('vnfdModel', models.TextField(blank=True, db_column=b'VNFDMODEL', max_length=65535, null=True)),
+            ],
+            options={
+                'db_table': 'CATALOG_VNFPACKAGE',
+            },
+        ),
+        migrations.CreateModel(
+            name='VnfPkgSubscriptionModel',
+            fields=[
+                ('subscription_id', models.CharField(db_column=b'SUBSCRIPTION_ID', max_length=255, primary_key=True, serialize=False)),
+                ('callback_uri', models.URLField(db_column=b'CALLBACK_URI', max_length=255)),
+                ('auth_info', models.TextField(db_column=b'AUTH_INFO')),
+                ('usage_states', models.TextField(db_column=b'USAGE_STATES')),
+                ('notification_types', models.TextField(db_column=b'NOTIFICATION_TYPES')),
+                ('vnfd_id', models.TextField(db_column=b'VNFD_ID')),
+                ('vnf_pkg_id', models.TextField(db_column=b'VNF_PKG_ID')),
+                ('operation_states', models.TextField(db_column=b'OPERATION_STATES')),
+                ('vnf_products_from_provider', models.TextField(db_column=b'VNF_PRODUCTS_FROM_PROVIDER')),
+                ('links', models.TextField(db_column=b'LINKS')),
+            ],
+            options={
+                'db_table': 'VNF_PKG_SUBSCRIPTION',
+            },
+        ),
+    ]
diff --git a/genericparser/pub/database/migrations/__init__.py b/genericparser/pub/database/migrations/__init__.py
new file mode 100644 (file)
index 0000000..0c847b7
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2019 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/pub/database/models.py b/genericparser/pub/database/models.py
new file mode 100644 (file)
index 0000000..9f0b498
--- /dev/null
@@ -0,0 +1,234 @@
+# Copyright 2016-2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.db import models
+
+
class NSPackageModel(models.Model):
    """Catalog record for an onboarded NS (network service) package."""
    nsPackageId = models.CharField(max_length=50, primary_key=True, db_column="NSPACKAGEID")
    nsPackageUri = models.CharField(max_length=300, null=True, blank=True, db_column="NSPACKAGEURI")
    checksum = models.CharField(max_length=50, null=True, blank=True, db_column="CHECKSUM")
    sdcCsarId = models.CharField(max_length=50, null=True, blank=True, db_column="SDCCSARID")
    onboardingState = models.CharField(max_length=20, null=True, blank=True, db_column="ONBOARDINGSTATE")
    operationalState = models.CharField(max_length=20, null=True, blank=True, db_column="OPERATIONALSTATE")
    usageState = models.CharField(max_length=20, null=True, blank=True, db_column="USAGESTATE")
    deletionPending = models.CharField(max_length=20, null=True, blank=True, db_column="DELETIONPENDING")
    nsdId = models.CharField(max_length=50, null=True, blank=True, db_column="NSDID")
    invariantId = models.CharField(max_length=50, null=True, blank=True, db_column="INVARIANTID")  # nsdInvariantId
    nsdName = models.CharField(max_length=50, null=True, blank=True, db_column="NSDNAME")
    # NOTE(review): attribute name has a typo ("Desginer"); kept for caller compatibility.
    nsdDesginer = models.CharField(max_length=50, null=True, blank=True, db_column="NSDDESIGNER")
    nsdDescription = models.CharField(max_length=100, null=True, blank=True, db_column="NSDDESCRIPTION")
    nsdVersion = models.CharField(max_length=20, null=True, blank=True, db_column="NSDVERSION")
    userDefinedData = models.TextField(max_length=1024, null=True, blank=True, db_column="USERDEFINEDDATA")
    localFilePath = models.CharField(max_length=300, null=True, blank=True, db_column="LOCALFILEPATH")
    nsdModel = models.TextField(max_length=65535, null=True, blank=True, db_column="NSDMODEL")

    class Meta:
        db_table = 'CATALOG_NSPACKAGE'
+
+
class ServicePackageModel(models.Model):
    """Catalog record for an onboarded service package."""
    servicePackageId = models.CharField(max_length=50, primary_key=True, db_column="SERVICEPACKAGEID")
    servicePackageUri = models.CharField(max_length=300, null=True, blank=True, db_column="SERVICEPACKAGEURI")
    checksum = models.CharField(max_length=50, null=True, blank=True, db_column="CHECKSUM")
    sdcCsarId = models.CharField(max_length=50, null=True, blank=True, db_column="SDCCSARID")
    onboardingState = models.CharField(max_length=20, null=True, blank=True, db_column="ONBOARDINGSTATE")
    operationalState = models.CharField(max_length=20, null=True, blank=True, db_column="OPERATIONALSTATE")
    usageState = models.CharField(max_length=20, null=True, blank=True, db_column="USAGESTATE")
    deletionPending = models.CharField(max_length=20, null=True, blank=True, db_column="DELETIONPENDING")
    servicedId = models.CharField(max_length=50, null=True, blank=True, db_column="SERVICEDID")
    invariantId = models.CharField(max_length=50, null=True, blank=True, db_column="INVARIANTID")  # servicedInvariantId
    servicedName = models.CharField(max_length=50, null=True, blank=True, db_column="SERVICEDNAME")
    servicedDesigner = models.CharField(max_length=50, null=True, blank=True, db_column="SERVICEDDESIGNER")
    servicedDescription = models.CharField(max_length=100, null=True, blank=True, db_column="SERVICEDDESCRIPTION")
    servicedVersion = models.CharField(max_length=20, null=True, blank=True, db_column="SERVICEDVERSION")
    userDefinedData = models.TextField(max_length=1024, null=True, blank=True, db_column="USERDEFINEDDATA")
    localFilePath = models.CharField(max_length=300, null=True, blank=True, db_column="LOCALFILEPATH")
    servicedModel = models.TextField(max_length=65535, null=True, blank=True, db_column="SERVICEDMODEL")

    class Meta:
        db_table = 'CATALOG_SERVICEPACKAGE'
+
+
class VnfPackageModel(models.Model):
    """Catalog record for an onboarded VNF package."""
    vnfPackageId = models.CharField(max_length=50, primary_key=True, db_column="VNFPACKAGEID")  # onboardedVnfPkgInfoId
    vnfPackageUri = models.CharField(max_length=300, null=True, blank=True, db_column="VNFPACKAGEURI")  # downloadUri
    # NOTE(review): attribute name starts with a capital letter, unlike its siblings; kept for caller compatibility.
    SdcCSARUri = models.CharField(max_length=300, null=True, blank=True, db_column="SDCCSARURI")
    checksum = models.CharField(max_length=50, null=True, blank=True, db_column="CHECKSUM")
    onboardingState = models.CharField(max_length=20, null=True, blank=True, db_column="ONBOARDINGSTATE")
    operationalState = models.CharField(max_length=20, null=True, blank=True, db_column="OPERATIONALSTATE")
    usageState = models.CharField(max_length=20, null=True, blank=True, db_column="USAGESTATE")
    deletionPending = models.CharField(max_length=20, null=True, blank=True, db_column="DELETIONPENDING")
    vnfdId = models.CharField(max_length=50, null=True, blank=True, db_column="VNFDID")
    vnfVendor = models.CharField(max_length=50, null=True, blank=True, db_column="VENDOR")  # vnfProvider
    vnfdProductName = models.CharField(max_length=50, null=True, blank=True, db_column="VNFDPRODUCTNAME")  # vnfProductName
    vnfdVersion = models.CharField(max_length=20, null=True, blank=True, db_column="VNFDVERSION")
    vnfSoftwareVersion = models.CharField(max_length=20, null=True, blank=True, db_column="VNFSOFTWAREVERSION")
    userDefinedData = models.TextField(max_length=1024, null=True, blank=True, db_column="USERDEFINEDDATA")
    localFilePath = models.CharField(max_length=300, null=True, blank=True, db_column="LOCALFILEPATH")
    vnfdModel = models.TextField(max_length=65535, null=True, blank=True, db_column="VNFDMODEL")  # vnfd

    class Meta:
        db_table = 'CATALOG_VNFPACKAGE'
+
+
class PnfPackageModel(models.Model):
    """Catalog record for an onboarded PNF descriptor package.

    Unlike VnfPackageModel, this table carries no operationalState column.
    """
    pnfPackageId = models.CharField(max_length=50, primary_key=True, db_column="PNFPACKAGEID")  # onboardedPnfPkgInfoId
    pnfPackageUri = models.CharField(max_length=300, null=True, blank=True, db_column="PNFPACKAGEURI")  # downloadUri
    sdcCSARUri = models.CharField(max_length=300, null=True, blank=True, db_column="SDCCSARURI")
    checksum = models.CharField(max_length=50, null=True, blank=True, db_column="CHECKSUM")
    onboardingState = models.CharField(max_length=20, null=True, blank=True, db_column="ONBOARDINGSTATE")
    usageState = models.CharField(max_length=20, null=True, blank=True, db_column="USAGESTATE")
    deletionPending = models.CharField(max_length=20, null=True, blank=True, db_column="DELETIONPENDING")
    pnfdId = models.CharField(max_length=50, null=True, blank=True, db_column="PNFDID")
    pnfVendor = models.CharField(max_length=50, null=True, blank=True, db_column="VENDOR")  # pnfProvider
    pnfdProductName = models.CharField(max_length=50, null=True, blank=True, db_column="PNFDPRODUCTNAME")
    pnfdVersion = models.CharField(max_length=20, null=True, blank=True, db_column="PNFDVERSION")
    pnfSoftwareVersion = models.CharField(max_length=20, null=True, blank=True, db_column="PNFSOFTWAREVERSION")
    userDefinedData = models.TextField(max_length=1024, null=True, blank=True, db_column="USERDEFINEDDATA")
    localFilePath = models.CharField(max_length=300, null=True, blank=True, db_column="LOCALFILEPATH")
    pnfdModel = models.TextField(max_length=65535, null=True, blank=True, db_column="PNFDMODEL")  # pnfd
    pnfdName = models.TextField(max_length=65535, null=True, blank=True, db_column="PNFDNAME")  # pnfd_name

    class Meta:
        db_table = 'CATALOG_PNFPACKAGE'
+
+
class SoftwareImageModel(models.Model):
    """Catalog record for a software image belonging to a VNF package."""
    imageid = models.CharField(max_length=50, primary_key=True, db_column="IMAGEID")
    containerFormat = models.CharField(max_length=20, db_column="CONTAINERFORMAT")
    diskFormat = models.CharField(max_length=20, db_column="DISKFORMAT")
    mindisk = models.CharField(max_length=20, db_column="MINDISK")
    minram = models.CharField(max_length=20, db_column="MINRAM")
    # NOTE(review): column name looks misspelled ("USAER" vs "USER"), but it matches
    # the initial migration, so correcting it would require a schema migration.
    usermetadata = models.CharField(max_length=1024, db_column="USAERMETADATA")
    vnfPackageId = models.CharField(max_length=50, db_column="VNFPACKAGEID")
    filePath = models.CharField(max_length=300, db_column="FILEPATH")
    status = models.CharField(max_length=10, db_column="STATUS")
    vimid = models.CharField(max_length=50, db_column="VIMID")

    class Meta:
        db_table = 'CATALOG_SOFTWAREIMAGEMODEL'
+
+
class JobModel(models.Model):
    """Record describing an asynchronous job and its overall state."""
    jobid = models.CharField(max_length=255, primary_key=True, db_column="JOBID")
    jobtype = models.CharField(max_length=255, db_column="JOBTYPE")
    jobaction = models.CharField(max_length=255, db_column="JOBACTION")
    resid = models.CharField(max_length=255, db_column="RESID")
    status = models.IntegerField(null=True, blank=True, db_column="STATUS")
    starttime = models.CharField(max_length=255, null=True, blank=True, db_column="STARTTIME")
    endtime = models.CharField(max_length=255, null=True, blank=True, db_column="ENDTIME")
    progress = models.IntegerField(null=True, blank=True, db_column="PROGRESS")
    user = models.CharField(max_length=255, null=True, blank=True, db_column="USER")
    parentjobid = models.CharField(max_length=255, null=True, blank=True, db_column="PARENTJOBID")
    resname = models.CharField(max_length=255, null=True, blank=True, db_column="RESNAME")

    class Meta:
        db_table = 'CATALOG_JOB'

    def toJSON(self):
        """Serialize every model field of this record to a JSON string."""
        import json
        return json.dumps({field.name: getattr(self, field.name) for field in self._meta.fields})
+
+
class JobStatusModel(models.Model):
    """One progress entry in a job's status history."""
    indexid = models.IntegerField(db_column="INDEXID")
    jobid = models.CharField(max_length=255, db_column="JOBID")
    status = models.CharField(max_length=255, db_column="STATUS")
    progress = models.IntegerField(null=True, blank=True, db_column="PROGRESS")
    descp = models.TextField(max_length=65535, db_column="DESCP")
    errcode = models.CharField(max_length=255, null=True, blank=True, db_column="ERRCODE")
    addtime = models.CharField(max_length=255, null=True, blank=True, db_column="ADDTIME")

    class Meta:
        db_table = 'CATALOG_JOB_STATUS'

    def toJSON(self):
        """Serialize every model field of this record to a JSON string."""
        import json
        return json.dumps({field.name: getattr(self, field.name) for field in self._meta.fields})
+
+
class NsdmSubscriptionModel(models.Model):
    """Stored NSD-management subscription, including its notification filter."""
    subscriptionid = models.CharField(max_length=255, primary_key=True, db_column="SUBSCRIPTIONID")
    notificationTypes = models.TextField(null=True, db_column="NOTIFICATIONTYPES")
    auth_info = models.TextField(null=True, db_column="AUTHINFO")
    callback_uri = models.CharField(max_length=255, db_column="CALLBACKURI")
    nsdInfoId = models.TextField(null=True, db_column="NSDINFOID")
    nsdId = models.TextField(null=True, db_column="NSDID")
    nsdName = models.TextField(null=True, db_column="NSDNAME")
    nsdVersion = models.TextField(null=True, db_column="NSDVERSION")
    nsdDesigner = models.TextField(null=True, db_column="NSDDESIGNER")
    nsdInvariantId = models.TextField(null=True, db_column="NSDINVARIANTID")
    vnfPkgIds = models.TextField(null=True, db_column="VNFPKGIDS")
    pnfdInfoIds = models.TextField(null=True, db_column="PNFDINFOIDS")
    nestedNsdInfoIds = models.TextField(null=True, db_column="NESTEDNSDINFOIDS")
    nsdOnboardingState = models.TextField(null=True, db_column="NSDONBOARDINGSTATE")
    nsdOperationalState = models.TextField(null=True, db_column="NSDOPERATIONALSTATE")
    nsdUsageState = models.TextField(null=True, db_column="NSDUSAGESTATE")
    pnfdId = models.TextField(null=True, db_column="PNFDID")
    pnfdName = models.TextField(null=True, db_column="PNFDNAME")
    pnfdVersion = models.TextField(null=True, db_column="PNFDVERSION")
    pnfdProvider = models.TextField(null=True, db_column="PNFDPROVIDER")
    pnfdInvariantId = models.TextField(null=True, db_column="PNFDINVARIANTID")
    pnfdOnboardingState = models.TextField(null=True, db_column="PNFDONBOARDINGSTATE")
    pnfdUsageState = models.TextField(null=True, db_column="PNFDUSAGESTATE")
    links = models.TextField(db_column="LINKS")

    class Meta:
        db_table = 'CATALOG_NSDM_SUBSCRIPTION'

    def toJSON(self):
        """Serialize every model field of this record to a JSON string."""
        import json
        return json.dumps({field.name: getattr(self, field.name) for field in self._meta.fields})
+
+
class VnfPkgSubscriptionModel(models.Model):
    """Stored VNF-package subscription; list-valued fields hold JSON text."""
    subscription_id = models.CharField(max_length=255, primary_key=True, db_column='SUBSCRIPTION_ID')
    callback_uri = models.URLField(max_length=255, db_column="CALLBACK_URI")
    auth_info = models.TextField(db_column="AUTH_INFO")
    usage_states = models.TextField(db_column="USAGE_STATES")
    notification_types = models.TextField(db_column="NOTIFICATION_TYPES")
    vnfd_id = models.TextField(db_column="VNFD_ID")
    vnf_pkg_id = models.TextField(db_column="VNF_PKG_ID")
    operation_states = models.TextField(db_column="OPERATION_STATES")
    vnf_products_from_provider = models.TextField(db_column="VNF_PRODUCTS_FROM_PROVIDER")
    links = models.TextField(db_column="LINKS")

    class Meta:
        db_table = 'VNF_PKG_SUBSCRIPTION'

    def toDict(self):
        """Convert this record to the API representation of a subscription.

        JSON-encoded columns are decoded back into Python structures.
        """
        import json
        return {
            "id": self.subscription_id,
            "callbackUri": self.callback_uri,
            "_links": json.loads(self.links),
            "filter": {
                "notificationTypes": json.loads(self.notification_types),
                "vnfdId": json.loads(self.vnfd_id),
                "vnfPkgId": json.loads(self.vnf_pkg_id),
                "operationalState": json.loads(self.operation_states),
                "usageState": json.loads(self.usage_states),
                "vnfProductsFromProviders": json.loads(self.vnf_products_from_provider)
            }
        }
diff --git a/genericparser/pub/exceptions.py b/genericparser/pub/exceptions.py
new file mode 100644 (file)
index 0000000..626e567
--- /dev/null
@@ -0,0 +1,53 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
class GenericparserException(Exception):
    """Base class for all genericparser-specific errors."""
+
+
class ResourceNotFoundException(GenericparserException):
    """Raised when a requested resource does not exist."""
+
+
class PackageNotFoundException(GenericparserException):
    """Raised when a requested package does not exist."""
+
+
class PackageHasExistsException(GenericparserException):
    """Raised when trying to create a package that already exists."""
+
+
class VnfPkgSubscriptionException(GenericparserException):
    """Raised for VNF package subscription errors."""
+
+
class VnfPkgDuplicateSubscriptionException(GenericparserException):
    """Raised when an equivalent VNF package subscription already exists."""
+
+
class SubscriptionDoesNotExistsException(GenericparserException):
    """Raised when a referenced subscription cannot be found."""
+
+
class NsdmBadRequestException(GenericparserException):
    """Raised for malformed NSD-management requests."""
+
+
class NsdmDuplicateSubscriptionException(GenericparserException):
    """Raised when an equivalent NSD-management subscription already exists."""
+
+
class ArtifactNotFoundException(GenericparserException):
    """Raised when a requested package artifact does not exist."""
diff --git a/genericparser/pub/msapi/__init__.py b/genericparser/pub/msapi/__init__.py
new file mode 100644 (file)
index 0000000..c7b6818
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/pub/msapi/extsys.py b/genericparser/pub/msapi/extsys.py
new file mode 100644 (file)
index 0000000..639513f
--- /dev/null
@@ -0,0 +1,175 @@
+# Copyright 2016 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import uuid
+
+from genericparser.pub.config.config import AAI_BASE_URL, AAI_USER, AAI_PASSWD
+from genericparser.pub.exceptions import GenericparserException
+from genericparser.pub.utils import restcall
+from genericparser.pub.utils.values import ignore_case_get
+
# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
+
+
def call_aai(resource, method, content=''):
    """Invoke an AAI REST API endpoint.

    Args:
        resource: resource path, relative to AAI_BASE_URL.
        method: HTTP method name, e.g. "GET".
        content: request body as a string; empty for body-less requests.

    Returns:
        The result tuple produced by restcall.call_req (element 0 is the
        return code, 0 meaning success).
    """
    # A fresh X-TransactionId per call lets AAI correlate request logs.
    additional_headers = {
        'X-FromAppId': 'MODEL-GENERICPARSER',
        'X-TransactionId': str(uuid.uuid1())
    }
    # Keyword arguments for readability and consistency with call_sdc in sdc.py.
    return restcall.call_req(base_url=AAI_BASE_URL,
                             user=AAI_USER,
                             passwd=AAI_PASSWD,
                             auth_type=restcall.rest_no_auth,
                             resource=resource,
                             method=method,
                             content=content,
                             additional_headers=additional_headers)
+
+
def get_vims():
    """Query all cloud regions from AAI and convert them to internal vim dicts.

    Returns:
        A list of vim_info dicts (see convert_vim_info).

    Raises:
        GenericparserException: if the AAI request fails.
    """
    ret = call_aai("/cloud-infrastructure/cloud-regions?depth=all", "GET")
    if ret[0] != 0:
        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
        raise GenericparserException("Failed to query vims from extsys.")
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode.
    vims_aai = ignore_case_get(json.loads(ret[1]), "cloud-region")
    # Convert each AAI cloud-region record to the internal vim_info layout.
    return [convert_vim_info(vim) for vim in vims_aai]
+
+
def get_vim_by_id(vim_id):
    """Query a single cloud region from AAI by combined vim id.

    Args:
        vim_id: "<cloud_owner>_<cloud_region>" identifier.

    Returns:
        The internal vim_info dict (see convert_vim_info).

    Raises:
        GenericparserException: if the AAI request fails.
    """
    cloud_owner, cloud_region = split_vim_to_owner_region(vim_id)
    ret = call_aai("/cloud-infrastructure/cloud-regions/cloud-region/%s/%s?depth=all"
                   % (cloud_owner, cloud_region), "GET")
    if ret[0] != 0:
        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
        raise GenericparserException("Failed to query vim(%s) from extsys." % vim_id)
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode.
    vim_info_aai = json.loads(ret[1])
    return convert_vim_info(vim_info_aai)
+
+
def split_vim_to_owner_region(vim_id):
    """Split a combined vim id "<cloud_owner>_<cloud_region>" into its parts.

    NOTE(review): underscores inside the region part are dropped
    ("a_b_c" -> ("a", "bc")); confirm region ids never contain "_".
    """
    cloud_owner, *region_parts = vim_id.split('_')
    return cloud_owner, "".join(region_parts)
+
+
def convert_vim_info(vim_info_aai):
    """Convert an AAI cloud-region record into the internal vim_info dict.

    Args:
        vim_info_aai: AAI record; must contain "cloud-owner" and
            "cloud-region-id". Credentials are read from the first entry of
            its esr-system-info list.

    Returns:
        The internal vim_info dict.
    """
    vim_id = vim_info_aai["cloud-owner"] + "_" + vim_info_aai["cloud-region-id"]
    esr_system_info = ignore_case_get(ignore_case_get(vim_info_aai, "esr-system-info-list"), "esr-system-info")
    # Hoist the repeated [0] indexing: only the first esr-system-info entry is used.
    esr_info = esr_system_info[0]
    vim_info = {
        "vimId": vim_id,
        "name": vim_id,
        "url": ignore_case_get(esr_info, "service-url"),
        "userName": ignore_case_get(esr_info, "user-name"),
        "password": ignore_case_get(esr_info, "password"),
        "tenant": ignore_case_get(esr_info, "default-tenant"),
        "vendor": ignore_case_get(esr_info, "vendor"),
        "version": ignore_case_get(esr_info, "version"),
        "description": "vim",
        "domain": "",
        "type": ignore_case_get(esr_info, "type"),
        "createTime": "2016-07-18 12:22:53"  # fixed placeholder kept from the original implementation
    }
    return vim_info
+
+
def get_sdn_controller_by_id(sdn_ontroller_id):
    """Query one third-party SDN controller record from AAI/extsys.

    Args:
        sdn_ontroller_id: esr-thirdparty-sdnc id. (The parameter name keeps
            the original "ontroller" typo for keyword-argument compatibility.)

    Returns:
        The internal sdnc_info dict (see convert_sdnc_info).

    Raises:
        GenericparserException: if the AAI request fails.
    """
    ret = call_aai("/external-system/esr-thirdparty-sdnc-list/esr-thirdparty-sdnc/%s?depth=all"
                   % sdn_ontroller_id, "GET")
    if ret[0] != 0:
        # Fixed "ontroller" -> "controller" in the human-readable messages.
        logger.error("Failed to query sdn controller(%s) from extsys. detail is %s.", sdn_ontroller_id, ret[1])
        raise GenericparserException("Failed to query sdn controller(%s) from extsys." % sdn_ontroller_id)
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode.
    sdnc_info_aai = json.loads(ret[1])
    return convert_sdnc_info(sdnc_info_aai)
+
+
def convert_sdnc_info(sdnc_info_aai):
    """Convert an AAI esr-thirdparty-sdnc record into the internal sdnc dict.

    Args:
        sdnc_info_aai: AAI record; must contain "thirdparty-sdnc-id".
            Credentials come from the first entry of its esr-system-info list.

    Returns:
        The internal sdnc_info dict.
    """
    esr_system_info = ignore_case_get(ignore_case_get(sdnc_info_aai, "esr-system-info-list"), "esr-system-info")
    # Hoist the repeated [0] indexing: only the first esr-system-info entry is used.
    esr_info = esr_system_info[0]
    sdnc_info = {
        "sdnControllerId": sdnc_info_aai["thirdparty-sdnc-id"],
        "name": sdnc_info_aai["thirdparty-sdnc-id"],
        "url": ignore_case_get(esr_info, "service-url"),
        "userName": ignore_case_get(esr_info, "user-name"),
        "password": ignore_case_get(esr_info, "password"),
        "vendor": ignore_case_get(esr_info, "vendor"),
        "version": ignore_case_get(esr_info, "version"),
        "description": "",
        # NOTE(review): "protocal" is the literal key looked up here — confirm
        # against the AAI esr-system-info schema before "fixing" the spelling.
        "protocol": ignore_case_get(esr_info, "protocal"),
        "productName": ignore_case_get(sdnc_info_aai, "product-name"),
        "type": ignore_case_get(esr_info, "type"),
        "createTime": "2016-07-18 12:22:53"  # fixed placeholder kept from the original implementation
    }
    return sdnc_info
+
+
def get_vnfm_by_id(vnfm_inst_id):
    """Query one VNFM record from AAI/extsys by its id.

    Args:
        vnfm_inst_id: esr-vnfm id.

    Returns:
        The internal vnfm_info dict (see convert_vnfm_info).

    Raises:
        GenericparserException: if the AAI request fails.
    """
    uri = "/external-system/esr-vnfm-list/esr-vnfm/%s?depth=all" % vnfm_inst_id
    ret = call_aai(uri, "GET")
    # Use != 0 for consistency with the other queries in this module
    # (call_req returns 0 on success); was "> 0".
    if ret[0] != 0:
        logger.error('Send get VNFM information request to extsys failed: %s', ret[1])
        raise GenericparserException('Send get VNFM information request to extsys failed.')
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode.
    vnfm_info_aai = json.loads(ret[1])
    return convert_vnfm_info(vnfm_info_aai)
+
+
def convert_vnfm_info(vnfm_info_aai):
    """Convert an AAI esr-vnfm record into the internal vnfm_info dict.

    Args:
        vnfm_info_aai: AAI record; must contain "vnfm-id", "vim-id" and
            "certificate-url". Credentials come from the first entry of its
            esr-system-info list.

    Returns:
        The internal vnfm_info dict.
    """
    esr_system_info = ignore_case_get(ignore_case_get(vnfm_info_aai, "esr-system-info-list"), "esr-system-info")
    # Hoist the repeated [0] indexing: only the first esr-system-info entry is used.
    esr_info = esr_system_info[0]
    vnfm_info = {
        "vnfmId": vnfm_info_aai["vnfm-id"],
        "name": vnfm_info_aai["vnfm-id"],
        "type": ignore_case_get(esr_info, "type"),
        "vimId": vnfm_info_aai["vim-id"],
        "vendor": ignore_case_get(esr_info, "vendor"),
        "version": ignore_case_get(esr_info, "version"),
        "description": "vnfm",
        "certificateUrl": vnfm_info_aai["certificate-url"],
        "url": ignore_case_get(esr_info, "service-url"),
        "userName": ignore_case_get(esr_info, "user-name"),
        "password": ignore_case_get(esr_info, "password"),
        "createTime": "2016-07-06 15:33:18"  # fixed placeholder kept from the original implementation
    }
    return vnfm_info
+
+
def select_vnfm(vnfm_type, vim_id):
    """Find a registered VNFM matching both the given type and vim id.

    Args:
        vnfm_type: required VNFM type.
        vim_id: required vim id the VNFM must belong to.

    Returns:
        The internal vnfm_info dict of the first matching VNFM.

    Raises:
        GenericparserException: if the AAI query fails or no VNFM matches.
    """
    uri = "/external-system/esr-vnfm-list?depth=all"
    ret = call_aai(uri, "GET")
    if ret[0] > 0:
        logger.error("Failed to call %s: %s", uri, ret[1])
        raise GenericparserException('Failed to get vnfms from extsys.')
    vnfms = ignore_case_get(json.loads(ret[1]), "esr-vnfm")
    for vnfm in vnfms:
        # NOTE(review): unlike convert_vnfm_info, this reads "esr-system-info"
        # directly (not via "esr-system-info-list"), and it compares vim_id
        # against the record's "vnfm-id" rather than "vim-id". Both look
        # suspicious — verify against live AAI data before changing.
        esr_system_info = ignore_case_get(vnfm, "esr-system-info")
        # Renamed from "type" to avoid shadowing the builtin.
        candidate_type = ignore_case_get(esr_system_info, "type")
        candidate_id = vnfm["vnfm-id"]
        if candidate_type == vnfm_type and candidate_id == vim_id:
            return convert_vnfm_info(vnfm)
    raise GenericparserException('No vnfm found with %s in vim(%s)' % (vnfm_type, vim_id))
diff --git a/genericparser/pub/msapi/sdc.py b/genericparser/pub/msapi/sdc.py
new file mode 100644 (file)
index 0000000..f7b9271
--- /dev/null
@@ -0,0 +1,116 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import logging
+import os
+
+from genericparser.pub.exceptions import GenericparserException
+from genericparser.pub.utils import restcall
+from genericparser.pub.utils import fileutil
+from genericparser.pub.config.config import SDC_BASE_URL, SDC_USER, SDC_PASSWD
+
+logger = logging.getLogger(__name__)
+
# SDC asset-type path segments used by get_artifacts()/get_artifact().
ASSETTYPE_RESOURCES = "resources"
ASSETTYPE_SERVICES = "services"
# Distribution status a service artifact must have before it can be consumed.
DISTRIBUTED = "DISTRIBUTED"
+
+
def call_sdc(resource, method, content=''):
    """Issue a REST request against SDC, tagging it with the VFC instance id."""
    headers = {
        'X-ECOMP-InstanceID': 'VFC',
    }
    return restcall.call_req(base_url=SDC_BASE_URL,
                             user=SDC_USER,
                             passwd=SDC_PASSWD,
                             auth_type=restcall.rest_no_auth,
                             resource=resource,
                             method=method,
                             content=content,
                             additional_headers=headers)
+
+
+"""
+sample of return value
+[
+    {
+        "uuid": "c94490a0-f7ef-48be-b3f8-8d8662a37236",
+        "invariantUUID": "63eaec39-ffbe-411c-a838-448f2c73f7eb",
+        "name": "underlayvpn",
+        "version": "2.0",
+        "toscaModelURL": "/sdc/v1/genericparser/resources/c94490a0-f7ef-48be-b3f8-8d8662a37236/toscaModel",
+        "category": "Volte",
+        "subCategory": "VolteVF",
+        "resourceType": "VF",
+        "lifecycleState": "CERTIFIED",
+        "lastUpdaterUserId": "jh0003"
+    }
+]
+"""
+
+
def get_artifacts(asset_type):
    """Query all SDC artifacts of the given asset type.

    Args:
        asset_type: "resources" or "services" (see ASSETTYPE_* constants).
    Returns:
        The decoded JSON payload (a list of artifact metadata dicts).
    Raises:
        GenericparserException: when SDC returns a non-success status.
    """
    resource = "/sdc/v1/genericparser/{assetType}".format(assetType=asset_type)
    ret = call_sdc(resource, "GET")
    if ret[0] != 0:
        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
        raise GenericparserException("Failed to query artifacts(%s) from sdc." % asset_type)
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode
    return json.loads(ret[1])
+
+
def get_artifact(asset_type, csar_id):
    """Return the SDC artifact whose uuid equals csar_id.

    Service artifacts must already be DISTRIBUTED; otherwise an exception
    is raised. A missing artifact also raises GenericparserException.
    """
    for artifact in get_artifacts(asset_type):
        if artifact["uuid"] != csar_id:
            continue
        if asset_type == ASSETTYPE_SERVICES and \
                artifact.get("distributionStatus", None) != DISTRIBUTED:
            raise GenericparserException("The artifact (%s,%s) is not distributed from sdc." % (asset_type, csar_id))
        return artifact
    raise GenericparserException("Failed to query artifact(%s,%s) from sdc." % (asset_type, csar_id))
+
+
def delete_artifact(asset_type, asset_id, artifact_id):
    """Delete one artifact of an SDC asset.

    Args:
        asset_type: "resources" or "services".
        asset_id: uuid of the owning asset.
        artifact_id: uuid of the artifact to delete.
    Returns:
        The decoded JSON response body.
    Raises:
        GenericparserException: when SDC returns a non-success status.
    """
    resource = "/sdc/v1/genericparser/{assetType}/{uuid}/artifacts/{artifactUUID}"
    resource = resource.format(assetType=asset_type, uuid=asset_id, artifactUUID=artifact_id)
    ret = call_sdc(resource, "DELETE")
    if ret[0] != 0:
        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
        raise GenericparserException("Failed to delete artifacts(%s) from sdc." % artifact_id)
    # json.loads is the idiomatic equivalent of json.JSONDecoder().decode
    return json.loads(ret[1])
+
+
def download_artifacts(download_url, local_path, file_name):
    """Download an SDC artifact into local_path/file_name.

    Args:
        download_url: resource path of the artifact on SDC.
        local_path: local directory (created if missing).
        file_name: name of the file to write.
    Returns:
        The full local file name that was written.
    Raises:
        GenericparserException: when the HTTP request fails.
    """
    additional_headers = {
        'X-ECOMP-InstanceID': 'VFC',
        'accept': 'application/octet-stream'
    }
    ret = restcall.call_req(base_url=SDC_BASE_URL,
                            user=SDC_USER,
                            passwd=SDC_PASSWD,
                            auth_type=restcall.rest_no_auth,
                            resource=download_url,
                            method="GET",
                            additional_headers=additional_headers)
    if ret[0] != 0:
        logger.error("Status code is %s, detail is %s.", ret[2], ret[1])
        raise GenericparserException("Failed to download %s from sdc." % download_url)
    fileutil.make_dirs(local_path)
    local_file_name = os.path.join(local_path, file_name)
    # with-statement guarantees the handle is closed even if write() fails
    # (the original open/close pair leaked the handle on error)
    with open(local_file_name, 'wb') as local_file:
        local_file.write(ret[1])
    return local_file_name
diff --git a/genericparser/pub/ssl/cert/foobar.crt b/genericparser/pub/ssl/cert/foobar.crt
new file mode 100644 (file)
index 0000000..7ab6dd3
--- /dev/null
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDRDCCAiwCCQD8zmUqCHOp2zANBgkqhkiG9w0BAQsFADBjMQswCQYDVQQGEwJD
+TjEQMA4GA1UECAwHQmVpSmluZzEQMA4GA1UEBwwHQmVpSmluZzENMAsGA1UECgwE
+Q21jYzESMBAGA1UECwwJQ21jYy1vbmFwMQ0wCwYDVQQDDARDbWNjMCAXDTE5MDMy
+NjAyNTI0N1oYDzIxMTkwMzAyMDI1MjQ3WjBjMQswCQYDVQQGEwJDTjEQMA4GA1UE
+CAwHQmVpSmluZzEQMA4GA1UEBwwHQmVpSmluZzENMAsGA1UECgwEQ21jYzESMBAG
+A1UECwwJQ21jYy1vbmFwMQ0wCwYDVQQDDARDbWNjMIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEA4DurchTgEw/A1y/Q5gpSSJTLC+KFOV4Vmbz2hlvOGLwV
+NIX1+r7DpaiJTGjEKLCtGsD2tGm69KiUX9FBY1CStnwK2R4wA5NKW+ZKQLd3sRTc
+Hl+2bLFk7E5KvmKZZM4xhsN3ey7Ia8H0sSfKiGlxB1hZI2HibRNy8GWyi95j8MkP
+v+H7HbJlX1kIKb7p2y8aG8AnAzBWikJFcQ1y3bJA2r31wOht63pIekwh+nntt5u+
+Yh/STXHiAe2gT7b9x6RAn09tC6TsBKzdZ4ZKrBLfRwPv6+cbDLcqkhbPukqaFaEs
+rDCLhuWX10sGLEsqXULDwZRoYxTUueLek9v+/8f5EwIDAQABMA0GCSqGSIb3DQEB
+CwUAA4IBAQCenowNpFiy9vH18+9PL4rZjZ1NH+frGqsWvDiyHPnLpneCLOuiXvgv
+kcuLJDYatc6vTlXkJElxwF1fCaJEn6dNq3WtQxdJjhXidAKx8Hsf1Nxkwbvmahv2
+TIWV/FMvop+9SdonDBGZojrYKRsY3EilQf+7/rGEM52HE8S3yE8CCe9xTZSYUs1B
+B8CzOPBVU7SWSRSLUKfdRhjyl4Rqsslxzal+8A36yViHBPhJgmDRoVWVR+E289IH
+FCQ0d8qVvdTGkM79dvZrEH9WSzPwlTR0NSkBMWTNLcWyP8caDjg+fbSVOF+s+sd/
+bLuAyHyeXUzClJx6CA5zwLZz5K5SVxw+
+-----END CERTIFICATE-----
diff --git a/genericparser/pub/ssl/cert/foobar.csr b/genericparser/pub/ssl/cert/foobar.csr
new file mode 100644 (file)
index 0000000..30b381b
--- /dev/null
@@ -0,0 +1,18 @@
+-----BEGIN CERTIFICATE REQUEST-----
+MIIC1DCCAbwCAQAwYzELMAkGA1UEBhMCQ04xEDAOBgNVBAgMB0JlaUppbmcxEDAO
+BgNVBAcMB0JlaUppbmcxDTALBgNVBAoMBENtY2MxEjAQBgNVBAsMCUNtY2Mtb25h
+cDENMAsGA1UEAwwEQ21jYzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AOA7q3IU4BMPwNcv0OYKUkiUywvihTleFZm89oZbzhi8FTSF9fq+w6WoiUxoxCiw
+rRrA9rRpuvSolF/RQWNQkrZ8CtkeMAOTSlvmSkC3d7EU3B5ftmyxZOxOSr5imWTO
+MYbDd3suyGvB9LEnyohpcQdYWSNh4m0TcvBlsoveY/DJD7/h+x2yZV9ZCCm+6dsv
+GhvAJwMwVopCRXENct2yQNq99cDobet6SHpMIfp57bebvmIf0k1x4gHtoE+2/cek
+QJ9PbQuk7ASs3WeGSqwS30cD7+vnGwy3KpIWz7pKmhWhLKwwi4bll9dLBixLKl1C
+w8GUaGMU1Lni3pPb/v/H+RMCAwEAAaAsMBMGCSqGSIb3DQEJAjEGDARDbWNjMBUG
+CSqGSIb3DQEJBzEIDAYxMjM0NTYwDQYJKoZIhvcNAQELBQADggEBAGr8XkV5G9bK
+lPc3jUvmS+KSg9UB1wrvf6kQUGDvCvXqZCGw1mRZekN4rH5c1fk9iLwLqDkWDnNo
+79jzAWV76U62GarTOng32TLTItxI/EeUhQFCf+AI/YcJEdHf8HGrDuvC0iSz6/9q
+Fe5HhVSO7zsHdP28J05wTyII+2k4ecAj3oXutUnGCBg0nlexDmxAZoe8x4XHpqkt
+tEKquZdq3l17+v5DKlKwczQcXUBC1yGw0ki67U5w9fVKzpAf7Frr7YnbGS35i5Pv
+ny4SlXPW167hRQKXCniY5QtCocP+GoPD+81uWwf+bjHyAZ3HCd532YFgXW01yJhM
+imRDxx2gDds=
+-----END CERTIFICATE REQUEST-----
diff --git a/genericparser/pub/ssl/cert/foobar.key b/genericparser/pub/ssl/cert/foobar.key
new file mode 100644 (file)
index 0000000..266f502
--- /dev/null
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEA4DurchTgEw/A1y/Q5gpSSJTLC+KFOV4Vmbz2hlvOGLwVNIX1
++r7DpaiJTGjEKLCtGsD2tGm69KiUX9FBY1CStnwK2R4wA5NKW+ZKQLd3sRTcHl+2
+bLFk7E5KvmKZZM4xhsN3ey7Ia8H0sSfKiGlxB1hZI2HibRNy8GWyi95j8MkPv+H7
+HbJlX1kIKb7p2y8aG8AnAzBWikJFcQ1y3bJA2r31wOht63pIekwh+nntt5u+Yh/S
+TXHiAe2gT7b9x6RAn09tC6TsBKzdZ4ZKrBLfRwPv6+cbDLcqkhbPukqaFaEsrDCL
+huWX10sGLEsqXULDwZRoYxTUueLek9v+/8f5EwIDAQABAoIBAQCL+dPBqHRkkc3w
+xsGiCMlq06+Y4LQHpsrXKNW/8+lJGYgnPITcHV+mtvnWgAQL3paA//pBj0sM1Xui
+AM/PvomHbxGajbStVrHxgmXR1nXaTkpGj7siSO7WcN1J0eUtv2W9WaHpfL/SPMaS
+HGPbGe9cBXPHmaAuNRjoJqP5mj9LHB0SebJImjiaCYsUkBgC0Ooo4UuwOXLYO/ak
+gZrbM8WwY21rRVc3uDyg5Ez8gxbFG3L39t26gpqBYosqNlPe7/JVkTpxUKk1Allf
+fAJNyfpS2CuY+nQWtCleJFtF1Yq9jwfPvtNUTrXeJq97xFqSIRnJbygttsokbPto
+tLqB4rSBAoGBAPPgidT0KyfYVUaWNEXtOOJyh3MCk0ssalRKf+Dap9J9Bgpjldbu
+/tBBrrbxSEAieXe8gKDwgDY2qBcsUUvEY+EWL7tiMBnS4HvK8/4aEIx14xMgiuCS
+bTnMGlIlImjMKdj0iKOd0N2NPQcfr0NTUdZJ/p1o965lq/9i7xcfHinTAoGBAOth
+JqwyGQ6oP005Vry3S/7E7UJjYxMaUfhRmMGoVz+qXAEfq0r4TkNrcEvP7mu72pVe
+q1P4imQjvvPXqoPBdh310a6OCQ7BrFpkOghHBIG0koblncml4hdBSReUA1auW2Qr
+c/MUSeV96DDbI2mZJulVdqINyaAt/JDMnfdcbCvBAoGAYPTI91/ndFzeckSvHYnV
+TrnnvcKtWnqa/03rDzL++4D3ENRMsvmrVpJ2aob8iXrrPb40iUd0QZlzNFtLKss2
+Rjty2JWNuAaNdsnWPRSRtbX8hBMxA11TjWHmqPfYeT+J95YoaJwKeLp5I8bl/+c1
+JvOeBWjA55XGTq8/jLqzXD8CgYEAiQVyJNW5Hn4083iIlK1DkRkEYRxIRYuR4jNl
+8H5V5BsBGipcZfUsYjT+FzQBQDgII+ILbIOH1Im2lG6ctbx+TSyXlrzaavu1oJ0t
+5zmoVvVOQzcR5pwphI4dxZsFYoV3cFWXVw8dgXoNG7vF3qgoLbbxq57JG/UJTSXA
+Y4oq8kECgYEAlgh6v+o6jCUD7l0JWdRtZy52rhC3W/HrhcHE0/l3RjeV+kLIWr9u
+WbNltgZQGvPVQ+ZwPIYj1gaGP17wm5pAsJNSN4LQ1v4Fj/XjT7zdwYwYOrXIJati
+5HTeyHjm+wwOPYrmH4YLGwAh6T1is42E0K2L7LG8HnO4bHbfV2mKji0=
+-----END RSA PRIVATE KEY-----
diff --git a/genericparser/pub/utils/__init__.py b/genericparser/pub/utils/__init__.py
new file mode 100644 (file)
index 0000000..c7b6818
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/pub/utils/fileutil.py b/genericparser/pub/utils/fileutil.py
new file mode 100644 (file)
index 0000000..d7811b8
--- /dev/null
@@ -0,0 +1,79 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import shutil
+import logging
+import tempfile
+import traceback
+import urllib2
+import zipfile
+
+
+logger = logging.getLogger(__name__)
+
+
def make_dirs(path):
    """Create path (and any missing parents) if it does not already exist.

    The original exists-then-makedirs sequence raced with concurrent
    creators; an already-existing directory is now tolerated explicitly.
    """
    import errno
    if not os.path.exists(path):
        try:
            # 0o777 is the same octal literal as the original Python-2 0777
            os.makedirs(path, 0o777)
        except OSError as err:
            # another process may have created the directory meanwhile
            if err.errno != errno.EEXIST:
                raise
+
+
def delete_dirs(path):
    """Recursively remove path; failures are logged, never raised."""
    if not os.path.exists(path):
        return
    try:
        shutil.rmtree(path)
    except Exception as err:
        logger.error(traceback.format_exc())
        # err.message is the Python-2 exception attribute used file-wide
        logger.error("Failed to delete %s:%s", path, err.message)
+
+
def download_file_from_http(url, local_dir, file_name):
    """Download url into local_dir/file_name.

    Returns:
        (is_download_ok, local_file_name) — never raises; failures are
        logged and reported through the boolean flag.
    """
    local_file_name = os.path.join(local_dir, file_name)
    is_download_ok = False
    try:
        make_dirs(local_dir)
        req = urllib2.urlopen(urllib2.Request(url))
        try:
            # with-statement closes the output file even if write() fails
            with open(local_file_name, 'wb') as save_file:
                save_file.write(req.read())
        finally:
            req.close()
        is_download_ok = True
    except Exception:
        # narrowed from a bare except: so SystemExit/KeyboardInterrupt propagate
        logger.error(traceback.format_exc())
        logger.error("Failed to download %s to %s.", url, local_file_name)
    return is_download_ok, local_file_name
+
+
def unzip_file(zip_src, dst_dir, csar_path):
    """Extract zip_src into dst_dir and return the path of csar_path inside it.

    Returns "" when zip_src does not exist.
    """
    if not os.path.exists(zip_src):
        return ""
    # context manager closes the archive (the original leaked the handle);
    # extractall() is the stdlib equivalent of extracting every member,
    # and avoids shadowing the builtin name `file`
    with zipfile.ZipFile(zip_src, 'r') as archive:
        archive.extractall(dst_dir)
    return os.path.join(dst_dir, csar_path)
+
+
def unzip_csar_to_tmp(zip_src):
    """Extract zip_src into a fresh temporary directory and return its path.

    The caller is responsible for removing the returned directory.
    """
    dirpath = tempfile.mkdtemp()
    # context manager closes the archive handle (the original leaked it)
    with zipfile.ZipFile(zip_src, 'r') as archive:
        archive.extractall(dirpath)
    return dirpath
+
+
def get_artifact_path(vnf_path, artifact_file):
    """Search vnf_path recursively for artifact_file.

    Returns the full path of the first match, or None when absent.
    """
    for root, _, filenames in os.walk(vnf_path):
        if artifact_file in filenames:
            return os.path.join(root, artifact_file)
    return None
diff --git a/genericparser/pub/utils/idutil.py b/genericparser/pub/utils/idutil.py
new file mode 100644 (file)
index 0000000..85bebb8
--- /dev/null
@@ -0,0 +1,20 @@
+# Copyright 2016 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from redisco import containers as cont
+
+
def get_auto_id(id_type, id_group="auto_id_hash"):
    """Increment and return the Redis-backed counter for id_type in id_group."""
    counter_hash = cont.Hash(id_group)
    counter_hash.hincrby(id_type, 1)
    return counter_hash.hget(id_type)
diff --git a/genericparser/pub/utils/jobutil.py b/genericparser/pub/utils/jobutil.py
new file mode 100644 (file)
index 0000000..c06c72d
--- /dev/null
@@ -0,0 +1,144 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import datetime
+import logging
+import uuid
+import traceback
+
+from genericparser.pub.database.models import JobStatusModel, JobModel
+from genericparser.pub.utils import idutil
+
+logger = logging.getLogger(__name__)
+
+
def enum(**enums):
    """Build a simple enum-like class whose attributes are the keyword values."""
    members = dict(enums)
    return type('Enum', (), members)
+
+
# Overall job state persisted in JobModel.status.
JOB_STATUS = enum(PROCESSING=0, FINISH=1)
# Fine-grained per-step states reported through JobStatusModel.status.
JOB_MODEL_STATUS = enum(STARTED='started', PROCESSING='processing', FINISHED='finished', ERROR='error',
                        TIMEOUT='timeout')
# Supported job categories.
JOB_TYPE = enum(CREATE_VNF="create vnf", TERMINATE_VNF="terminate vnf", GRANT_VNF="grant vnf", MANUAL_SCALE_VNF="manual scale vnf",
                HEAL_VNF="heal vnf")
+
+
class JobUtil(object):
    """Helpers to create, query, update and clear job records (JobModel)
    and their per-step status rows (JobStatusModel)."""

    def __init__(self):
        pass

    @staticmethod
    def __gen_job_id(job_name):
        # Embed the (sanitized) job name so ids stay human readable.
        return "%s-%s" % (job_name if job_name else "UnknownJob", uuid.uuid1())

    @staticmethod
    def query_job_status(job_id, index_id=-1):
        """Return status rows for job_id, newest first.

        With index_id < 0 only the latest row is returned; otherwise every
        row whose indexid is greater than index_id.
        """
        jobs = []
        if index_id < 0:
            row = JobStatusModel.objects.filter(jobid=job_id).order_by("-indexid").first()
            if row:
                jobs.append(row)
        else:
            # plain loop instead of a side-effect list comprehension
            for job in JobStatusModel.objects.filter(jobid=job_id).order_by("-indexid"):
                if job.indexid > index_id:
                    jobs.append(job)
        return jobs

    @staticmethod
    def is_job_exists(job_id):
        """Return True when a job with job_id exists."""
        # exists() avoids fetching rows just to count them
        return JobModel.objects.filter(jobid=job_id).exists()

    @staticmethod
    def create_job(inst_type, jobaction, inst_id, user='', job_id=None, res_name=''):
        """Create and persist a JobModel in PROCESSING state; return its job id.

        When job_id is None a readable id is generated from type/action/inst_id.
        """
        if job_id is None:
            job_id = JobUtil.__gen_job_id(
                '%s-%s-%s' % (str(inst_type).replace(' ', '_'), str(jobaction).replace(' ', '_'), str(inst_id)))
        job = JobModel()
        job.jobid = job_id
        job.jobtype = inst_type
        job.jobaction = jobaction
        job.resid = str(inst_id)
        job.status = JOB_STATUS.PROCESSING
        job.user = user
        job.starttime = datetime.datetime.now().strftime('%Y-%m-%d %X')
        job.progress = 0
        job.resname = res_name
        logger.debug("create a new job, jobid=%s, jobtype=%s, jobaction=%s, resid=%s, status=%d" %
                     (job.jobid, job.jobtype, job.jobaction, job.resid, job.status))
        job.save()
        return job_id

    @staticmethod
    def clear_job(job_id):
        """Delete every JobModel row with job_id."""
        for job in JobModel.objects.filter(jobid=job_id):
            job.delete()
        logger.debug("Clear job, job_id=%s" % job_id)

    @staticmethod
    def add_job_status(job_id, progress, status_decs, error_code=""):
        """Append a status row to job_id and update the job's progress.

        progress 0 => started, 100 => finished, 101 => partly_finished,
        >101 or error_code "255" => error; progress >= 100 also marks the
        job itself FINISHed. Raises Exception when the job does not exist;
        any other failure is logged and swallowed (best effort).
        """
        jobs = JobModel.objects.filter(jobid=job_id)
        if not jobs:
            logger.error("Job[%s] is not exists, please create job first." % job_id)
            raise Exception("Job[%s] is not exists." % job_id)
        try:
            int_progress = int(progress)
            job_status = JobStatusModel()
            job_status.indexid = int(idutil.get_auto_id(job_id))
            job_status.jobid = job_id
            job_status.status = "processing"
            job_status.progress = int_progress

            if job_status.progress == 0:
                job_status.status = "started"
            elif job_status.progress == 100:
                job_status.status = "finished"
            elif job_status.progress == 101:
                job_status.status = "partly_finished"
            elif job_status.progress > 101:
                job_status.status = "error"

            # error_code "255" forces the error state regardless of progress
            if error_code == "255":
                job_status.status = "error"

            job_status.descp = status_decs
            job_status.errcode = error_code if error_code else "0"
            job_status.addtime = datetime.datetime.now().strftime('%Y-%m-%d %X')
            job_status.save()
            logger.debug("Add a new job status, jobid=%s, indexid=%d,"
                         " status=%s, description=%s, progress=%d, errcode=%s, addtime=%r" %
                         (job_status.jobid, job_status.indexid, job_status.status, job_status.descp,
                          job_status.progress, job_status.errcode, job_status.addtime))

            job = jobs[0]
            job.progress = int_progress
            if job_status.progress >= 100:
                job.status = JOB_STATUS.FINISH
                job.endtime = datetime.datetime.now().strftime('%Y-%m-%d %X')
            job.save()
            logger.debug("update job, jobid=%s, progress=%d" % (job_status.jobid, int_progress))
        except Exception:
            # narrowed from a bare except: so SystemExit/KeyboardInterrupt propagate
            logger.error(traceback.format_exc())

    @staticmethod
    def clear_job_status(job_id):
        """Delete every JobStatusModel row with job_id."""
        for job in JobStatusModel.objects.filter(jobid=job_id):
            job.delete()
        logger.debug("Clear job status, job_id=%s" % job_id)

    @staticmethod
    def get_unfinished_jobs(url_prefix, inst_id, inst_type):
        """Return "<url_prefix>/<jobid>" for each still-processing job of inst_id/inst_type."""
        jobs = JobModel.objects.filter(resid=inst_id, jobtype=inst_type, status=JOB_STATUS.PROCESSING)
        # list comprehension replaces the Python-2-only bare reduce()
        return [url_prefix + "/" + job.jobid for job in jobs]
diff --git a/genericparser/pub/utils/restcall.py b/genericparser/pub/utils/restcall.py
new file mode 100644 (file)
index 0000000..a8944b5
--- /dev/null
@@ -0,0 +1,113 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import traceback
+import logging
+import urllib2
+import uuid
+import httplib2
+
+from genericparser.pub.config.config import MSB_SERVICE_IP, MSB_SERVICE_PORT
+
# Authentication modes accepted by call_req()'s auth_type parameter.
rest_no_auth, rest_oneway_auth, rest_bothway_auth = 0, 1, 2
# Status codes are kept as strings to match the resp['status'] comparisons below.
HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED = '200', '201', '204', '202'
status_ok_list = [HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_202_ACCEPTED]
HTTP_404_NOTFOUND, HTTP_403_FORBIDDEN, HTTP_401_UNAUTHORIZED, HTTP_400_BADREQUEST = '404', '403', '401', '400'
+
+logger = logging.getLogger(__name__)
+
+
def call_req(base_url, user, passwd, auth_type, resource, method, content='', additional_headers={}):
    """Issue an HTTP request and return [code, body, status].

    code 0: success; 1: HTTP-level failure or retry exhaustion;
    2: URLError; 3/4: unexpected errors. Retries up to 3 times when the
    underlying httplib connection reports ResponseNotReady.
    NOTE(review): the mutable default additional_headers is only read here,
    never mutated, so it is harmless in this function.
    NOTE(review): the debug line below logs credentials — consider redacting.
    """
    callid = str(uuid.uuid1())
    logger.debug("[%s]call_req('%s','%s','%s',%s,'%s','%s','%s')" % (
        callid, base_url, user, passwd, auth_type, resource, method, content))
    ret = None
    resp_status = ''
    try:
        full_url = combine_url(base_url, resource)
        headers = {'content-type': 'application/json', 'accept': 'application/json'}
        if user:
            # Python-2-only idiom: str.encode("base64") builds the Basic auth token
            headers['Authorization'] = 'Basic ' + ('%s:%s' % (user, passwd)).encode("base64")
        ca_certs = None
        if additional_headers:
            headers.update(additional_headers)
        for retry_times in range(3):
            # certificate validation is disabled when no auth is requested
            http = httplib2.Http(ca_certs=ca_certs, disable_ssl_certificate_validation=(auth_type == rest_no_auth))
            http.follow_all_redirects = True
            try:
                resp, resp_content = http.request(full_url, method=method.upper(), body=content, headers=headers)
                resp_status, resp_body = resp['status'], resp_content
                logger.debug("[%s][%d]status=%s)" % (callid, retry_times, resp_status))
                if headers['accept'] == 'application/json':
                    resp_body = resp_content.decode('UTF-8')
                    logger.debug("resp_body=%s", resp_body)
                if resp_status in status_ok_list:
                    ret = [0, resp_body, resp_status]
                else:
                    ret = [1, resp_body, resp_status]
                break
            except Exception as ex:
                # only ResponseNotReady is retried; everything else re-raises
                if 'httplib.ResponseNotReady' in str(sys.exc_info()):
                    logger.debug("retry_times=%d", retry_times)
                    logger.error(traceback.format_exc())
                    ret = [1, "Unable to connect to %s" % full_url, resp_status]
                    continue
                raise ex
    except urllib2.URLError as err:
        ret = [2, str(err), resp_status]
    except Exception as ex:
        logger.error(traceback.format_exc())
        logger.error("[%s]ret=%s" % (callid, str(sys.exc_info())))
        res_info = str(sys.exc_info())
        if 'httplib.ResponseNotReady' in res_info:
            res_info = "The URL[%s] request failed or is not responding." % full_url
        ret = [3, res_info, resp_status]
    except:
        # last-resort catch-all (Python 2 can raise non-Exception objects)
        logger.error(traceback.format_exc())
        ret = [4, str(sys.exc_info()), resp_status]

    logger.debug("[%s]ret=%s" % (callid, str(ret)))
    return ret
+
+
def req_by_msb(resource, method, content=''):
    """Send an unauthenticated request to the given resource via the MSB gateway."""
    msb_base = "http://%s:%s/" % (MSB_SERVICE_IP, MSB_SERVICE_PORT)
    return call_req(msb_base, "", "", rest_no_auth, resource, method, content)
+
+
def upload_by_msb(resource, method, file_data={}):
    """Upload raw file data through the MSB gateway.

    Returns [0, body, status] on success, [1, message, status] on failure.
    """
    full_url = "http://%s:%s/%s" % (MSB_SERVICE_IP, MSB_SERVICE_PORT, resource)
    headers = {'Content-Type': 'application/octet-stream'}
    resp, resp_content = httplib2.Http().request(full_url, method=method.upper(), body=file_data, headers=headers)
    resp_status = resp['status']
    resp_body = resp_content.decode('UTF-8')
    if resp_status not in status_ok_list:
        logger.error("Status code is %s, detail is %s.", resp_status, resp_body)
        return [1, "Failed to upload file.", resp_status]
    logger.debug("resp_body=%s", resp_body)
    return [0, resp_body, resp_status]
+
+
def combine_url(base_url, resource):
    """Join base_url and resource with exactly one slash between them."""
    base_has_slash = base_url.endswith('/')
    res_has_slash = resource.startswith('/')
    if base_has_slash and res_has_slash:
        # drop the duplicate slash
        return base_url[:-1] + resource
    if base_has_slash or res_has_slash:
        return base_url + resource
    return base_url + '/' + resource
diff --git a/genericparser/pub/utils/syscomm.py b/genericparser/pub/utils/syscomm.py
new file mode 100644 (file)
index 0000000..89219ec
--- /dev/null
@@ -0,0 +1,19 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import inspect
+
+
def fun_name():
    """Return the name of the function that called fun_name()."""
    # Walk one frame up instead of inspect.stack(): same caller name
    # without building (and discarding) the whole stack summary list.
    return inspect.currentframe().f_back.f_code.co_name
diff --git a/genericparser/pub/utils/tests.py b/genericparser/pub/utils/tests.py
new file mode 100644 (file)
index 0000000..7f8a391
--- /dev/null
@@ -0,0 +1,221 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import platform
+import unittest
+import mock
+import fileutil
+import urllib2
+import syscomm
+import timeutil
+import values
+
+from genericparser.pub.database.models import JobStatusModel, JobModel
+from genericparser.pub.utils.jobutil import JobUtil
+
+
class MockReq():
    """Minimal stand-in for a urllib2 response object used by the download test."""

    def read(self):
        # fixed payload consumed by download_file_from_http
        return "1"

    def close(self):
        # nothing to release
        pass
+
+
class UtilsTest(unittest.TestCase):
    """Unit tests for pub.utils: fileutil, jobutil, syscomm, timeutil, values.

    Tests that touch JobModel/JobStatusModel expect a working Django test
    database; each such test clears its rows before and after running.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_create_and_delete_dir(self):
        # make_dirs must be idempotent; delete_dirs removes the whole tree
        dirs = "abc/def/hij"
        fileutil.make_dirs(dirs)
        fileutil.make_dirs(dirs)
        fileutil.delete_dirs(dirs)

    @mock.patch.object(urllib2, 'urlopen')
    def test_download_file_from_http(self, mock_urlopen):
        # urlopen is mocked so no network access happens
        mock_urlopen.return_value = MockReq()
        fileutil.delete_dirs("abc")
        is_ok, f_name = fileutil.download_file_from_http("1", "abc", "1.txt")
        self.assertTrue(is_ok)
        if 'Windows' in platform.system():
            self.assertTrue(f_name.endswith("abc\\1.txt"))
        else:
            self.assertTrue(f_name.endswith("abc/1.txt"))
        fileutil.delete_dirs("abc")

    def test_query_job_status(self):
        # default query returns only the newest row; with index_id, all newer rows
        job_id = "1"
        JobStatusModel.objects.filter().delete()
        JobStatusModel(
            indexid=1,
            jobid=job_id,
            status="success",
            progress=10
        ).save()
        JobStatusModel(
            indexid=2,
            jobid=job_id,
            status="success",
            progress=50
        ).save()
        JobStatusModel(
            indexid=3,
            jobid=job_id,
            status="success",
            progress=100
        ).save()
        jobs = JobUtil.query_job_status(job_id)
        self.assertEqual(1, len(jobs))
        self.assertEqual(3, jobs[0].indexid)
        jobs = JobUtil.query_job_status(job_id, 1)
        self.assertEqual(2, len(jobs))
        self.assertEqual(3, jobs[0].indexid)
        self.assertEqual(2, jobs[1].indexid)
        JobStatusModel.objects.filter().delete()

    def test_is_job_exists(self):
        job_id = "1"
        JobModel.objects.filter().delete()
        JobModel(
            jobid=job_id,
            jobtype="1",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        self.assertTrue(JobUtil.is_job_exists(job_id))
        JobModel.objects.filter().delete()

    def test_create_job(self):
        # an explicit job_id is used verbatim (stored as string "5")
        job_id = "5"
        JobModel.objects.filter().delete()
        JobUtil.create_job(
            inst_type="1",
            jobaction="2",
            inst_id="3",
            user="4",
            job_id=5,
            res_name="6")
        self.assertEqual(1, len(JobModel.objects.filter(jobid=job_id)))
        JobModel.objects.filter().delete()

    def test_clear_job(self):
        job_id = "1"
        JobModel.objects.filter().delete()
        JobModel(
            jobid=job_id,
            jobtype="1",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobUtil.clear_job(job_id)
        self.assertEqual(0, len(JobModel.objects.filter(jobid=job_id)))

    def test_add_job_status_when_job_is_not_created(self):
        # adding a status to an unknown job must raise
        JobModel.objects.filter().delete()
        self.assertRaises(
            Exception,
            JobUtil.add_job_status,
            job_id="1",
            progress=1,
            status_decs="2",
            error_code="0"
        )

    def test_add_job_status_normal(self):
        job_id = "1"
        JobModel.objects.filter().delete()
        JobStatusModel.objects.filter().delete()
        JobModel(
            jobid=job_id,
            jobtype="1",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobUtil.add_job_status(
            job_id="1",
            progress=1,
            status_decs="2",
            error_code="0"
        )
        self.assertEqual(1, len(JobStatusModel.objects.filter(jobid=job_id)))
        JobStatusModel.objects.filter().delete()
        JobModel.objects.filter().delete()

    def test_clear_job_status(self):
        job_id = "1"
        JobStatusModel.objects.filter().delete()
        JobStatusModel(
            indexid=1,
            jobid=job_id,
            status="success",
            progress=10
        ).save()
        JobUtil.clear_job_status(job_id)
        self.assertEqual(0, len(JobStatusModel.objects.filter(jobid=job_id)))

    def test_get_unfinished_jobs(self):
        # all three jobs share resid/jobtype and are still PROCESSING
        JobModel.objects.filter().delete()
        JobModel(
            jobid="11",
            jobtype="InstVnf",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobModel(
            jobid="22",
            jobtype="InstVnf",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        JobModel(
            jobid="33",
            jobtype="InstVnf",
            jobaction="2",
            resid="3",
            status=0
        ).save()
        progresses = JobUtil.get_unfinished_jobs(
            url_prefix="/vnfinst",
            inst_id="3",
            inst_type="InstVnf"
        )
        expect_progresses = ['/vnfinst/11', '/vnfinst/22', '/vnfinst/33']
        self.assertEqual(expect_progresses, progresses)
        JobModel.objects.filter().delete()

    def test_fun_name(self):
        # fun_name reports the caller's function name
        self.assertEqual("test_fun_name", syscomm.fun_name())

    def test_now_time(self):
        self.assertIn(":", timeutil.now_time())
        self.assertIn("-", timeutil.now_time())

    def test_ignore_case_get(self):
        # lookups are case-insensitive; the 4th argument is the default
        data = {
            "Abc": "def",
            "HIG": "klm"
        }
        self.assertEqual("def", values.ignore_case_get(data, 'ABC'))
        self.assertEqual("def", values.ignore_case_get(data, 'abc'))
        self.assertEqual("klm", values.ignore_case_get(data, 'hig'))
        self.assertEqual("bbb", values.ignore_case_get(data, 'aaa', 'bbb'))
diff --git a/genericparser/pub/utils/timeutil.py b/genericparser/pub/utils/timeutil.py
new file mode 100644 (file)
index 0000000..1d97e9d
--- /dev/null
@@ -0,0 +1,19 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+
+
def now_time(fmt="%Y-%m-%d %H:%M:%S"):
    """Return the current local time rendered with strftime format *fmt*."""
    current = datetime.datetime.now()
    return current.strftime(fmt)
diff --git a/genericparser/pub/utils/toscaparsers/__init__.py b/genericparser/pub/utils/toscaparsers/__init__.py
new file mode 100644 (file)
index 0000000..4b73f48
--- /dev/null
@@ -0,0 +1,54 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+
+from genericparser.pub.utils.toscaparsers.nsdmodel import NsdInfoModel
+from genericparser.pub.utils.toscaparsers.pnfmodel import PnfdInfoModel
+from genericparser.pub.utils.toscaparsers.sdmodel import SdInfoModel
+from genericparser.pub.utils.toscaparsers.vnfdmodel import EtsiVnfdInfoModel
+
+
def parse_nsd(path, input_parameters=None):
    """Parse the NSD package at *path* and return its model as a JSON string.

    :param path: local path or ftp/sftp/http URL of the package.
    :param input_parameters: optional template inputs (list of {key, value}
        items or a dict). Defaults to no inputs; the original used a mutable
        default ``[]``, which is shared across calls.
    :return: JSON text of the parsed model, with nulls blanked out.
    """
    if input_parameters is None:
        input_parameters = []
    tosca_obj = NsdInfoModel(path, input_parameters).model
    strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
    # Rewrites JSON nulls as empty strings. NOTE(review): this is a textual
    # substitution and would also alter a literal ': null' inside a string value.
    strResponse = strResponse.replace(': null', ': ""')
    return strResponse
+
+
def parse_sd(path, input_parameters=None):
    """Parse the service descriptor package at *path* into a JSON string.

    :param path: local path or ftp/sftp/http URL of the package.
    :param input_parameters: optional template inputs; defaults to no inputs
        (the original used a shared mutable default ``[]``).
    :return: JSON text of the SdInfoModel, with nulls blanked out.
    """
    if input_parameters is None:
        input_parameters = []
    tosca_obj = SdInfoModel(path, input_parameters)
    strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
    # Textual null -> "" rewrite, kept for parity with the other parsers.
    strResponse = strResponse.replace(': null', ': ""')
    return strResponse
+
+
def parse_vnfd(path, input_parameters=None, isETSI=True):
    """Parse the VNFD package at *path* into a JSON string.

    :param path: local path or ftp/sftp/http URL of the package.
    :param input_parameters: optional template inputs; defaults to no inputs
        (the original used a shared mutable default ``[]``).
    :param isETSI: when False the package is not parsed and the JSON of an
        empty dict is returned.
    :return: JSON text of the model, with nulls blanked out.
    """
    if input_parameters is None:
        input_parameters = []
    if isETSI:
        tosca_obj = EtsiVnfdInfoModel(path, input_parameters)
    else:
        tosca_obj = {}
    strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
    strResponse = strResponse.replace(': null', ': ""')
    return strResponse
+
+
def parse_pnfd(path, input_parameters=None, isETSI=True):
    """Parse the PNFD package at *path* into a JSON string.

    :param path: local path or ftp/sftp/http URL of the package.
    :param input_parameters: optional template inputs; defaults to no inputs
        (the original used a shared mutable default ``[]``).
    :param isETSI: when False the package is not parsed and the JSON of an
        empty dict is returned.
    :return: JSON text of the model, with nulls blanked out.
    """
    if input_parameters is None:
        input_parameters = []
    if isETSI:
        tosca_obj = PnfdInfoModel(path, input_parameters)
    else:
        tosca_obj = {}
    strResponse = json.dumps(tosca_obj, default=lambda obj: obj.__dict__)
    strResponse = strResponse.replace(': null', ': ""')
    return strResponse
diff --git a/genericparser/pub/utils/toscaparsers/basemodel.py b/genericparser/pub/utils/toscaparsers/basemodel.py
new file mode 100644 (file)
index 0000000..a5c1f45
--- /dev/null
@@ -0,0 +1,524 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ftplib
+import json
+import logging
+import os
+import re
+import shutil
+import urllib
+
+import paramiko
+from toscaparser.tosca_template import ToscaTemplate
+from toscaparser.properties import Property
+from toscaparser.functions import Function, Concat, GetInput, get_function, function_mappings
+from genericparser.pub.utils.toscaparsers.graph import Graph
+
+from genericparser.pub.utils.toscaparsers.dataentityext import DataEntityExt
+
logger = logging.getLogger(__name__)

# Section keys as they appear in TOSCA templates and in the generated
# model dicts built by BaseInfoModel.
METADATA = "metadata"
PROPERTIES = "properties"
DESCRIPTION = "description"
REQUIREMENTS = "requirements"
INTERFACES = "interfaces"
TOPOLOGY_TEMPLATE = "topology_template"
INPUTS = "inputs"
CAPABILITIES = "capabilities"
ATTRIBUTES = "attributes"
ARTIFACTS = "artifacts"
DERIVED_FROM = "derived_from"

# Keys/values used when walking node- and group-type derivation chains.
NODE_NAME = "name"
NODE_TYPE = "nodeType"
NODE_ROOT = "tosca.nodes.Root"
GROUP_TYPE = "groupType"
GROUPS_ROOT = "tosca.groups.Root"
+
+
class BaseInfoModel(object):
    """Shared base for the TOSCA descriptor parsers (NSD/VNFD/PNFD/SD).

    Loads a template from a local path or an ftp/sftp/http URL, builds a
    toscaparser ToscaTemplate from it, and provides helpers that concrete
    model classes use to turn node templates into plain dicts.

    NOTE(review): the module uses Python 2-only constructs (print
    statements, Exception.message, urllib.urlretrieve) -- it will not run
    under Python 3 as written.
    """

    def __init__(self, path=None, params=None, tosca=None):
        # Reuse an already-built ToscaTemplate when given, otherwise build
        # one from path/params.
        if tosca:
            _tosca = tosca
        else:
            _tosca = self.buildToscaTemplate(path, params)
        self.description = getattr(_tosca, "description", "")
        self.parseModel(_tosca)

    def parseModel(self, tosca):
        """Hook for subclasses: populate model attributes from *tosca*."""
        pass

    def buildInputs(self, tosca):
        """Return the topology_template inputs section ({} when absent)."""
        topo = tosca.tpl.get(TOPOLOGY_TEMPLATE, None)
        return topo.get(INPUTS, {}) if topo else {}

    def buildToscaTemplate(self, path, params):
        """Fetch the package (if remote), validate params, build the template.

        A downloaded temporary copy is removed afterwards; a local *path*
        is left untouched.
        """
        file_name = None
        try:
            file_name = self._check_download_file(path)
            valid_params = self._validate_input_params(file_name, params)
            return self._create_tosca_template(file_name, valid_params)
        finally:
            if file_name is not None and file_name != path and os.path.exists(file_name):
                try:
                    os.remove(file_name)
                except Exception as e:
                    logger.error("Failed to parse package, error: %s", e.message)

    def _validate_input_params(self, path, params):
        """Coerce caller-supplied inputs to the types declared in the template.

        *params* may be a dict or a list of {"key": ..., "value": ...}
        items; only keys matching a declared template input are kept.
        """
        valid_params = {}
        inputs = {}
        if isinstance(params, list):
            for param in params:
                key = param.get('key', 'undefined')
                value = param.get('value', 'undefined')
                inputs[key] = value
            params = inputs

        if params:
            # Parse once without parameters just to learn the declared inputs.
            tmp = self._create_tosca_template(path, None)
            if isinstance(params, dict):
                for key, value in params.items():
                    if hasattr(tmp, 'inputs') and len(tmp.inputs) > 0:
                        for input_def in tmp.inputs:
                            if (input_def.name == key):
                                valid_params[key] = DataEntityExt.validate_datatype(input_def.type, value)
        return valid_params

    def _create_tosca_template(self, file_name, valid_params):
        """Build a ToscaTemplate and clean up its extracted temp directory.

        NOTE(review): the finally block prints the template's __dict__ to
        stdout -- looks like leftover debug output; also, exceptions from
        the constructor are swallowed and None is returned.
        """
        tosca_tpl = None
        try:
            tosca_tpl = ToscaTemplate(path=file_name,
                                      parsed_params=valid_params,
                                      no_required_paras_check=True,
                                      debug_mode=True)
        except Exception as e:
            print e.message
        finally:
            if tosca_tpl is not None and hasattr(tosca_tpl, "temp_dir") and os.path.exists(tosca_tpl.temp_dir):
                try:
                    shutil.rmtree(tosca_tpl.temp_dir)
                except Exception as e:
                    logger.error("Failed to create tosca template, error: %s", e.message)
                print "-----------------------------"
                print '\n'.join(['%s:%s' % item for item in tosca_tpl.__dict__.items()])
                print "-----------------------------"
            return tosca_tpl

    def _check_download_file(self, path):
        """Fetch *path* to a local file when it is an ftp/sftp/http URL.

        Returns the (possibly local) file name to parse.
        """
        if (path.startswith("ftp") or path.startswith("sftp")):
            return self.downloadFileFromFtpServer(path)
        elif (path.startswith("http")):
            return self.download_file_from_httpserver(path)
        return path

    def download_file_from_httpserver(self, path):
        """Download *path* via HTTP into the working dir; return local name."""
        path = path.encode("utf-8")
        tmps = str.split(path, '/')
        localFileName = tmps[len(tmps) - 1]
        urllib.urlretrieve(path, localFileName)
        return localFileName

    def downloadFileFromFtpServer(self, path):
        """Download via ftp/sftp. URL form: proto://user:pwd@host[:port]/file.

        NOTE(review): when the URL matches neither the 2- nor the
        3-segment split, userName/userPwd/hostIp/hostPort/remoteFileName
        are referenced below while unbound (NameError).
        """
        path = path.encode("utf-8")
        tmp = str.split(path, '://')
        protocol = tmp[0]
        tmp = str.split(tmp[1], ':')
        if len(tmp) == 2:
            # user:pwd@host/file -- port defaults from the protocol.
            userName = tmp[0]
            tmp = str.split(tmp[1], '@')
            userPwd = tmp[0]
            index = tmp[1].index('/')
            hostIp = tmp[1][0:index]
            remoteFileName = tmp[1][index:len(tmp[1])]
            if protocol.lower() == 'ftp':
                hostPort = 21
            else:
                hostPort = 22

        if len(tmp) == 3:
            # user:pwd@host:port/file -- explicit port.
            userName = tmp[0]
            userPwd = str.split(tmp[1], '@')[0]
            hostIp = str.split(tmp[1], '@')[1]
            index = tmp[2].index('/')
            hostPort = tmp[2][0:index]
            remoteFileName = tmp[2][index:len(tmp[2])]

        localFileName = str.split(remoteFileName, '/')
        localFileName = localFileName[len(localFileName) - 1]

        if protocol.lower() == 'sftp':
            self.sftp_get(userName, userPwd, hostIp, hostPort, remoteFileName, localFileName)
        else:
            self.ftp_get(userName, userPwd, hostIp, hostPort, remoteFileName, localFileName)
        return localFileName

    def sftp_get(self, userName, userPwd, hostIp, hostPort, remoteFileName, localFileName):
        """Copy remoteFileName to localFileName over SFTP (paramiko)."""
        t = None
        try:
            t = paramiko.Transport(hostIp, int(hostPort))
            t.connect(username=userName, password=userPwd)
            sftp = paramiko.SFTPClient.from_transport(t)
            sftp.get(remoteFileName, localFileName)
        finally:
            if t is not None:
                t.close()

    def ftp_get(self, userName, userPwd, hostIp, hostPort, remoteFileName, localFileName):
        """Copy remoteFileName to localFileName over plain FTP."""
        f = None
        try:
            ftp = ftplib.FTP()
            ftp.connect(hostIp, hostPort)
            ftp.login(userName, userPwd)
            f = open(localFileName, 'wb')
            ftp.retrbinary('RETR ' + remoteFileName, f.write, 1024)
            f.close()
        finally:
            # f.close() is idempotent, so the double close above is harmless.
            if f is not None:
                f.close()

    def buildMetadata(self, tosca):
        """Return the template-level metadata section ({} when absent)."""
        return tosca.tpl.get(METADATA, {}) if tosca else {}

    def buildNode(self, nodeTemplate, tosca):
        """Convert a toscaparser NodeTemplate into a plain dict.

        Collects name, type, description, metadata, resolved properties,
        requirements, capabilities, artifacts and (when present) interfaces.
        """
        inputs = tosca.inputs
        parsed_params = tosca.parsed_params
        ret = {}
        ret[NODE_NAME] = nodeTemplate.name
        ret[NODE_TYPE] = nodeTemplate.type
        if DESCRIPTION in nodeTemplate.entity_tpl:
            ret[DESCRIPTION] = nodeTemplate.entity_tpl[DESCRIPTION]
        else:
            ret[DESCRIPTION] = ''
        if METADATA in nodeTemplate.entity_tpl:
            ret[METADATA] = nodeTemplate.entity_tpl[METADATA]
        else:
            ret[METADATA] = ''
        props = self.buildProperties_ex(nodeTemplate, tosca.topology_template)
        ret[PROPERTIES] = self.verify_properties(props, inputs, parsed_params)
        ret[REQUIREMENTS] = self.build_requirements(nodeTemplate)
        self.buildCapabilities(nodeTemplate, inputs, ret)
        self.buildArtifacts(nodeTemplate, inputs, ret)
        interfaces = self.build_interfaces(nodeTemplate)
        if interfaces:
            ret[INTERFACES] = interfaces
        return ret

    def buildProperties(self, nodeTemplate, parsed_params):
        """Collect the node template's properties into a plain dict.

        get_input references are resolved against *parsed_params* when a
        result is available; otherwise they are kept as
        {<function name>: <input name>}. Entries from the attributes
        section are appended stringified.
        """
        properties = {}
        isMappingParams = parsed_params and len(parsed_params) > 0
        for k, item in nodeTemplate.get_properties().items():
            properties[k] = item.value
            if isinstance(item.value, GetInput):
                if item.value.result() and isMappingParams:
                    properties[k] = DataEntityExt.validate_datatype(item.type, item.value.result())
                else:
                    tmp = {}
                    tmp[item.value.name] = item.value.input_name
                    properties[k] = tmp
        if ATTRIBUTES in nodeTemplate.entity_tpl:
            for k, item in nodeTemplate.entity_tpl[ATTRIBUTES].items():
                properties[k] = str(item)
        return properties

    def buildProperties_ex(self, nodeTemplate, topology_template, properties=None):
        """Recursively evaluate a property tree into plain values.

        Resolves toscaparser Function instances (one level of nesting for
        Concat) and recurses into dicts/lists; non-string scalars are
        JSON-encoded.
        """
        if properties is None:
            properties = nodeTemplate.get_properties()
        _properties = {}
        if isinstance(properties, dict):
            for name, prop in properties.items():
                if isinstance(prop, Property):
                    if isinstance(prop.value, Function):
                        if isinstance(prop.value, Concat):  # support one layer inner function.
                            value_str = ''
                            for arg in prop.value.args:
                                if isinstance(arg, str):
                                    value_str += arg
                                elif isinstance(arg, dict):
                                    raw_func = {}
                                    for k, v in arg.items():
                                        func_args = []
                                        func_args.append(v)
                                        raw_func[k] = func_args
                                    func = get_function(topology_template, nodeTemplate, raw_func)
                                    value_str += str(func.result())
                            _properties[name] = value_str
                        else:
                            _properties[name] = prop.value.result()
                    elif isinstance(prop.value, dict) or isinstance(prop.value, list):
                        _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop.value)
                    elif prop.type == 'string':
                        _properties[name] = prop.value
                    else:
                        _properties[name] = json.dumps(prop.value)
                elif isinstance(prop, dict):
                    _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop)
                elif isinstance(prop, list):
                    _properties[name] = self.buildProperties_ex(nodeTemplate, topology_template, prop)
                elif name in function_mappings:
                    raw_func = {}
                    func_args = []
                    func_args.append(prop)
                    raw_func[name] = func_args
                    if name == 'CONCAT':
                        value_str = ''
                        for arg in prop:
                            if isinstance(arg, str):
                                value_str += arg
                            elif isinstance(arg, dict):
                                raw_func = {}
                                for k, v in arg.items():
                                    func_args = []
                                    func_args.append(v)
                                    raw_func[k] = func_args
                                value_str += str(
                                    get_function(topology_template, nodeTemplate, raw_func).result())
                                # NOTE(review): `value` is assigned but never
                                # stored into _properties or returned -- the
                                # CONCAT result appears to be dropped; confirm.
                                value = value_str
                    else:
                        return get_function(topology_template, nodeTemplate, raw_func).result()
                else:
                    _properties[name] = prop
        elif isinstance(properties, list):
            value = []
            for para in properties:
                if isinstance(para, dict) or isinstance(para, list):
                    value.append(self.buildProperties_ex(nodeTemplate, topology_template, para))
                else:
                    value.append(para)
            return value
        return _properties

    def verify_properties(self, props, inputs, parsed_params):
        """Substitute get_input references inside every property value."""
        ret_props = {}
        if (props and len(props) > 0):
            for key, value in props.items():
                ret_props[key] = self._verify_value(value, inputs, parsed_params)
        return ret_props

    def build_requirements(self, node_template):
        """Return the template's requirements as a list of {name: value}."""
        rets = []
        for req in node_template.requirements:
            for req_name, req_value in req.items():
                if (isinstance(req_value, dict)):
                    if ('node' in req_value and req_value['node'] not in node_template.templates):
                        continue  # No target requirement for aria parser, not add to result.
                rets.append({req_name: req_value})
        return rets

    def buildCapabilities(self, nodeTemplate, inputs, ret):
        """Add the capabilities section to *ret*, resolving get_input refs.

        Resolution is done by textual JSON substitution using the matching
        input definition's default value.
        NOTE(review): raises IndexError if a referenced input is not
        declared -- assumes templates are valid.
        """
        capabilities = json.dumps(nodeTemplate.entity_tpl.get(CAPABILITIES, None))
        match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', capabilities)
        for m in match:
            aa = [input_def for input_def in inputs if m == input_def.name][0]
            capabilities = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), capabilities, 1)
        if capabilities != 'null':
            ret[CAPABILITIES] = json.loads(capabilities)

    def buildArtifacts(self, nodeTemplate, inputs, ret):
        """Add the artifacts section to *ret*, resolving get_input refs the
        same way buildCapabilities does (textual JSON substitution)."""
        artifacts = json.dumps(nodeTemplate.entity_tpl.get('artifacts', None))
        match = re.findall(r'\{"get_input":\s*"([\w|\-]+)"\}', artifacts)
        for m in match:
            aa = [input_def for input_def in inputs if m == input_def.name][0]
            artifacts = re.sub(r'\{"get_input":\s*"([\w|\-]+)"\}', json.dumps(aa.default), artifacts, 1)
        if artifacts != 'null':
            ret[ARTIFACTS] = json.loads(artifacts)

    def build_interfaces(self, node_template):
        """Return the raw interfaces section, or None when absent."""
        if INTERFACES in node_template.entity_tpl:
            return node_template.entity_tpl[INTERFACES]
        return None

    def isNodeTypeX(self, node, nodeTypes, x):
        """True when node's type equals *x* or derives from it (walking
        derived_from up to tosca.nodes.Root or a self-reference)."""
        node_type = node[NODE_TYPE]
        while node_type != x:
            node_type_derived = node_type
            node_type = nodeTypes[node_type][DERIVED_FROM]
            if node_type == NODE_ROOT or node_type == node_type_derived:
                return False
        return True

    def get_requirement_node_name(self, req_value):
        """Return the target node name of a requirement value."""
        return self.get_prop_from_obj(req_value, 'node')

    def getRequirementByNodeName(self, nodeTemplates, storage_name, prop):
        """Return *prop* of the node named *storage_name* (None if absent)."""
        for node in nodeTemplates:
            if node[NODE_NAME] == storage_name:
                if prop in node:
                    return node[prop]

    def get_prop_from_obj(self, obj, prop):
        """Return *obj* itself when it is a string, obj[prop] when it is a
        dict containing *prop*, else None."""
        if isinstance(obj, str):
            return obj
        if (isinstance(obj, dict) and prop in obj):
            return obj[prop]
        return None

    def getNodeDependencys(self, node):
        """Return the node's 'dependency' requirement values."""
        return self.getRequirementByName(node, 'dependency')

    def getRequirementByName(self, node, requirementName):
        """Return every requirement value registered under *requirementName*."""
        requirements = []
        if REQUIREMENTS in node:
            for item in node[REQUIREMENTS]:
                for key, value in item.items():
                    if key == requirementName:
                        requirements.append(value)
        return requirements

    def _verify_value(self, value, inputs, parsed_params):
        """Resolve get_input references inside *value* (str/list/dict)."""
        if value == '{}':
            return ''
        if isinstance(value, str):
            return self._verify_string(inputs, parsed_params, value)
        if isinstance(value, list) or isinstance(value, dict):
            return self._verify_object(value, inputs, parsed_params)
        return value

    def _verify_object(self, value, inputs, parsed_params):
        # Round-trip through JSON so the string substitution in
        # _verify_string applies to nested structures too.
        s = self._verify_string(inputs, parsed_params, json.dumps(value))
        return json.loads(s)

    def _get_input_name(self, getInput):
        """Extract the input name from a '{"get_input": "<name>"}' snippet."""
        input_name = getInput.split(':')[1]
        input_name = input_name.strip()
        return input_name.replace('"', '').replace('}', '')

    def _verify_string(self, inputs, parsed_params, value):
        """Replace each get_input reference in *value* with its parameter
        value, falling back to the input definition's default."""
        getInputs = re.findall(r'{"get_input": "[a-zA-Z_0-9]+"}', value)
        for getInput in getInputs:
            input_name = self._get_input_name(getInput)
            if parsed_params and input_name in parsed_params:
                value = value.replace(getInput, json.dumps(parsed_params[input_name]))
            else:
                for input_def in inputs:
                    if input_def.default and input_name == input_def.name:
                        value = value.replace(getInput, json.dumps(input_def.default))
        return value

    def get_node_by_name(self, node_templates, name):
        """Return the node dict with the given name, or None."""
        for node in node_templates:
            if node[NODE_NAME] == name:
                return node
        return None

    def getCapabilityByName(self, node, capabilityName):
        """Return the named capability of *node*, or None."""
        if CAPABILITIES in node and capabilityName in node[CAPABILITIES]:
            return node[CAPABILITIES][capabilityName]
        return None

    def get_base_path(self, tosca):
        """Return the directory part of the template's path."""
        fpath, fname = os.path.split(tosca.path)
        return fpath

    def build_artifacts(self, node):
        """Flatten the node's artifacts into a list of dicts.

        Each entry carries artifact_name and file; dict-valued artifacts
        additionally contribute their own keys.
        """
        rets = []
        if ARTIFACTS in node and len(node[ARTIFACTS]) > 0:
            artifacts = node[ARTIFACTS]
            for name, value in artifacts.items():
                ret = {}
                ret['artifact_name'] = name
                ret['file'] = value
                if isinstance(value, dict):
                    ret.update(value)
                rets.append(ret)
        else:
            # TODO It is workaround for SDC-1900: fabricate a default
            # sw_image artifact when the template declares none.
            logger.error("VCPE specific code")
            ret = {}
            ret['artifact_name'] = "sw_image"
            ret['file'] = "ubuntu_16.04"
            ret['type'] = "tosca.artifacts.nfv.SwImage"
            rets.append(ret)

        return rets

    def get_node_by_req(self, node_templates, req):
        """Return the node dict targeted by requirement *req*."""
        req_node_name = self.get_requirement_node_name(req)
        return self.get_node_by_name(node_templates, req_node_name)

    def isGroupTypeX(self, group, groupTypes, x):
        """True when group's type equals *x* or derives from it (walking
        derived_from up to tosca.groups.Root or a self-reference)."""
        group_type = group[GROUP_TYPE]
        while group_type != x:
            group_type_derived = group_type
            group_type = groupTypes[group_type][DERIVED_FROM]
            if group_type == GROUPS_ROOT or group_type == group_type_derived:
                return False
        return True

    def setTargetValues(self, dict_target, target_keys, dict_source, source_keys):
        """Copy dict_source[source_keys[i]] into dict_target[target_keys[i]]
        positionally, defaulting missing source keys to ''."""
        i = 0
        for item in source_keys:
            dict_target[target_keys[i]] = dict_source.get(item, "")
            i += 1
        return dict_target

    def get_deploy_graph(self, tosca, relations):
        """Build a {node: [dependency, ...]} deploy graph from the template,
        following the relationship/capability names in *relations*."""
        nodes = tosca.graph.nodetemplates
        graph = Graph()
        for node in nodes:
            self._build_deploy_path(node, [], graph, relations)
        return graph.to_dict()

    def _build_deploy_path(self, node, node_parent, graph, relations):
        """Add *node* and its relation-matched requirement targets to *graph*.

        NOTE(review): the relations[0] and relations[1] branches perform the
        identical graph.add_node(next_node, [node.name]) -- confirm whether
        the second branch was meant to reverse the edge direction.
        """
        graph.add_node(node.name, node_parent)
        type_require_set = {}
        type_requires = node.type_definition.requirements
        for type_require in type_requires:
            type_require_set.update(type_require)
        for requirement in node.requirements:
            for k in requirement.keys():
                if type_require_set[k].get('relationship', None) in relations[0] or type_require_set[k].get('capability', None) in relations[0]:
                    if isinstance(requirement[k], dict):
                        next_node = requirement[k].get('node', None)
                    else:
                        next_node = requirement[k]
                    graph.add_node(next_node, [node.name])
                if type_require_set[k].get('relationship', None) in relations[1]:
                    if isinstance(requirement[k], dict):
                        next_node = requirement[k].get('node', None)
                    else:
                        next_node = requirement[k]
                    graph.add_node(next_node, [node.name])

    def get_substitution_mappings(self, tosca):
        """Return the substitution_mappings section as a normalised dict.

        Metadata falls back to the template-level metadata when the
        substitution mapping declares none.
        """
        node = {
            'properties': {},
            'requirements': {},
            'capabilities': {},
            'metadata': {}
        }
        metadata = None
        substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
        if substitution_mappings:
            node['type'] = substitution_mappings['node_type']
            node['properties'] = substitution_mappings.get('properties', {})
            node['requirements'] = substitution_mappings.get('requirements', {})
            node['capabilities'] = substitution_mappings.get('capabilities', {})
            metadata = substitution_mappings.get('metadata', {})
        node['metadata'] = metadata if metadata and metadata != {} else self.buildMetadata(tosca)
        return node
diff --git a/genericparser/pub/utils/toscaparsers/const.py b/genericparser/pub/utils/toscaparsers/const.py
new file mode 100644 (file)
index 0000000..9c61c48
--- /dev/null
@@ -0,0 +1,30 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Metadata keys expected in an ETSI NSD template.
NS_METADATA_SECTIONS = (NS_UUID, NS_INVARIANTUUID, NS_NAME, NS_VERSION, NS_DESIGNER, NSD_RELEASE_DATE) =\
    ("nsd_id", "nsd_invariant_id", "nsd_name", "nsd_file_structure_version", "nsd_designer", "nsd_release_date_time")
# ("id", "invariant_id", "name", "version", "designer", "description")

# Metadata keys expected in an SDC/ECOMP service template.
SDC_SERVICE_METADATA_SECTIONS = (SRV_UUID, SRV_INVARIANTUUID, SRV_NAME) = ('UUID', 'invariantUUID', 'name')

# PNF descriptor metadata keys and top-level sections of the PNF model.
PNF_METADATA_SECTIONS = (PNF_UUID, PNF_INVARIANTUUID, PNF_NAME, PNF_METADATA_DESCRIPTION, PNF_VERSION, PNF_PROVIDER) = \
    ("descriptor_id", "descriptor_invariant_id", "name", "description", "version", "provider")
PNF_SECTIONS = (PNF_ID, PNF_METADATA, PNF_PROPERTIES, PNF_DESCRIPTION) = \
    ("pnf_id", "metadata", "properties", "description")

# Top-level sections of the generated VNF model.
VNF_SECTIONS = (VNF_ID, VNF_METADATA, VNF_PROPERTIES, VNF_DESCRIPTION) = \
    ("vnf_id", "metadata", "properties", "description")

# Top-level sections of the generated virtual-link model.
VL_SECTIONS = (VL_ID, VL_METADATA, VL_PROPERTIES, VL_DESCRIPTION) = \
    ("vl_id", "metadata", "properties", "description")
diff --git a/genericparser/pub/utils/toscaparsers/dataentityext.py b/genericparser/pub/utils/toscaparsers/dataentityext.py
new file mode 100644 (file)
index 0000000..825e93b
--- /dev/null
@@ -0,0 +1,33 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from toscaparser.dataentity import DataEntity
+from toscaparser.elements.constraints import Schema
+from toscaparser.common.exception import ExceptionCollector
+
+
class DataEntityExt(object):
    '''A complex data value entity ext.

    Thin wrapper over toscaparser's DataEntity.validate_datatype that
    coerces string and float values itself before delegating.
    '''

    @staticmethod
    def validate_datatype(type, value, entry_schema=None, custom_def=None):
        """Coerce *value* to the TOSCA *type*; delegate other types upstream.

        NOTE(review): falsy values (0, '', None, []) skip validation and are
        returned unchanged -- confirm that is intentional.
        """
        if value:
            if (type == Schema.STRING):
                return str(value)
            elif type == Schema.FLOAT:
                try:
                    return float(value)
                except (TypeError, ValueError):
                    # Only conversion failures are expected here; the original
                    # caught bare Exception, which would also mask unrelated
                    # errors. Message grammar fixed ("a float").
                    ExceptionCollector.appendException(ValueError(('"%s" is not a float.') % value))
            return DataEntity.validate_datatype(type, value, entry_schema, custom_def)
        return value
diff --git a/genericparser/pub/utils/toscaparsers/graph.py b/genericparser/pub/utils/toscaparsers/graph.py
new file mode 100644 (file)
index 0000000..6d38d12
--- /dev/null
@@ -0,0 +1,74 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import deque
+from collections import OrderedDict
+
+
class Graph(object):
    """A directed graph over hashable nodes, used for deploy dependencies.

    ``self.graph`` maps each node to the set of nodes it depends on.
    Node insertion order is preserved (OrderedDict) so traversals and the
    topological sort are reproducible.
    """

    def __init__(self, graph_dict=None):
        self.graph = OrderedDict()
        if graph_dict:
            # .items() works on both Python 2 and 3; the original used the
            # Python 2-only iteritems(), which raises AttributeError on py3.
            for node, dep_nodes in graph_dict.items():
                self.add_node(node, dep_nodes)

    def add_node(self, node, dep_nodes):
        """Register *node* and, when *dep_nodes* is a list, its dependencies.

        Unknown dependency nodes are registered on the fly with no
        dependencies of their own.
        """
        if node not in self.graph:
            self.graph[node] = set()
        if isinstance(dep_nodes, list):
            for dep_node in dep_nodes:
                if dep_node not in self.graph:
                    self.graph[dep_node] = set()
                # set.add is idempotent; no membership pre-check needed.
                self.graph[node].add(dep_node)

    def get_pre_nodes(self, node):
        """Return every node that lists *node* among its dependencies."""
        return [k for k in self.graph if node in self.graph[k]]

    def topo_sort(self):
        """Kahn's-algorithm topological sort.

        Returns the nodes in dependency order, or None when the graph
        contains a cycle.
        """
        degree = dict.fromkeys(self.graph, 0)
        for node in self.graph:
            for dependent in self.graph[node]:
                degree[dependent] += 1

        # Seed with the zero-in-degree nodes, consumed FIFO.
        queue = deque(node for node in degree if degree[node] == 0)

        sort_list = []
        while queue:
            node = queue.popleft()
            sort_list.append(node)
            for dependent in self.graph[node]:
                degree[dependent] -= 1
                if degree[dependent] == 0:
                    queue.append(dependent)

        # All nodes emitted <=> acyclic; otherwise signal a cycle with None.
        if len(sort_list) == len(self.graph):
            return sort_list
        return None

    def to_dict(self):
        """Return a plain {node: [dependency, ...]} copy of the graph."""
        result = {}
        for node, dependents in self.graph.items():
            result[node] = list(dependents)
        return result
diff --git a/genericparser/pub/utils/toscaparsers/nsdmodel.py b/genericparser/pub/utils/toscaparsers/nsdmodel.py
new file mode 100644 (file)
index 0000000..fe522a7
--- /dev/null
@@ -0,0 +1,220 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
+from genericparser.pub.utils.toscaparsers.const import SDC_SERVICE_METADATA_SECTIONS
+from genericparser.pub.utils.toscaparsers.servicemodel import SdcServiceModel
+
+logger = logging.getLogger(__name__)
+
# TOSCA node-type URIs recognised in an ETSI NSD topology.
SECTIONS = (NS_TYPE, NS_VNF_TYPE, NS_VL_TYPE, NS_PNF_TYPE, NS_NFP_TYPE, NS_VNFFG_TYPE) = \
    ('tosca.nodes.nfv.NS',
     'tosca.nodes.nfv.VNF',
     'tosca.nodes.nfv.NsVirtualLink',
     'tosca.nodes.nfv.PNF',
     'tosca.nodes.nfv.NFP',
     'tosca.nodes.nfv.VNFFG')

# Relationship-type lists handed to get_deploy_graph() when building the
# NS deploy graph; the second (empty) list presumably covers group-level
# relationships -- TODO confirm against BaseInfoModel.
NFV_NS_RELATIONSHIPS = [["tosca.relationships.nfv.VirtualLinksTo", "tosca.relationships.DependsOn"], []]
+
+
class NsdInfoModel(BaseInfoModel):
    """Dispatching NSD parser.

    Inspects the template metadata and delegates to the ETSI model or
    the SDC (ECOMP) service model accordingly; self.model stays {} when
    neither matches.
    """

    def __init__(self, path, params):
        super(NsdInfoModel, self).__init__(path, params)

    def parseModel(self, tosca):
        """Pick the concrete model implementation from the metadata."""
        metadata = self.buildMetadata(tosca)
        self.model = {}
        if self._is_etsi(metadata):
            self.model = EtsiNsdInfoModel(tosca)
        elif self._is_ecomp(metadata):
            self.model = SdcServiceModel(tosca)

    def _is_etsi(self, metadata):
        """True when every mandatory ETSI NSD metadata key is present."""
        NS_METADATA_MUST = ["nsd_invariant_id", "nsd_name", "nsd_file_structure_version", "nsd_designer", "nsd_release_date_time"]
        # all() replaces the original count-and-compare ternary.
        return all(key in metadata for key in NS_METADATA_MUST)

    def _is_ecomp(self, metadata):
        """True when every SDC service metadata key is present."""
        return all(key in metadata for key in SDC_SERVICE_METADATA_SECTIONS)
+
+
class EtsiNsdInfoModel(BaseInfoModel):
    """Parses an ETSI NSD TOSCA template into plain dict structures:
    vnfs, pnfs, vls, forwarding paths, vnffgs, exposed endpoints,
    nested NSs and the deploy graph."""

    def __init__(self, tosca):
        super(EtsiNsdInfoModel, self).__init__(tosca=tosca)

    def parseModel(self, tosca):
        self.metadata = self.buildMetadata(tosca)
        self.ns = self._build_ns(tosca)
        self.inputs = self.buildInputs(tosca)
        # Materialize as a list: on Python 3 a bare map() is a one-shot
        # iterator and would be exhausted by the first _get_all_* call,
        # leaving every later collection silently empty.
        nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
        types = tosca.topology_template.custom_defs
        self.basepath = self.get_base_path(tosca)
        self.vnfs = self._get_all_vnf(nodeTemplates, types)
        self.pnfs = self._get_all_pnf(nodeTemplates, types)
        self.vls = self._get_all_vl(nodeTemplates, types)
        self.fps = self._get_all_fp(nodeTemplates, types)
        self.vnffgs = self._get_all_vnffg(tosca.topology_template.groups, types)
        self.ns_exposed = self._get_all_endpoint_exposed(tosca.topology_template)
        self.nested_ns = self._get_all_nested_ns(nodeTemplates, types)
        self.graph = self.get_deploy_graph(tosca, NFV_NS_RELATIONSHIPS)

    def _get_all_vnf(self, nodeTemplates, node_types):
        """Collect every VNF node; back-fills properties['id'] from
        'descriptor_id' when no id is set."""
        vnfs = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, NS_VNF_TYPE):
                vnf = {
                    'vnf_id': node['name'],
                    'description': node['description'],
                    'properties': node['properties'],
                }
                if not vnf['properties'].get('id', None):
                    vnf['properties']['id'] = vnf['properties'].get('descriptor_id', None)
                # Compute once; copy so the two entries stay independent lists.
                networks = self._get_networks(node, node_types)
                vnf['dependencies'] = networks
                vnf['networks'] = list(networks)
                vnfs.append(vnf)
        return vnfs

    def _get_all_pnf(self, nodeTemplates, node_types):
        """Collect every PNF node with its connected networks."""
        pnfs = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, NS_PNF_TYPE):
                pnfs.append({
                    'pnf_id': node['name'],
                    'description': node['description'],
                    'properties': node['properties'],
                    'networks': self._get_networks(node, node_types),
                })
        return pnfs

    def _get_all_vl(self, nodeTemplates, node_types):
        """Collect every virtual-link node."""
        return [
            {
                'vl_id': node['name'],
                'description': node['description'],
                'properties': node['properties'],
            }
            for node in nodeTemplates
            if self.isNodeTypeX(node, node_types, NS_VL_TYPE)
        ]

    def _get_all_fp(self, nodeTemplates, node_types):
        """Collect every forwarding-path node with its forwarder list."""
        fps = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, NS_NFP_TYPE):
                fps.append({
                    'fp_id': node['name'],
                    'description': node['description'],
                    'properties': node['properties'],
                    'forwarder_list': self._getForwarderList(node, nodeTemplates, node_types),
                })
        return fps

    def _getForwarderList(self, node, node_templates, node_types):
        """Resolve the 'forwarder' requirements of an NFP node into
        {type, node_name, capability} entries."""
        forwarderList = []
        for item in node.get('requirements', []):
            for key, value in item.items():
                if key != 'forwarder':
                    continue
                tmpnode = self.get_node_by_req(node_templates, value)
                # A forwarder is either a PNF or, by default, a VNF.
                # ('fwd_type' avoids shadowing the builtin 'type'.)
                fwd_type = 'pnf' if self.isNodeTypeX(tmpnode, node_types, NS_PNF_TYPE) else 'vnf'
                req_node_name = self.get_requirement_node_name(value)
                if isinstance(value, dict) and 'capability' in value:
                    capability = value['capability']
                else:
                    capability = ""
                forwarderList.append({"type": fwd_type, "node_name": req_node_name, "capability": capability})
        return forwarderList

    def _get_all_vnffg(self, groups, group_types):
        """Collect every VNFFG group from the topology template groups."""
        vnffgs = []
        for group in groups:
            if self.isGroupTypeX(group, group_types, NS_VNFFG_TYPE):
                vnffg = {
                    'vnffg_id': group.name,
                    'description': group.description,
                }
                # 'properties' is optional on a group template.
                if 'properties' in group.tpl:
                    vnffg['properties'] = group.tpl['properties']
                vnffg['members'] = group.members
                vnffgs.append(vnffg)
        return vnffgs

    def _get_all_endpoint_exposed(self, topo_tpl):
        """Exposed endpoints from substitution_mappings, or {} when absent."""
        if 'substitution_mappings' in topo_tpl.tpl:
            subs = topo_tpl.tpl['substitution_mappings']
            return {
                "external_cps": self._get_external_cps(subs),
                "forward_cps": self._get_forward_cps(subs),
            }
        return {}

    def _get_external_cps(self, subs_mappings):
        """External connection points (the 'requirements' section)."""
        return self._cps_from_section(subs_mappings, 'requirements')

    def _get_forward_cps(self, subs_mappings):
        """Forwarding connection points (the 'capabilities' section)."""
        return self._cps_from_section(subs_mappings, 'capabilities')

    def _cps_from_section(self, subs_mappings, section):
        """Shared body of _get_external_cps/_get_forward_cps.

        Section values may be [node, capability] lists; only the first
        element (the cpd id) is kept in that case.
        """
        cps = []
        if section in subs_mappings:
            for key, value in subs_mappings[section].items():
                cpd_id = value[0] if isinstance(value, list) and value else value
                cps.append({"key_name": key, "cpd_id": cpd_id})
        return cps

    def _get_all_nested_ns(self, nodes, node_types):
        """Collect every nested-NS node with its connected networks."""
        nss = []
        for node in nodes:
            if self.isNodeTypeX(node, node_types, NS_TYPE):
                nss.append({
                    'ns_id': node['name'],
                    'description': node['description'],
                    'properties': node['properties'],
                    'networks': self._get_networks(node, node_types),
                })
        return nss

    def _get_networks(self, node, node_types):
        """Virtual links required by an NS or VNF node as
        {key_name, vl_id} entries; [] for any other node type."""
        rets = []
        if 'requirements' in node and (self.isNodeTypeX(node, node_types, NS_TYPE) or self.isNodeTypeX(node, node_types, NS_VNF_TYPE)):
            for item in node['requirements']:
                for key, value in item.items():
                    rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
        return rets

    def _build_ns(self, tosca):
        """Build the ns dict, back-filling empty properties from metadata."""
        ns = self.get_substitution_mappings(tosca)
        properties = ns.get("properties", {})
        metadata = ns.get("metadata", {})
        if properties.get("descriptor_id", "") == "":
            properties["descriptor_id"] = metadata.get("nsd_id", "")
        # NOTE(review): 'verison' is a long-standing key typo kept for
        # compatibility with downstream consumers -- do not rename it here.
        if properties.get("verison", "") == "":
            properties["verison"] = metadata.get("nsd_file_structure_version", "")
        if properties.get("designer", "") == "":
            properties["designer"] = metadata.get("nsd_designer", "")
        if properties.get("name", "") == "":
            properties["name"] = metadata.get("nsd_name", "")
        if properties.get("invariant_id", "") == "":
            properties["invariant_id"] = metadata.get("nsd_invariant_id", "")
        return ns
diff --git a/genericparser/pub/utils/toscaparsers/pnfmodel.py b/genericparser/pub/utils/toscaparsers/pnfmodel.py
new file mode 100644 (file)
index 0000000..0f5445f
--- /dev/null
@@ -0,0 +1,53 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
+logger = logging.getLogger(__name__)
+
+
class PnfdInfoModel(BaseInfoModel):
    """Parses a PNFD TOSCA template into a plain ``self.pnf`` dict
    holding the node type, properties and external ports."""

    def __init__(self, path, params):
        super(PnfdInfoModel, self).__init__(path, params)

    def parseModel(self, tosca):
        self.metadata = self.buildMetadata(tosca)
        self.inputs = self.buildInputs(tosca)
        # Materialize as a list: a bare map() is a one-shot iterator on
        # Python 3; a list keeps the templates reusable.
        nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca),
                                 tosca.nodetemplates))
        self.basepath = self.get_base_path(tosca)
        self.pnf = {}
        self.get_substitution_mappings(tosca)
        self.get_all_cp(nodeTemplates)

    def get_substitution_mappings(self, tosca):
        """Copy node_type/properties from the topology template's
        substitution_mappings into self.pnf, when present."""
        pnf_substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
        if pnf_substitution_mappings:
            self.pnf['type'] = pnf_substitution_mappings['node_type']
            self.pnf['properties'] = pnf_substitution_mappings['properties']

    def get_all_cp(self, nodeTemplates):
        """Collect every external-port node into self.pnf['ExtPorts']."""
        self.pnf['ExtPorts'] = [
            {
                'id': node['name'],
                'type': node['nodeType'],
                'properties': node['properties'],
            }
            for node in nodeTemplates if self.isPnfExtPort(node)
        ]

    def isPnfExtPort(self, node):
        """True when the node type denotes a PNF external port."""
        # 'in' replaces the original .find(...) >= 0 idiom.
        return 'tosca.nodes.nfv.PnfExtPort' in node['nodeType']
diff --git a/genericparser/pub/utils/toscaparsers/sdmodel.py b/genericparser/pub/utils/toscaparsers/sdmodel.py
new file mode 100644 (file)
index 0000000..8cca07e
--- /dev/null
@@ -0,0 +1,93 @@
+# Copyright (c) 2019, CMCC Technologies. Co., Ltd.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
+from genericparser.pub.utils.toscaparsers.servicemodel import SdcServiceModel
+
+logger = logging.getLogger(__name__)
+
+
class SdInfoModel(BaseInfoModel):
    """Service-description parser built on top of SdcServiceModel,
    adding recursive expansion of complex (list/map) inputs."""

    def __init__(self, path, params):
        super(SdInfoModel, self).__init__(path, params)

    def parseModel(self, tosca):
        self.metadata = self.buildMetadata(tosca)
        self.inputs = self.build_inputs(tosca)

        sdc_model = SdcServiceModel(tosca)
        if sdc_model:
            self.service = sdc_model.ns
            if hasattr(tosca, 'nodetemplates'):
                self.basepath = sdc_model.basepath
                self.vnfs = sdc_model.vnfs
                self.pnfs = sdc_model.pnfs
                self.vls = sdc_model.vls
                self.graph = sdc_model.graph

    def build_inputs(self, tosca):
        """Get all the inputs, expanding complex (list/map) types.

        Returns a dict keyed by input name: simple inputs map to a
        {type, description, required} dict, complex inputs to a nested
        list built by get_child_input_repeat().
        """
        if not tosca.inputs:
            return {}

        result_inputs = {}
        for input in tosca.inputs:
            schema_type = input.schema.type
            # 'in' replaces the original chained type.__eq__() calls.
            if schema_type in ('list', 'map'):
                complex_input = []
                entry_schema = self.get_entry_schema(input.schema.schema['entry_schema'])
                self.get_child_input_repeat(complex_input, entry_schema, input)
                result_inputs[input.schema.name] = complex_input
            else:
                result_inputs[input.schema.name] = {
                    "type": input.schema.type,
                    "description": input.schema.description,
                    "required": input.schema.required,
                }
        return result_inputs

    def get_child_input_repeat(self, complex_input, entry_schema, input):
        """Recursively expand the properties of a complex entry schema
        into *complex_input* (mutated in place)."""
        custom_defs = input.custom_defs
        properties = custom_defs[entry_schema]['properties']
        # .items() works on both Python 2 and 3; iteritems() is
        # Python-2-only and crashes with AttributeError on Python 3.
        for key, value in properties.items():
            if value['type'] == 'list':
                child_complex_input = []
                child_entry_schema = self.get_entry_schema(value['entry_schema'])
                self.get_child_input_repeat(child_complex_input, child_entry_schema, input)
                complex_input.append({key: child_complex_input})
            else:
                simple_input = {
                    key: "",
                    "type": value['type'],
                    "required": value['required'],
                }
                # 'description' is optional in the property definition.
                if 'description' in value:
                    simple_input["description"] = value['description']
                complex_input.append(simple_input)

    def get_entry_schema(self, entry_schema):
        """Normalize an entry_schema that may be either a bare type
        string or a {'type': ...} dict."""
        if isinstance(entry_schema, dict) and 'type' in entry_schema:
            entry_schema = entry_schema['type']
        return entry_schema
diff --git a/genericparser/pub/utils/toscaparsers/servicemodel.py b/genericparser/pub/utils/toscaparsers/servicemodel.py
new file mode 100644 (file)
index 0000000..069d402
--- /dev/null
@@ -0,0 +1,188 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+from genericparser.pub.utils.toscaparsers.const import NS_METADATA_SECTIONS, PNF_METADATA_SECTIONS, VNF_SECTIONS, PNF_SECTIONS, VL_SECTIONS
+from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
+
+logger = logging.getLogger(__name__)
+
# Service-level abstract node type and description section name.
SDC_SERVICE_SECTIONS = (SERVICE_TYPE, SRV_DESCRIPTION) = (
    'org.openecomp.resource.abstract.nodes.service', 'description')

# Metadata keys identifying an SDC service template.
SDC_SERVICE_METADATA_SECTIONS = (SRV_UUID, SRV_INVARIANTUUID, SRV_NAME) = (
    'UUID', 'invariantUUID', 'name')

# Virtual-link node type and the node sections copied for each VL.
# NOTE: (X) is not a tuple -- VL_TYPE is simply an alias of SDC_VL.
SDC_VL = (VL_TYPE) = ('tosca.nodes.nfv.ext.zte.VL')
SDC_VL_SECTIONS = (VL_ID, VL_METADATA, VL_PROPERTIES, VL_DESCRIPTION) = \
    ("name", "metadata", "properties", "description")

# VF (VNF) abstract node type and the node sections copied for each VF.
SDC_VF = (VF_TYPE, VF_UUID) = \
    ('org.openecomp.resource.abstract.nodes.VF', 'UUID')
SDC_VF_SECTIONS = (VF_ID, VF_METADATA, VF_PROPERTIES, VF_DESCRIPTION) = \
    ("name", "metadata", "properties", "description")

# PNF abstract node type plus the metadata keys and node sections
# copied for each PNF.  As above, (X) is an alias, not a tuple.
SDC_PNF = (PNF_TYPE) = \
    ('org.openecomp.resource.abstract.nodes.PNF')
SDC_PNF_METADATA_SECTIONS = (SDC_PNF_UUID, SDC_PNF_INVARIANTUUID, SDC_PNF_NAME, SDC_PNF_METADATA_DESCRIPTION, SDC_PNF_VERSION) = \
    ("UUID", "invariantUUID", "name", "description", "version")
SDC_PNF_SECTIONS = (SDC_PNF_ID, SDC_PNF_METADATA, SDC_PNF_PROPERTIES, SDC_PNF_DESCRIPTION) = \
    ("name", "metadata", "properties", "description")

# Relationship/capability type lists passed to get_deploy_graph();
# the second (empty) list presumably covers group-level relationships
# -- TODO confirm against BaseInfoModel.
SERVICE_RELATIONSHIPS = [["tosca.relationships.network.LinksTo", "tosca.relationships.nfv.VirtualLinksTo", "tosca.capabilities.nfv.VirtualLinkable", "tosca.relationships.DependsOn"], []]
+
+
class SdcServiceModel(BaseInfoModel):
    """Parses an SDC (ECOMP) service TOSCA template into plain dict
    structures: vnfs, pnfs, vls and the deploy graph."""

    # (VL property name, vl_profile key) pairs copied verbatim when the
    # property is present; replaces the original 15-branch if-chain
    # (which also checked 'network_name' twice).
    _VL_PROFILE_KEYS = (
        ('segmentation_id', 'segmentationId'),
        ('network_name', 'networkName'),
        ('cidr', 'cidr'),
        ('start_ip', 'startIp'),
        ('end_ip', 'endIp'),
        ('gateway_ip', 'gatewayIp'),
        ('physical_network', 'physicalNetwork'),
        ('network_type', 'networkType'),
        ('dhcp_enabled', 'dhcpEnabled'),
        ('vlan_transparent', 'vlanTransparent'),
        ('mtu', 'mtu'),
        ('ip_version', 'ip_version'),
        ('dns_nameservers', 'dns_nameservers'),
        ('host_routes', 'host_routes'),
        ('network_id', 'network_id'),
    )

    def __init__(self, tosca):
        super(SdcServiceModel, self).__init__(tosca=tosca)

    def parseModel(self, tosca):
        self.metadata = self._buildServiceMetadata(tosca)
        self.ns = self._build_ns(tosca)
        self.inputs = self.buildInputs(tosca)
        if hasattr(tosca, 'nodetemplates'):
            # Materialize as a list: on Python 3 a bare map() is a
            # one-shot iterator and would be exhausted by the first
            # _get_all_* call, leaving the later collections empty.
            nodeTemplates = list(map(functools.partial(self.buildNode, tosca=tosca), tosca.nodetemplates))
            types = tosca.topology_template.custom_defs
            self.basepath = self.get_base_path(tosca)
            self.vnfs = self._get_all_vnf(nodeTemplates, types)
            self.pnfs = self._get_all_pnf(nodeTemplates, types)
            self.vls = self._get_all_vl(nodeTemplates, types)
            self.graph = self.get_deploy_graph(tosca, SERVICE_RELATIONSHIPS)

    def _buildServiceMetadata(self, tosca):
        """Map SDC service metadata onto the NS metadata section names.

        SDC service meta format (example):
         invariantUUID: e2618ee1-a29a-44c4-a52a-b718fe1269f4
         UUID: 2362d14a-115f-4a2b-b449-e2f93c0b7c89
         name: demoVLB
         description: catalogservicedescription
         type: Service
         category: NetworkL1-3
         serviceType: ''
         serviceRole: ''
         serviceEcompNaming: true
         ecompGeneratedNaming: true
         namingPolicy: ''
        """
        metadata_temp = self.buildMetadata(tosca)
        return self.setTargetValues({}, NS_METADATA_SECTIONS, metadata_temp, SDC_SERVICE_METADATA_SECTIONS)

    def _get_all_vnf(self, nodeTemplates, node_types):
        """Collect every VF node as a VNF description.

        SDC resource metadata (example):
        invariantUUID: 9ed46ddc-8eb7-4cb0-a1b6-04136c921af4
        UUID: b56ba35d-45fb-41e3-b6b8-b4f66917baa1
        customizationUUID: af0a6e64-967b-476b-87bc-959dcf59c305
        version: '1.0'
        name: b7d2fceb-dd11-43cd-a3fa
        description: vendor software product
        type: VF
        category: Generic
        subcategory: Abstract
        resourceVendor: b9d9f9f7-7994-4f0d-8104
        resourceVendorRelease: '1.0'
        resourceVendorModelNumber: ''
        """
        vnfs = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, VF_TYPE):
                vnf = {}
                self.setTargetValues(vnf, VNF_SECTIONS, node, SDC_VF_SECTIONS)
                # Fall back to the resource UUID when no explicit id property.
                if not vnf['properties'].get('id', None) and node['metadata']:
                    vnf['properties']['id'] = node['metadata'].get('UUID', None)
                vnf['properties']['vnfm_info'] = vnf['properties'].get('nf_type', None)
                # Compute once; copy so the two entries stay independent lists.
                networks = self._get_networks(node, node_types)
                vnf['dependencies'] = networks
                vnf['networks'] = list(networks)
                vnfs.append(vnf)
        return vnfs

    def _get_all_pnf(self, nodeTemplates, node_types):
        """Collect every PNF node, lifting its SDC metadata into properties."""
        pnfs = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, PNF_TYPE):
                pnf = {}
                self.setTargetValues(pnf, PNF_SECTIONS, node, SDC_PNF_SECTIONS)
                self.setTargetValues(pnf['properties'], PNF_METADATA_SECTIONS, node['metadata'], SDC_PNF_METADATA_SECTIONS)
                pnf['networks'] = self._get_networks(node, node_types)
                pnfs.append(pnf)
        return pnfs

    def _get_all_vl(self, nodeTemplates, node_types):
        """Collect every VL node, deriving a camelCase vl_profile from
        the snake_case VL properties present on the node."""
        vls = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, VL_TYPE):
                vl = {}
                self.setTargetValues(vl, VL_SECTIONS, node, SDC_VL_SECTIONS)
                properties = vl['properties']
                vl_profile = {}
                for prop_name, profile_key in self._VL_PROFILE_KEYS:
                    if prop_name in properties:
                        vl_profile[profile_key] = properties[prop_name]
                properties['vl_profile'] = vl_profile
                vls.append(vl)
        return vls

    def _get_networks(self, node, node_types):
        """Virtual links required by a VF node as {key_name, vl_id}
        entries; [] for any other node type."""
        rets = []
        if 'requirements' in node and self.isNodeTypeX(node, node_types, VF_TYPE):
            for item in node['requirements']:
                for key, value in item.items():
                    rets.append({"key_name": key, "vl_id": self.get_requirement_node_name(value)})
        return rets

    def _build_ns(self, tosca):
        """Build the ns dict, back-filling empty properties from metadata."""
        ns = self.get_substitution_mappings(tosca)
        properties = ns.get("properties", {})
        metadata = ns.get("metadata", {})
        if properties.get("descriptor_id", "") == "":
            properties["descriptor_id"] = metadata.get(SRV_UUID, "")
        # NOTE(review): 'verison' is a long-standing key typo kept for
        # compatibility with downstream consumers -- do not rename it here.
        properties["verison"] = ""
        properties["designer"] = ""
        if properties.get("name", "") == "":
            properties["name"] = metadata.get(SRV_NAME, "")
        if properties.get("invariant_id", "") == "":
            properties["invariant_id"] = metadata.get(SRV_INVARIANTUUID, "")
        return ns
diff --git a/genericparser/pub/utils/toscaparsers/testdata/ns/ran.csar b/genericparser/pub/utils/toscaparsers/testdata/ns/ran.csar
new file mode 100644 (file)
index 0000000..9ea868c
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/ns/ran.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/ns/service-vIMS.csar b/genericparser/pub/utils/toscaparsers/testdata/ns/service-vIMS.csar
new file mode 100644 (file)
index 0000000..0aeed58
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/ns/service-vIMS.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/pnf/ran-du.csar b/genericparser/pub/utils/toscaparsers/testdata/pnf/ran-du.csar
new file mode 100644 (file)
index 0000000..45168a9
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/pnf/ran-du.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vSBC.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vSBC.csar
new file mode 100644 (file)
index 0000000..921eafd
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/vnf/vSBC.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/infra.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/infra.csar
new file mode 100644 (file)
index 0000000..5c9fbcf
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/infra.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbng.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbng.csar
new file mode 100644 (file)
index 0000000..b11a6ef
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbng.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbrgemu.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbrgemu.csar
new file mode 100644 (file)
index 0000000..730ea8d
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vbrgemu.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgmux.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgmux.csar
new file mode 100644 (file)
index 0000000..b0f37a7
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgmux.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgw.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgw.csar
new file mode 100644 (file)
index 0000000..ca652bf
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpedpdk/vgw.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/infra.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/infra.csar
new file mode 100644 (file)
index 0000000..c91c034
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/infra.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbng.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbng.csar
new file mode 100644 (file)
index 0000000..5011563
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbng.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbrgemu.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbrgemu.csar
new file mode 100644 (file)
index 0000000..0f99199
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vbrgemu.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgmux.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgmux.csar
new file mode 100644 (file)
index 0000000..3d2dbf7
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgmux.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgw.csar b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgw.csar
new file mode 100644 (file)
index 0000000..5e47b77
Binary files /dev/null and b/genericparser/pub/utils/toscaparsers/testdata/vnf/vcpesriov/vgw.csar differ
diff --git a/genericparser/pub/utils/toscaparsers/tests.py b/genericparser/pub/utils/toscaparsers/tests.py
new file mode 100644 (file)
index 0000000..c461790
--- /dev/null
@@ -0,0 +1,102 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+import os
+import logging
+import tempfile
+import shutil
+
+from django.test import TestCase
+
+from genericparser.pub.utils.toscaparsers import parse_vnfd, parse_pnfd, parse_nsd
+from genericparser.pub.utils.toscaparsers.graph import Graph
+
# Module-level logger for the TOSCA parser tests.
logger = logging.getLogger(__name__)
+
+
class TestToscaparser(TestCase):
    """Tests for the TOSCA parser entry points (parse_vnfd / parse_pnfd / parse_nsd)."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_vnfd_parse(self):
        """Parse every vCPE VNF csar (sriov and dpdk flavours) and check its metadata."""
        self.remove_temp_dir()
        input_parameters = [{"value": "222222", "key": "sdncontroller"}]
        vcpe = ["vgw", "infra", "vbng", "vbrgemu", "vgmux"]
        sriov_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/vnf/vcpesriov"
        for vcpe_part in vcpe:
            csar_file = ("%s/%s.csar" % (sriov_path, vcpe_part))
            logger.debug("csar_file:%s", csar_file)
            vnfd_json = parse_vnfd(csar_file, input_parameters)
            metadata = json.loads(vnfd_json).get("metadata")
            logger.debug("sriov metadata:%s", metadata)
            self.assertEqual(("vCPE_%s" % vcpe_part), metadata.get("template_name", ""))
            if vcpe_part == "infra":
                self.assertEqual("b1bb0ce7-1111-4fa7-95ed-4840d70a1177", json.loads(vnfd_json)["vnf"]["properties"]["descriptor_id"])

        dpdk_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/vnf/vcpedpdk"
        for vcpe_part in vcpe:
            csar_file = ("%s/%s.csar" % (dpdk_path, vcpe_part))
            logger.debug("csar_file:%s", csar_file)
            vnfd_json = parse_vnfd(csar_file, input_parameters)
            metadata = json.loads(vnfd_json).get("metadata")
            logger.debug("dpdk metadata:%s", metadata)
            self.assertEqual(("vCPE_%s" % vcpe_part), metadata.get("template_name", ""))

    def test_pnfd_parse(self):
        """Parse the RAN DU PNF csar and check template name and descriptor id."""
        self.remove_temp_dir()
        csar_path = os.path.dirname(os.path.abspath(__file__)) + "/testdata/pnf/ran-du.csar"
        pnfd_json = parse_pnfd(csar_path)
        pnfd_dict = json.loads(pnfd_json)
        metadata = pnfd_dict.get("metadata")
        self.assertEqual("RAN_DU", metadata.get("template_name", ""))
        descriptor_id = pnfd_dict["pnf"]["properties"]["descriptor_id"]
        self.assertEqual(1, descriptor_id)

    def test_nsd_parse(self):
        """Placeholder: the ran.csar NSD assertions are disabled pending test data."""
        self.remove_temp_dir()
        # ran_csar = os.path.dirname(os.path.abspath(__file__)) + "/testdata/ns/ran.csar"
        # nsd_json = parse_nsd(ran_csar, [])
        # logger.debug("NS ran json: %s" % nsd_json)
        # metadata = json.loads(nsd_json).get("metadata")
        # self.assertEqual("RAN-NS", metadata.get("nsd_name", ""))

    def test_service_descriptor_parse(self):
        """Parse the vIMS service csar and check the NSD name."""
        self.remove_temp_dir()
        service_test_csar = os.path.dirname(os.path.abspath(__file__)) + "/testdata/ns/service-vIMS.csar"
        test_json = parse_nsd(service_test_csar, [])
        logger.debug("service-vIMS json: %s" % test_json)
        metadata = json.loads(test_json).get("metadata")
        self.assertEqual("vIMS_v2", metadata.get("nsd_name", ""))

    def remove_temp_dir(self):
        """Remove leftover tmp* directories from the system temp directory.

        Builds paths with os.path.join from the gettempdir() result instead
        of concatenating tempfile.tempdir (which may be None before
        gettempdir() is called), and avoids shadowing the builtin `dir`.
        """
        tempdir = tempfile.gettempdir()
        for entry in os.listdir(tempdir):
            if entry.startswith("tmp"):
                path = os.path.join(tempdir, entry)
                if (not os.path.isfile(path)) and os.path.exists(path):
                    shutil.rmtree(path)

    def test_graph(self):
        """get_pre_nodes must return the virtual links pointing at a node.

        Fix: the original assertion compared the return values of
        list.sort(), which are always None, so it could never fail.
        Compare sorted() copies instead.
        """
        data = {
            "cucp": [],
            "du": [],
            "vl_flat_net": ["cucp", "cuup"],
            "vl_ext_net": ["cucp", "cuup"],
            "cuup": []
        }
        graph = Graph(data)
        self.assertEqual(sorted(['vl_ext_net', 'vl_flat_net']), sorted(graph.get_pre_nodes("cucp")))
diff --git a/genericparser/pub/utils/toscaparsers/vnfdmodel.py b/genericparser/pub/utils/toscaparsers/vnfdmodel.py
new file mode 100644 (file)
index 0000000..7b4423d
--- /dev/null
@@ -0,0 +1,265 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import functools
+import logging
+import os
+from genericparser.pub.utils.toscaparsers.basemodel import BaseInfoModel
+# from genericparser.pub.exceptions import CatalogException
+
logger = logging.getLogger(__name__)

# TOSCA node type names this parser recognises (ETSI SOL001 NFV profile).
SECTIONS = (VDU_COMPUTE_TYPE, VNF_VL_TYPE, VDU_CP_TYPE, VDU_STORAGE_TYPE) = \
           ('tosca.nodes.nfv.Vdu.Compute', 'tosca.nodes.nfv.VnfVirtualLink', 'tosca.nodes.nfv.VduCp', 'tosca.nodes.nfv.Vdu.VirtualStorage')

# Relationship type groups handed to BaseInfoModel.get_deploy_graph when
# building the VNF deployment graph.
NFV_VNF_RELATIONSHIPS = [["tosca.relationships.nfv.VirtualLinksTo", "tosca.relationships.nfv.VduAttachesTo", "tosca.relationships.nfv.AttachesTo", "tosca.relationships.nfv.Vdu.AttachedTo", "tosca.relationships.DependsOn"],
                         ["tosca.nodes.relationships.VirtualBindsTo", "tosca.relationships.nfv.VirtualBindsTo"]]
+
+
class EtsiVnfdInfoModel(BaseInfoModel):
    """Parses an ETSI SOL001 VNFD TOSCA template into a flat info model.

    NOTE(review): this module targets Python 2 - map() results are used
    with len()/indexing below and str.encode("base64") is used, neither
    of which works unchanged on Python 3.
    """

    def __init__(self, path, params):
        super(EtsiVnfdInfoModel, self).__init__(path, params)

    def parseModel(self, tosca):
        """Populate every model section (vnf, vdus, vls, cps, ...) from the parsed tosca object."""
        self.vnf = {}
        self.vnf = self._build_vnf(tosca)
        self.metadata = self.buildMetadata(tosca)
        self.inputs = self.buildInputs(tosca)
        nodeTemplates = map(functools.partial(self.buildNode, tosca=tosca),
                            tosca.nodetemplates)
        node_types = tosca.topology_template.custom_defs
        self.basepath = self.get_base_path(tosca)
        self.volume_storages = self._get_all_volume_storage(nodeTemplates, node_types)
        self.vdus = self._get_all_vdu(nodeTemplates, node_types)
        self.vls = self._get_all_vl(nodeTemplates, node_types)
        self.cps = self._get_all_cp(nodeTemplates, node_types)
        self.vnf_exposed = self._get_all_endpoint_exposed()
        self.graph = self.get_deploy_graph(tosca, NFV_VNF_RELATIONSHIPS)

    def _get_all_volume_storage(self, nodeTemplates, node_types):
        """Collect every Vdu.VirtualStorage node as a plain dict."""
        rets = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, VDU_STORAGE_TYPE):
                ret = {}
                ret['volume_storage_id'] = node['name']
                if 'description' in node:
                    ret['description'] = node['description']
                ret['properties'] = node['properties']
                # image_file should be gotten form artifacts TODO
                # ret['artifacts'] = self._build_artifacts(node)
                rets.append(ret)
        return rets

    def _get_all_vdu(self, nodeTemplates, node_types):
        """Collect every Vdu.Compute node with storages, networks, cps and artifacts.

        Fix: inject_files is now read per node; previously it was
        initialised once outside the loop, so a VDU without inject_files
        re-processed the previous VDU's list.
        """
        rets = []
        for node in nodeTemplates:
            logger.debug("nodeTemplates :%s", node)
            if self.isNodeTypeX(node, node_types, VDU_COMPUTE_TYPE):
                ret = {}
                ret['vdu_id'] = node['name']
                ret['type'] = node['nodeType']
                if 'description' in node:
                    ret['description'] = node['description']
                ret['properties'] = node['properties']
                # inject_files may be a single dict or a list of dicts.
                inject_files = node['properties'].get('inject_files')
                if inject_files is not None:
                    if isinstance(inject_files, list):
                        for inject_file in inject_files:
                            source_path = os.path.join(self.basepath, inject_file['source_path'])
                            with open(source_path, "rb") as f:
                                source_data = f.read()
                                # Python 2 only: on py3 use base64.b64encode.
                                source_data_base64 = source_data.encode("base64")
                                inject_file["source_data_base64"] = source_data_base64
                    if isinstance(inject_files, dict):
                        source_path = os.path.join(self.basepath, inject_files['source_path'])
                        with open(source_path, "rb") as f:
                            source_data = f.read()
                            source_data_base64 = source_data.encode("base64")
                            inject_files["source_data_base64"] = source_data_base64
                virtual_storages = self.getRequirementByName(node, 'virtual_storage')
                ret['virtual_storages'] = map(functools.partial(self._trans_virtual_storage), virtual_storages)
                ret['dependencies'] = map(lambda x: self.get_requirement_node_name(x), self.getNodeDependencys(node))
                virtual_compute = self.getCapabilityByName(node, 'virtual_compute')
                if virtual_compute is not None and 'properties' in virtual_compute:
                    ret['virtual_compute'] = virtual_compute['properties']
                ret['vls'] = self._get_linked_vl_ids(node, nodeTemplates)
                ret['cps'] = self._get_virtal_binding_cp_ids(node, nodeTemplates)
                ret['artifacts'] = self.build_artifacts(node)
                rets.append(ret)
        logger.debug("rets:%s", rets)
        return rets

    def _trans_virtual_storage(self, virtual_storage):
        """Normalise a virtual_storage requirement (string or dict) to {'virtual_storage_id': ...}."""
        if isinstance(virtual_storage, str):
            return {"virtual_storage_id": virtual_storage}
        else:
            ret = {}
            ret['virtual_storage_id'] = self.get_requirement_node_name(virtual_storage)
            return ret

    def _get_linked_vl_ids(self, node, node_templates):
        """Return the ids of all VLs reachable through this VDU's bound CPs."""
        vl_ids = []
        cps = self._get_virtal_binding_cps(node, node_templates)
        for cp in cps:
            vl_reqs = self.getRequirementByName(cp, 'virtual_link')
            for vl_req in vl_reqs:
                vl_ids.append(self.get_requirement_node_name(vl_req))
        return vl_ids

    def _get_virtal_binding_cp_ids(self, node, nodeTemplates):
        """Return the names of all CPs virtually bound to this VDU."""
        return map(lambda x: x['name'], self._get_virtal_binding_cps(node, nodeTemplates))

    def _get_virtal_binding_cps(self, node, nodeTemplates):
        """Return every node template whose VIRTUAL_BINDING requirement targets *node*."""
        cps = []
        for tmpnode in nodeTemplates:
            if 'requirements' in tmpnode:
                for item in tmpnode['requirements']:
                    for key, value in item.items():
                        if key.upper().startswith('VIRTUAL_BINDING'):
                            req_node_name = self.get_requirement_node_name(value)
                            if req_node_name is not None and req_node_name == node['name']:
                                cps.append(tmpnode)
        return cps

    def _get_all_vl(self, nodeTemplates, node_types):
        """Collect every VnfVirtualLink node as a plain dict."""
        vls = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, VNF_VL_TYPE):
                vl = dict()
                vl['vl_id'] = node['name']
                vl['description'] = node['description']
                vl['properties'] = node['properties']
                vls.append(vl)
        return vls

    def _get_all_cp(self, nodeTemplates, node_types):
        """Collect every VduCp node, resolving its VL, VDU and optional multi-VL list."""
        cps = []
        for node in nodeTemplates:
            if self.isNodeTypeX(node, node_types, VDU_CP_TYPE):
                cp = {}
                cp['cp_id'] = node['name']
                cp['cpd_id'] = node['name']
                cp['description'] = node['description']
                cp['properties'] = node['properties']
                cp['vl_id'] = self._get_node_vl_id(node)
                cp['vdu_id'] = self._get_node_vdu_id(node)
                vls = self._buil_cp_vls(node)
                # 'vls' is only present when the CP links to more than one VL.
                if len(vls) > 1:
                    cp['vls'] = vls
                cps.append(cp)
        return cps

    def _get_node_vdu_id(self, node):
        """Return the first virtual_binding target (owning VDU), or ""."""
        vdu_ids = map(lambda x: self.get_requirement_node_name(x), self.getRequirementByName(node, 'virtual_binding'))
        if len(vdu_ids) > 0:
            return vdu_ids[0]
        return ""

    def _get_node_vl_id(self, node):
        """Return the first virtual_link target, or ""."""
        vl_ids = map(lambda x: self.get_requirement_node_name(x), self.getRequirementByName(node, 'virtual_link'))
        if len(vl_ids) > 0:
            return vl_ids[0]
        return ""

    def _buil_cp_vls(self, node):
        """Build the full VL descriptor list for a CP's virtual_link requirements."""
        return map(lambda x: self._build_cp_vl(x), self.getRequirementByName(node, 'virtual_link'))

    def _build_cp_vl(self, req):
        """Build one CP->VL descriptor, copying any relationship properties."""
        cp_vl = {}
        cp_vl['vl_id'] = self.get_prop_from_obj(req, 'node')
        relationship = self.get_prop_from_obj(req, 'relationship')
        if relationship is not None:
            properties = self.get_prop_from_obj(relationship, 'properties')
            if properties is not None and isinstance(properties, dict):
                for key, value in properties.items():
                    cp_vl[key] = value
        return cp_vl

    def _get_all_endpoint_exposed(self):
        """Return the external and forwarding CPs exposed by the VNF, if any."""
        if self.vnf:
            external_cps = self._get_external_cps(self.vnf.get('requirements', None))
            forward_cps = self._get_forward_cps(self.vnf.get('capabilities', None))
            return {"external_cps": external_cps, "forward_cps": forward_cps}
        return {}

    def _get_external_cps(self, vnf_requirements):
        """Flatten substitution-mapping requirements (dict or list form) into cpd descriptors."""
        external_cps = []
        if vnf_requirements:
            if isinstance(vnf_requirements, dict):
                for key, value in vnf_requirements.items():
                    if isinstance(value, list) and len(value) > 0:
                        external_cps.append({"key_name": key, "cpd_id": value[0]})
                    else:
                        external_cps.append({"key_name": key, "cpd_id": value})
            elif isinstance(vnf_requirements, list):
                for vnf_requirement in vnf_requirements:
                    for key, value in vnf_requirement.items():
                        if isinstance(value, list) and len(value) > 0:
                            external_cps.append({"key_name": key, "cpd_id": value[0]})
                        else:
                            external_cps.append({"key_name": key, "cpd_id": value})
        return external_cps

    def _get_forward_cps(self, vnf_capabilities):
        """Flatten substitution-mapping capabilities into cpd descriptors."""
        forward_cps = []
        if vnf_capabilities:
            for key, value in vnf_capabilities.items():
                if isinstance(value, list) and len(value) > 0:
                    forward_cps.append({"key_name": key, "cpd_id": value[0]})
                else:
                    forward_cps.append({"key_name": key, "cpd_id": value})
        return forward_cps

    # def get_substitution_mappings(self, tosca):
    #    node = {}
    #    substitution_mappings = tosca.tpl['topology_template'].get('substitution_mappings', None)
    #    if substitution_mappings:
    #        node = substitution_mappings.get('properties', {})
    #        node['type'] = substitution_mappings['node_type']
    #    return node

    def _build_vnf(self, tosca):
        """Build the vnf section, backfilling descriptor properties from metadata.

        Each property falls back through the metadata keys listed below
        when the substitution-mapping properties leave it empty.
        """
        vnf = self.get_substitution_mappings(tosca)
        properties = vnf.get("properties", {})
        metadata = vnf.get("metadata", {})
        if properties.get("descriptor_id", "") == "":
            descriptor_id = metadata.get("descriptor_id", "")
            if descriptor_id == "":
                descriptor_id = metadata.get("id", "")
            if descriptor_id == "":
                descriptor_id = metadata.get("UUID", "")
            properties["descriptor_id"] = descriptor_id

        # NOTE: "descriptor_verison" (sic) is kept misspelled on purpose -
        # consumers of this model read that exact key.
        if properties.get("descriptor_verison", "") == "":
            version = metadata.get("template_version", "")
            if version == "":
                version = metadata.get("version", "")
            properties["descriptor_verison"] = version

        if properties.get("provider", "") == "":
            provider = metadata.get("template_author", "")
            if provider == "":
                provider = metadata.get("provider", "")
            properties["provider"] = provider

        # Simplified: the original fell back to the very same metadata key,
        # which was dead code.
        if properties.get("template_name", "") == "":
            properties["template_name"] = metadata.get("template_name", "")

        return vnf
diff --git a/genericparser/pub/utils/values.py b/genericparser/pub/utils/values.py
new file mode 100644 (file)
index 0000000..0fd2d1a
--- /dev/null
@@ -0,0 +1,24 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
def ignore_case_get(args, key, def_val=""):
    """Look up *key* in mapping *args*, falling back to a case-insensitive match.

    Returns def_val when key is falsy or no (case-insensitive) match exists.
    An exact-case hit is preferred over a case-folded one.
    """
    if not key:
        return def_val
    if key in args:
        return args[key]
    wanted = key.upper()
    for candidate in args:
        if candidate.upper() == wanted:
            return args[candidate]
    return def_val
diff --git a/genericparser/samples/__init__.py b/genericparser/samples/__init__.py
new file mode 100644 (file)
index 0000000..c7b6818
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/samples/tests.py b/genericparser/samples/tests.py
new file mode 100644 (file)
index 0000000..9e4c027
--- /dev/null
@@ -0,0 +1,33 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import unittest
+import json
+from django.test import Client
+from rest_framework import status
+
+
class SampleViewTest(unittest.TestCase):
    """Smoke test for the /samples/ health endpoint."""

    def setUp(self):
        self.client = Client()

    def tearDown(self):
        pass

    def test_sample(self):
        """GET /samples/ answers 200 with the static active-status payload."""
        resp = self.client.get("/samples/")
        self.assertEqual(status.HTTP_200_OK, resp.status_code, resp.content)
        self.assertEqual({"status": "active"}, json.loads(resp.content))
diff --git a/genericparser/samples/urls.py b/genericparser/samples/urls.py
new file mode 100644 (file)
index 0000000..725b343
--- /dev/null
@@ -0,0 +1,20 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.conf.urls import url
+from genericparser.samples import views
+
# URL routes for the samples app:
#  - mandb/<modelName>: maintenance endpoint (row counts / table purge),
#    modelName restricted to letters and dashes by the regex.
#  - samples/: static health-check endpoint.
urlpatterns = [
    url(r'^api/genericparser/v1/mandb/(?P<modelName>[a-zA-Z\-]+)$', views.TablesList.as_view()),
    url(r'^samples/$', views.SampleList.as_view()), ]
diff --git a/genericparser/samples/views.py b/genericparser/samples/views.py
new file mode 100644 (file)
index 0000000..524280f
--- /dev/null
@@ -0,0 +1,57 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import traceback
+
+from rest_framework import status
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
# Module-level logger for the sample views.
logger = logging.getLogger(__name__)


class SampleList(APIView):
    """
    List all samples.
    """
    def get(self, request, format=None):
        # Health-check style endpoint: always reports the service as active.
        logger.debug("get")
        return Response({"status": "active"})
+
+
class TablesList(APIView):
    """Maintenance endpoints: count or purge model tables selected by name."""

    def delete(self, request, modelName):
        """Delete all rows of each model named in the dash-separated *modelName*.

        Returns 204 on success, or 500 with {"error": "failed"} on any failure.
        """
        logger.debug("Start delete model %s", modelName)
        try:
            modelNames = modelName.split("-")
            for name in modelNames:
                # SECURITY NOTE(review): eval() on a URL path segment; the URL
                # regex limits modelName to [a-zA-Z-]+, but
                # getattr(models, name).objects would be safer. Also, `models`
                # is not imported in this module - confirm where it is expected
                # to come from.
                model_obj = eval("models.%s.objects" % name)
                model_obj.filter().delete()
                logger.debug("End delete model %s", name)
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are no longer swallowed.
            logger.error(traceback.format_exc())
            return Response(data={"error": "failed"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        return Response(data={}, status=status.HTTP_204_NO_CONTENT)

    def get(self, request, modelName):
        """Return {"count": n} with the row count of the model *modelName*."""
        logger.debug("Get model %s", modelName)
        count = 0
        try:
            # See the eval()/`models` note in delete() above.
            model_obj = eval("models.%s.objects" % modelName)
            count = len(model_obj.filter())
        except Exception:
            logger.error(traceback.format_exc())
            return Response(data={"error": "failed"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        return Response(data={"count": count}, status=status.HTTP_200_OK)
diff --git a/genericparser/settings.py b/genericparser/settings.py
new file mode 100644 (file)
index 0000000..a3310b9
--- /dev/null
@@ -0,0 +1,193 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
import os
import sys
import platform

import redisco

from genericparser.pub.config.config import REDIS_HOST, REDIS_PORT, REDIS_PASSWD
from genericparser.pub.config.config import DB_NAME, DB_IP, DB_USER, DB_PASSWD, DB_PORT
from genericparser.pub.config import config as pub_config
from logging import config as log_config
from onaplogging import monkey
# Patch the stdlib logging module so ONAP MDC/context logging works.
monkey.patch_all()

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '3o-wney!99y)^h3v)0$j16l9=fdjxcb+a8g+q3tfbahcnu2b0o'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['*']

# Application definition

INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'genericparser.pub.database',
    'genericparser.samples',
    'genericparser.swagger',
    'drf_yasg',
]

# drf-yasg
SWAGGER_SETTINGS = {
    'LOGIN_URL': '/admin/login',
    'LOGOUT_URL': '/admin/logout',
    'DEFAULT_INFO': 'genericparser.swagger.urls.swagger_info'
}

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    # Project middleware (indentation normalised - was misaligned).
    'genericparser.middleware.LogContextMiddleware',
]

ROOT_URLCONF = 'genericparser.urls'

WSGI_APPLICATION = 'genericparser.wsgi.application'

REST_FRAMEWORK = {
    'DEFAULT_RENDERER_CLASSES': (
        'rest_framework.renderers.JSONRenderer',
    ),

    'DEFAULT_PARSER_CLASSES': (
        'rest_framework.parsers.JSONParser',
        'rest_framework.parsers.MultiPartParser',
    )
}

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': DB_NAME,
        'HOST': DB_IP,
        'PORT': DB_PORT,
        'USER': DB_USER,
        'PASSWORD': DB_PASSWD,
    },
}

redisco.connection_setup(host=REDIS_HOST, port=REDIS_PORT, password=REDIS_PASSWD, db=0)
# CACHE_BACKEND = 'redis_cache.cache://%s@%s:%s' % (REDIS_PASSWD, REDIS_HOST, REDIS_PORT)

TIME_ZONE = 'UTC'

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/

STATIC_URL = '/static/'

STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static")
]
# change
# Derive genericparser package paths and the SDC base URL from the MSB config.
pub_config.GENERICPARSER_ROOT_PATH = os.path.join(STATICFILES_DIRS[0], "genericparser")
pub_config.GENERICPARSER_URL_PATH = "static/genericparser"
pub_config.SDC_BASE_URL = "http://%s:%s/api" % (pub_config.MSB_SERVICE_IP, pub_config.MSB_SERVICE_PORT)

# On Windows or during tests, log to a local rotating file; otherwise load
# the yaml logging config with a watchdog for live reloads.
if platform.system() == 'Windows' or 'test' in sys.argv:
    LOGGING = {
        'version': 1,
        'disable_existing_loggers': True,
        'formatters': {
            'standard': {
                'format': '%(asctime)s:[%(name)s]:[%(filename)s]-[%(lineno)d] [%(levelname)s]:%(message)s',
            },
        },
        'filters': {
        },
        # change
        'handlers': {
            'genericparser_handler': {
                'level': 'DEBUG',
                'class': 'logging.handlers.RotatingFileHandler',
                'filename': os.path.join(BASE_DIR, 'logs/runtime_genericparser.log'),
                'formatter': 'standard',
                'maxBytes': 1024 * 1024 * 50,
                'backupCount': 5,
            },
        },

        'loggers': {
            # change
            'genericparser': {
                'handlers': ['genericparser_handler'],
                'level': 'DEBUG',
                'propagate': False
            },
        }
    }
else:
    LOGGING_CONFIG = None
    # yaml configuration of logging
    LOGGING_FILE = os.path.join(BASE_DIR, 'genericparser/log.yml')
    log_config.yamlConfig(filepath=LOGGING_FILE, watchDog=True)

# Test-run overrides: in-memory sqlite, no MSB registration, XML reports on
# Linux, and a mocked id generator so ids are deterministic.
if 'test' in sys.argv:
    pub_config.REG_TO_MSB_WHEN_START = False

    DATABASES = {}
    DATABASES['default'] = {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
    REST_FRAMEWORK = {}

    if platform.system() == 'Linux':
        TEST_RUNNER = 'xmlrunner.extra.djangotestrunner.XMLTestRunner'
        TEST_OUTPUT_VERBOSE = True
        TEST_OUTPUT_DESCRIPTIONS = True
        TEST_OUTPUT_DIR = 'test-reports'

    import mock
    from genericparser.pub.utils import idutil
    idutil.get_auto_id = mock.Mock()
    idutil.get_auto_id.return_value = 1
diff --git a/genericparser/swagger/__init__.py b/genericparser/swagger/__init__.py
new file mode 100644 (file)
index 0000000..c7b6818
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/swagger/management/__init__.py b/genericparser/swagger/management/__init__.py
new file mode 100644 (file)
index 0000000..342c2a8
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/swagger/management/commands/__init__.py b/genericparser/swagger/management/commands/__init__.py
new file mode 100644 (file)
index 0000000..342c2a8
--- /dev/null
@@ -0,0 +1,13 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/genericparser/swagger/management/commands/export_swagger.py b/genericparser/swagger/management/commands/export_swagger.py
new file mode 100644 (file)
index 0000000..e732246
--- /dev/null
@@ -0,0 +1,36 @@
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+
+from django.core.management.base import BaseCommand
+from django.test import Client
+
+
+class Command(BaseCommand):
+    def add_arguments(self, parser):
+        parser.add_argument(
+            '-f',
+            '--name',
+            action='store',
+            dest='name',
+            default='swagger.json',
+            help='name of swagger file.',
+        )
+
+    def handle(self, *args, **options):
+        self.client = Client()
+        response = self.client.get("/api/genericparser/v1/swagger.json")
+        with open(options['name'], 'w') as swagger_file:
+            swagger_file.write(json.dumps(response.data))
+        print "swagger api is written to %s" % options['name']
diff --git a/genericparser/swagger/tests.py b/genericparser/swagger/tests.py
new file mode 100644 (file)
index 0000000..15471a9
--- /dev/null
@@ -0,0 +1,28 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at:
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+
+import unittest
+
+from django.test import Client
+from rest_framework import status
+
+
+class SwaggerViewTest(unittest.TestCase):
+    def setUp(self):
+        self.client = Client()
+
+    def tearDown(self):
+        pass
+
+    def test_swagger(self):
+        response = self.client.get("/api/genericparser/v1/swagger.json")
+        self.assertEqual(status.HTTP_200_OK, response.status_code, response.content)
+        self.assertEqual("2.0", response.data.get("swagger"))
diff --git a/genericparser/swagger/urls.py b/genericparser/swagger/urls.py
new file mode 100644 (file)
index 0000000..cb8198a
--- /dev/null
@@ -0,0 +1,43 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.conf.urls import url
+from drf_yasg import openapi
+from drf_yasg.views import get_schema_view
+from rest_framework import permissions
+
+# Add code for generating swagger automatically.
+swagger_info = openapi.Info(
+    title="MODELING GENERICPARSER API",
+    default_version='v1',
+    description="""
+
+The `swagger-ui` view can be found [here](/api/genericparser/v1/swagger).
+The `ReDoc` view can be found [here](/api/genericparser/v1/redoc).
+The swagger YAML document can be found [here](/api/genericparser/v1/swagger.yaml).
+The swagger JSON document can be found [here](/api/genericparser/v1/swagger.json)."""
+)
+
+SchemaView = get_schema_view(
+    validators=['ssv', 'flex'],
+    public=True,
+    permission_classes=(permissions.AllowAny,),
+)
+
+urlpatterns = [
+    # url(r'^api/genericparser/v1/swagger.json$', SwaggerJsonView.as_view()),
+    url(r'^api/genericparser/v1/swagger(?P<format>\.json|\.yaml)$', SchemaView.without_ui(cache_timeout=0), name='schema-json'),
+    url(r'^api/genericparser/v1/swagger$', SchemaView.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
+    url(r'^api/genericparser/v1/redoc$', SchemaView.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
+]
diff --git a/genericparser/swagger/vfc.catalog.swagger.json b/genericparser/swagger/vfc.catalog.swagger.json
new file mode 100644 (file)
index 0000000..1327462
--- /dev/null
@@ -0,0 +1,793 @@
+{
+  "swagger": "2.0",
+  "info": {
+    "version": "1.0.0",
+    "title": "ONAP VFC Catalog Rest API",
+    "description": "VFC Catalog Management API.",
+    "contact": {
+      "name": "ONAP VFC team",
+      "email": "onap-discuss@lists.onap.org",
+      "url": "https://gerrit.onap.org/r/#/admin/projects/vfc/nfvo/catalog"
+    }
+  },
+  "basePath": "/api/catalog/v1",
+  "schemes": [
+    "http",
+    "https"
+  ],
+  "consumes": [
+    "application/json"
+  ],
+  "produces": [
+    "application/json"
+  ],
+  "paths": {
+    "/nspackages": {
+      "get": {
+        "tags": [
+          "nspackage"
+        ],
+        "summary": "query ns packages info",
+        "description": "query ns packages info",
+        "operationId": "query_ns_packages",
+        "parameters": [],
+        "responses": {
+          "200": {
+            "description": "successful operation",
+            "schema": {
+              "$ref": "#/definitions/NsPkgListInfo"
+            }
+          },
+          "404": {
+            "description": "URL not found"
+          },
+          "500": {
+            "description": "internal error"
+          }
+        }
+      },
+      "post": {
+        "tags": [
+          "nspackage"
+        ],
+        "summary": "ns package distribute",
+        "description": "ns package distribute",
+        "operationId": "ns_pkg_distribute",
+        "parameters": [
+          {
+            "in": "body",
+            "name": "body",
+            "description": "distribute request param",
+            "required": true,
+            "schema": {
+              "$ref": "#/definitions/NsPkgDistributeRequest"
+            }
+          }
+        ],
+        "responses": {
+          "202": {
+            "description": "",
+            "schema": {
+              "$ref": "#/definitions/NsPkgDistributeResponse"
+            }
+          },
+          "404": {
+            "description": "URL not found"
+          },
+          "500": {
+            "description": "internal error"
+          }
+        }
+      }
+    },
+    "/nspackages/{csarId}": {
+      "get": {
+        "tags": [
+          "nspackage"
+        ],
+        "summary": "query ns package info",
+        "description": "query ns package info via ns package csarId",
+        "operationId": "query_ns_package",
+        "consumes": [
+          "application/json"
+        ],
+        "produces": [
+          "application/json"
+        ],
+        "parameters": [
+          {
+            "name": "csarId",
+            "in": "path",
+            "description": "csar id of ns package",
+            "required": true,
+            "type": "string"
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "successful operation",
+            "schema": {
+              "$ref": "#/definitions/NsPkgDetailInfo"
+            }
+          },
+          "404": {
+            "description": "URL not found"
+          },
+          "500": {
+            "description": "internal error"
+          }
+        }
+      },
+      "delete": {
+        "tags": [
+          "nspackage"
+        ],
+        "summary": "delete ns pkg",
+        "description": "delete ns pkg",
+        "operationId": "delete_ns_pkg",
+        "consumes": [
+          "application/json"
+        ],
+        "produces": [
+          "application/json"
+        ],
+        "parameters": [
+          {
+            "name": "csarId",
+            "in": "path",
+            "description": "csar id of ns package",
+            "required": true,
+            "type": "string"
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "Delete NS Package Response",
+            "schema": {
+              "$ref": "#/definitions/NsPkgDelResponse"
+            }
+          },
+          "404": {
+            "description": "URL not found"
+          },
+          "500": {
+            "description": "internal error"
+          }
+        }
+      }
+    },
+    "/parsernsd": {
+      "post": {
+        "tags": [
+          "model"
+        ],
+        "summary": "ns package model",
+        "description": "ns package model",
+        "operationId": "ms_model_parser",
+        "consumes": [
+          "application/json"
+        ],
+        "produces": [
+          "application/json"
+        ],
+        "parameters": [
+          {
+            "in": "body",
+            "name": "body",
+            "description": "distribute request param",
+            "required": true,
+            "schema": {
+              "$ref": "#/definitions/modelParserRequest"
+            }
+          }
+        ],
+        "responses": {
+          "202": {
+            "description": "",
+            "schema": {
+              "$ref": "#/definitions/modelParserResponse"
+            }
+          },
+          "404": {
+            "description": "URL not found"
+          },
+          "500": {
+            "description": "internal error"
+          }
+        }
+      }
+     },
+    "/vnfpackages": {
+      "get": {
+        "tags": [
+          "vnfpackage"
+        ],
+        "summary": "query vnf packages info",
+        "description": "query vnf packages info",
+        "operationId": "query_vnf_packages",
+        "consumes": [
+          "application/json"
+        ],
+        "produces": [
+          "application/json"
+        ],
+        "parameters": [],
+        "responses": {
+          "200": {
+            "description": "successful operation",
+            "schema": {
+              "$ref": "#/definitions/VnfPkgListInfo"
+            }
+          },
+          "404": {
+            "description": "URL not found"
+          },
+          "500": {
+            "description": "internal error"
+          }
+        }
+      },
+      "post": {
+        "tags": [
+          "vnfpackage"
+        ],
+        "summary": "vnf package distribute",
+        "description": "vnf package distribute",
+        "operationId": "vnf_pkg_distribute",
+        "consumes": [
+          "application/json"
+        ],
+        "produces": [
+          "application/json"
+        ],
+        "parameters": [
+          {
+            "in": "body",
+            "name": "body",
+            "description": "distribute request param",
+            "required": true,
+            "schema": {
+              "$ref": "#/definitions/VnfPkgDistributeRequest"
+            }
+          }
+        ],
+        "responses": {
+          "202": {
+            "description": "",
+            "schema": {
+              "$ref": "#/definitions/VnfPkgDistributeResponse"
+            }
+          },
+          "404": {
+            "description": "URL not found"
+          },
+          "500": {
+            "description": "internal error"
+          }
+        }
+      }
+    },
+    "/vnfpackages/{csarId}": {
+      "get": {
+        "tags": [
+          "vnfpackage"
+        ],
+        "summary": "query vnf package info",
+        "description": "query one vnf package info via vnf package csarId",
+        "operationId": "query_vnf_package",
+        "consumes": [
+          "application/json"
+        ],
+        "produces": [
+          "application/json"
+        ],
+        "parameters": [
+          {
+            "name": "csarId",
+            "in": "path",
+            "description": "csar id of vnf package",
+            "required": true,
+            "type": "string"
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "successful operation",
+            "schema": {
+              "$ref": "#/definitions/VnfPkgDetailInfo"
+            }
+          },
+          "404": {
+            "description": "URL not found"
+          },
+          "500": {
+            "description": "internal error"
+          }
+        }
+      },
+      "delete": {
+        "tags": [
+          "vnfpackage"
+        ],
+        "summary": "delete vnf package",
+        "description": "delete vnf package",
+        "operationId": "delete_vnf_package",
+        "consumes": [
+          "application/json"
+        ],
+        "produces": [
+          "application/json"
+        ],
+        "parameters": [
+          {
+            "name": "csarId",
+            "in": "path",
+            "description": "csar id of vnf package",
+            "required": true,
+            "type": "string"
+          }
+        ],
+        "responses": {
+          "202": {
+            "description": "Delete VNF Pakcage Response",
+            "schema": {
+              "$ref": "#/definitions/VnfPkgDelResponse"
+            }
+          },
+          "404": {
+            "description": "URL not found"
+          },
+          "500": {
+            "description": "internal error"
+          }
+        }
+      }
+    },
+    "/parservnfd": {
+      "post": {
+        "tags": [
+          "model"
+        ],
+        "summary": "vnf package model",
+        "description": "vnf package model",
+        "operationId": "vnf_model_parser",
+        "consumes": [
+          "application/json"
+        ],
+        "produces": [
+          "application/json"
+        ],
+        "parameters": [
+          {
+            "in": "body",
+            "name": "body",
+            "description": "distribute request param",
+            "required": true,
+            "schema": {
+              "$ref": "#/definitions/modelParserRequest"
+            }
+          }
+        ],
+        "responses": {
+          "202": {
+            "description": "",
+            "schema": {
+              "$ref": "#/definitions/modelParserResponse"
+            }
+          },
+          "404": {
+            "description": "URL not found"
+          },
+          "500": {
+            "description": "internal error"
+          }
+        }
+      }
+     },
+     "/jobs/{jobId}": {
+       "get": {
+        "tags": [
+          "job"
+        ],
+        "summary": "jobstatus",
+        "description": "Get Job Status",
+        "operationId": "get_jobstatus",
+        "parameters": [
+          {
+            "required": true,
+            "type": "string",
+            "description": "job Id",
+            "name": "jobId",
+            "in": "path"
+          },
+          {
+            "required": true,
+            "type": "string",
+            "description": "job response message id",
+            "name": "responseId",
+            "in": "query"
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "",
+             "schema": {
+              "$ref": "#/definitions/JobDetailInfo"
+            }
+          }
+        }
+      },
+       "post": {
+        "tags": [
+          "job"
+        ],
+        "summary": "Update Job Status",
+        "description": "Update Job Status",
+        "operationId": "post_jobstatus",
+        "parameters": [
+          {
+            "required": true,
+            "type": "string",
+            "description": "job Id",
+            "name": "jobId",
+            "in": "path"
+          },
+          {
+            "required": true,
+            "schema": {
+              "$ref": "#/definitions/PostJobRequest"
+            },
+            "description": "job status",
+            "name": "responseId",
+            "in": "body"
+          }
+        ],
+        "responses": {
+          "200": {
+            "description": "",
+             "schema": {
+              "$ref": "#/definitions/JobDetailInfo"
+            }
+          }
+        }
+      }
+    }
+  },
+  "definitions": {
+    "NsPkgDistributeRequest": {
+      "type": "object",
+      "properties": {
+        "csarId": {
+          "type": "string",
+          "description": "network service package id, UUID"
+        }
+      }
+    },
+    "NsPkgDistributeResponse": {
+      "type": "object",
+      "properties": {
+        "status": {
+          "type": "string",
+          "description": "Operation status. value is success or failed"
+        },
+        "statusDescription": {
+          "type": "string",
+          "description": "description about the operation result"
+        },
+        "errorCode": {
+          "type": "string",
+          "description": "If the status is failed, the errorcode will be returned"
+        }
+      }
+    },
+    "NsPkgDelResponse": {
+      "type": "object",
+      "properties": {
+        "status": {
+          "type": "string",
+          "description": "Operation status. value is success or failed"
+        },
+        "statusDescription": {
+          "type": "string",
+          "description": "description about the operation result"
+        },
+        "errorCode": {
+          "type": "string",
+          "description": "If the status is failed, the errorcode will be returned"
+        }
+      }
+    },
+    "NsPkgListInfo": {
+      "type": "array",
+      "items": {
+        "$ref": "#/definitions/NsPkgDetailInfo"
+      }
+    },
+    "NsPkgDetailInfo": {
+      "type": "object",
+      "properties": {
+        "csarId": {
+          "type": "string"
+        },
+        "packageInfo": {
+          "$ref": "#/definitions/NsPkgInfo"
+        }
+      }
+    },
+    "NsPkgInfo": {
+      "type": "object",
+      "properties": {
+        "nsPackageId": {
+          "type": "string",
+          "description": "network service package id, UUID, csarId"
+        },
+        "nsdId": {
+          "type": "string",
+          "description": "network service descriptor ID"
+        },
+        "nsdProvider": {
+          "type": "string",
+          "description": "network service designer name"
+        },
+        "nsdVersion": {
+          "type": "string",
+          "description": "network service descriptor version"
+        },
+        "csarName": {
+          "type": "string",
+          "description": "network service package name"
+        },
+        "nsdModel": {
+          "type": "string",
+          "description": "ns JSON string parsed and transformed by parser"
+        },
+        "downloadUrl": {
+          "type": "string",
+          "description": "download url of network service package"
+        }
+      }
+    },
+    "NsInstListInfo": {
+      "type": "array",
+      "items": {
+        "$ref": "#/definitions/NsInstInfo"
+      }
+    },
+    "NsInstInfo": {
+      "type": "object",
+      "properties": {
+        "nsInstanceId": {
+          "type": "string",
+          "description": "network service instance ID"
+        },
+        "nsInstanceName": {
+          "type": "string",
+          "description": "network service instance name"
+        }
+      }
+    },
+    "VnfPkgDistributeRequest": {
+      "type": "object",
+      "properties": {
+        "csarId": {
+          "type": "string",
+          "description": "vnf package id, UUID"
+        }
+      }
+    },
+    "VnfPkgDistributeResponse": {
+      "type": "object",
+      "properties": {
+        "jobId": {
+          "type": "string",
+          "description": "VNF package distribute job ID"
+        }
+      }
+    },
+    "VnfPkgDelResponse": {
+      "type": "object",
+      "properties": {
+        "status": {
+          "type": "string",
+          "description": "Operation status. value is success or failed"
+        },
+        "statusDescription": {
+          "type": "string",
+          "description": "description about the operation result"
+        },
+        "errorCode": {
+          "type": "string",
+          "description": "If the status is failed, the errorcode will be returned"
+        }
+      }
+    },
+    "VnfPkgListInfo": {
+      "type": "array",
+      "items": {
+        "$ref": "#/definitions/VnfPkgDetailInfo"
+      }
+    },
+    "VnfPkgDetailInfo": {
+      "type": "object",
+      "properties": {
+        "csarId": {
+          "type": "string",
+          "description": "vnf package id, UUID"
+        },
+        "packageInfo": {
+          "$ref": "#/definitions/VnfPkgInfo"
+        },
+        "imageInfo": {
+          "$ref": "#/definitions/VnfPkgImgListInfo"
+        }
+      }
+    },
+    "VnfPkgInfo": {
+      "type": "object",
+      "description": "vnf package infomation",
+      "properties": {
+        "vnfPackageId": {
+          "type": "string",
+          "description": "vnf package id (csarId)"
+        },
+        "csarName": {
+          "type": "string",
+          "description": "The name of the csar"
+        },
+        "vnfdId": {
+          "type": "string",
+          "description": "VNF descriptor ID"
+        },
+        "vnfdProvider": {
+          "type": "string",
+          "description": "VNF descriptor vendor ID"
+        },
+        "vnfdModel": {
+          "type": "string",
+          "description": "The model of the VNF (JSON) encoded to string"
+        },
+        "vnfdVersion": {
+          "type": "string",
+          "description": "VNF descriptor version"
+        },
+        "vnfVersion": {
+          "type": "string",
+          "description": "VNF Software version"
+        },
+        "downloadUrl":{
+          "type": "string",
+          "description": "The URL from which the VNF package can be downloaded"
+        }
+      }
+    },
+    "VnfInstListInfo": {
+      "type": "array",
+      "items": {
+        "$ref": "#/definitions/VnfInstInfo"
+      }
+    },
+    "VnfInstInfo": {
+      "type": "object",
+      "properties": {
+        "vnfInstanceId": {
+          "type": "string",
+          "description": "VNF instance ID"
+        },
+        "vnfInstanceName": {
+          "type": "string",
+          "description": "VNF instance name"
+        }
+      }
+    },
+    "VnfPkgImgListInfo": {
+      "type": "array",
+      "items": {
+        "$ref": "#/definitions/VnfPkgImgInfo"
+      }
+    },
+    "VnfPkgImgInfo": {
+      "type": "object",
+      "properties": {
+        "fileName": {
+          "type": "string",
+          "description": "image file name"
+        },
+        "imageUrl": {
+          "type": "string",
+          "description": "image file path in the csar or image url in external repository"
+        }
+      }
+    },
+    "modelParserRequest":{
+      "type": "object",
+      "properties": {
+        "csarId": {
+          "type": "string",
+          "description": "csar Package Id"
+        },
+        "inputs": {
+          "type": "object",
+          "description": "csar package json inputs"
+        }
+      }
+    },
+    "modelParserResponse":{
+      "type": "object",
+      "properties": {
+        "model": {
+          "type": "object",
+          "description": "csar model json data"
+        }
+      }
+    },
+    "jobResponseInfo": {
+       "type": "object",
+       "properties": {
+          "status": {
+            "type": "string"
+          },
+         "progress":{
+            "type": "string"
+         },
+         "statusDescription": {
+            "type": "string"
+         },
+         "errorCode": {
+            "type": "string"
+         },
+         "responseId": {
+            "type": "string"
+        }
+       }
+    },
+    "PostJobRequest": {
+      "type": "object",
+      "properties": {
+        "progress": {
+          "type": "string"
+        },
+        "desc": {
+          "type": "string"
+        },
+        "errcode": {
+          "type": "string"
+        }
+      }
+    },
+    "JobDetailInfo":{
+      "type": "object",
+      "properties": {
+            "jobId": {
+               "type": "string"
+             },
+            "responseDescriptor":
+            {
+                "type":"object",
+                "properties": {
+                    "status": {
+                       "type": "string"
+                    },
+                    "progress":{
+                       "type": "string"
+                    },
+                    "statusDescription": {
+                       "type": "string"
+                    },
+                    "errorCode": {
+                       "type": "string"
+                    },
+                    "responseId": {
+                       "type": "string"
+                    },
+                    "responseHistoryList": {
+                        "type": "array",
+                        "items": {
+                           "$ref": "#/definitions/jobResponseInfo"
+                        }
+                    }
+                }
+           }
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/genericparser/swagger/views.py b/genericparser/swagger/views.py
new file mode 100644 (file)
index 0000000..33d0edb
--- /dev/null
@@ -0,0 +1,28 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+
+class SwaggerJsonView(APIView):
+    def get(self, request):
+        json_file = os.path.join(os.path.dirname(__file__), 'vfc.catalog.swagger.json')
+        f = open(json_file)
+        json_data = json.JSONDecoder().decode(f.read())
+        f.close()
+        return Response(json_data)
diff --git a/genericparser/urls.py b/genericparser/urls.py
new file mode 100644 (file)
index 0000000..8e9b0aa
--- /dev/null
@@ -0,0 +1,31 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from django.conf.urls import include, url
+
+from genericparser.pub.config.config import REG_TO_MSB_WHEN_START, REG_TO_MSB_REG_URL, REG_TO_MSB_REG_PARAM
+
+urlpatterns = [
+    url(r'^', include('genericparser.samples.urls')),
+    url(r'^', include('genericparser.packages.urls')),
+    url(r'^', include('genericparser.jobs.urls')),
+    url(r'^', include('genericparser.swagger.urls')),
+]
+
+# Register this service with MSB (Microservices Bus) at startup.
+if REG_TO_MSB_WHEN_START:
+    import json
+    from genericparser.pub.utils.restcall import req_by_msb
+    for reg_param in REG_TO_MSB_REG_PARAM:
+        req_by_msb(REG_TO_MSB_REG_URL, "POST", json.JSONEncoder().encode(reg_param))
diff --git a/genericparser/wsgi.py b/genericparser/wsgi.py
new file mode 100644 (file)
index 0000000..ed4bd0f
--- /dev/null
@@ -0,0 +1,21 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from django.core.wsgi import get_wsgi_application
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "genericparser.settings")
+
+application = get_wsgi_application()
diff --git a/initialize.sh b/initialize.sh
new file mode 100644 (file)
index 0000000..3425019
--- /dev/null
@@ -0,0 +1,16 @@
+#!/bin/bash
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+pip install -r requirements.txt
diff --git a/logs/empty.txt b/logs/empty.txt
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/manage.py b/manage.py
new file mode 100644 (file)
index 0000000..e8de882
--- /dev/null
+++ b/manage.py
@@ -0,0 +1,22 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "genericparser.settings")
+
+if __name__ == "__main__":
+    from django.core.management import execute_from_command_line
+    execute_from_command_line(sys.argv)
diff --git a/mvn-phase-script.sh b/mvn-phase-script.sh
new file mode 100644 (file)
index 0000000..6b41abf
--- /dev/null
@@ -0,0 +1,86 @@
+#!/bin/bash
+# Copyright 2018 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+set -e
+
+echo "running script: [$0] for module [$1] at stage [$2]"
+
+export SETTINGS_FILE=${SETTINGS_FILE:-$HOME/.m2/settings.xml}
+MVN_PROJECT_MODULEID="$1"
+MVN_PHASE="$2"
+
+
+FQDN="${MVN_PROJECT_GROUPID}.${MVN_PROJECT_ARTIFACTID}"
+if [ "$MVN_PROJECT_MODULEID" == "__" ]; then
+  MVN_PROJECT_MODULEID=""
+fi
+
+if [ -z "$WORKSPACE" ]; then
+    WORKSPACE=$(pwd)
+fi
+
+
+# mvn phase in life cycle
+MVN_PHASE="$2"
+
+
+echo "MVN_PROJECT_MODULEID is            [$MVN_PROJECT_MODULEID]"
+echo "MVN_PHASE is                       [$MVN_PHASE]"
+echo "MVN_PROJECT_GROUPID is             [$MVN_PROJECT_GROUPID]"
+echo "MVN_PROJECT_ARTIFACTID is          [$MVN_PROJECT_ARTIFACTID]"
+echo "MVN_PROJECT_VERSION is             [$MVN_PROJECT_VERSION]"
+
+run_tox_test()
+{
+  set -x
+  CURDIR=$(pwd)
+  if [[ ${CURDIR} =~ "-sonar" ]]
+  then
+    echo "====Sonar job, need execute tox."
+    TOXINIS=$(find . -name "tox.ini")
+    for TOXINI in "${TOXINIS[@]}"; do
+      DIR=$(echo "$TOXINI" | rev | cut -f2- -d'/' | rev)
+      cd "${CURDIR}/${DIR}"
+      rm -rf ./venv-tox ./.tox
+      virtualenv ./venv-tox
+      source ./venv-tox/bin/activate
+      pip install --upgrade pip
+      pip install --upgrade tox argparse
+      pip freeze
+      tox
+      deactivate
+      rm -rf ./venv-tox ./.tox
+    done
+  else
+    echo "====Not a sonar job, need not execute tox."
+  fi
+}
+
+
+case $MVN_PHASE in
+clean)
+  echo "==> clean phase script"
+  rm -rf ./venv-*
+  ;;
+test)
+  echo "==> test phase script"
+  run_tox_test
+  ;;
+*)
+  echo "==> unprocessed phase"
+  ;;
+esac
+
diff --git a/pom.xml b/pom.xml
new file mode 100644 (file)
index 0000000..9029d48
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,113 @@
<?xml version="1.0"?>
<!--
    Copyright 2017 ZTE Corporation.

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

            http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <!-- Inherit common ONAP build conventions from oparent. -->
    <parent>
        <groupId>org.onap.oparent</groupId>
        <artifactId>oparent</artifactId>
        <version>1.2.3</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
    <groupId>org.onap.vfc.nfvo.catalog</groupId>
    <artifactId>vfc-nfvo-catalog</artifactId>
    <version>1.3.0-SNAPSHOT</version>
    <packaging>pom</packaging>
    <name>vfc-nfvo-catalog</name>
    <description>vfc nfvo catalog</description>
    <!-- Sonar settings: the project is Python, so sources are scanned in
         place; the xunit/coverage reports are produced by tox (invoked
         from mvn-phase-script.sh), not by a Java build. -->
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <sonar.sources>.</sonar.sources>
        <sonar.junit.reportsPath>xunit-results.xml</sonar.junit.reportsPath>
        <sonar.python.coverage.reportPath>coverage.xml</sonar.python.coverage.reportPath>
        <sonar.language>py</sonar.language>
        <sonar.pluginname>python</sonar.pluginname>
        <sonar.inclusions>**/**.py</sonar.inclusions>
        <sonar.exclusions>**/tests/**.py,**/test*.py</sonar.exclusions>
    </properties>
    <build>
      <!-- Shared configuration: every exec-maven-plugin execution runs
           mvn-phase-script.sh with the maven coordinates exported as
           environment variables for the script to read. -->
      <pluginManagement>
        <plugins>
          <plugin>
            <groupId>org.codehaus.mojo</groupId>
            <artifactId>exec-maven-plugin</artifactId>
            <version>1.2.1</version>
            <configuration>
              <executable>${project.basedir}/mvn-phase-script.sh</executable>
              <environmentVariables>
                <!-- make mvn properties as env for our script -->
                <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID>
                <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID>
                <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION>
              </environmentVariables>
            </configuration>
          </plugin>
        </plugins>
      </pluginManagement>
      <plugins>
        <!-- Hook mvn-phase-script.sh into the clean and test phases;
             the "__" argument is the module-id placeholder the script
             treats as the top-level module. -->
        <plugin>
          <groupId>org.codehaus.mojo</groupId>
          <artifactId>exec-maven-plugin</artifactId>
          <version>1.2.1</version>
          <executions>
            <execution>
              <id>clean phase script</id>
              <phase>clean</phase>
              <goals>
                <goal>exec</goal>
              </goals>
              <configuration>
                <arguments>
                  <argument>__</argument>
                  <argument>clean</argument>
                </arguments>
              </configuration>
            </execution>
            <execution>
              <id>test script</id>
              <phase>test</phase>
              <goals>
                <goal>exec</goal>
              </goals>
              <configuration>
                <arguments>
                  <argument>__</argument>
                  <argument>test</argument>
                </arguments>
              </configuration>
            </execution>
          </executions>
        </plugin>
        <!-- Package the distribution described by assembly.xml at the
             package phase. -->
        <plugin>
            <artifactId>maven-assembly-plugin</artifactId>
            <configuration>
                <appendAssemblyId>false</appendAssemblyId>
                <descriptors>
                    <descriptor>assembly.xml</descriptor>
                </descriptors>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
      </plugins>
    </build>
</project>
diff --git a/requirements.txt b/requirements.txt
new file mode 100644 (file)
index 0000000..b60dce0
--- /dev/null
@@ -0,0 +1,37 @@
+# rest framework
+Django==1.11.9
+djangorestframework==3.7.7
+
+# for access MySQL
+PyMySQL==0.9.3
+
+# redis cache
+redis==2.10.5
+
+# for access redis cache
+redisco==0.1.4
+django-redis-cache==0.13.1
+
+# for call rest api
+httplib2==0.9.2
+
+# for unit test
+coverage==4.2
+mock==2.0.0
+unittest_xml_reporting==1.12.0
+
+# for parser
+cryptography==2.0.3
+paramiko==2.0.2
+nfv-toscaparser==1.1.1.dev3
+
+# for auto swagger
+drf-yasg>=1.2.2
+flex>=6.11.1
+swagger-spec-validator>=2.1.0
+
+# for onap logging
+onappylog>=1.0.6
+
+# uwsgi for parallel processing
+uwsgi
\ No newline at end of file
diff --git a/resources/bin/initDB.sh b/resources/bin/initDB.sh
new file mode 100644 (file)
index 0000000..7d87e5d
--- /dev/null
@@ -0,0 +1,34 @@
#!/bin/bash
#
# Copyright 2018 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Create the genericparser (nfvocatalog) database.
# Usage: initDB.sh <mysql_user> <mysql_password> <mysql_port> <mysql_ip>
#
# BUGFIX: the original stored the script directory in HOME, silently
# clobbering the HOME environment variable (which the mysql client uses
# to locate ~/.my.cnf). Use a dedicated variable instead.
DIRNAME=$(dirname "$0")
SCRIPT_HOME=$(cd "$DIRNAME" && pwd)
MYSQL_USER=$1
MYSQL_PASSWORD=$2
MYSQL_PORT=$3
MYSQL_IP=$4
echo "start create genericparser db"
sql_path=$SCRIPT_HOME/..
# NOTE: the SQL file name intentionally keeps the historical
# "gengricparser" spelling — that is the actual file name in the repo.
if mysql -u"$MYSQL_USER" -p"$MYSQL_PASSWORD" -P"$MYSQL_PORT" -h"$MYSQL_IP" < "$sql_path/dbscripts/mysql/modeling-gengricparser-createdb.sql"; then
    echo "Create genericparser database successfully"
    exit 0
else
    echo "Failed to create genericparser database"
    exit 1
fi
+
diff --git a/resources/dbscripts/mysql/modeling-gengricparser-createdb.sql b/resources/dbscripts/mysql/modeling-gengricparser-createdb.sql
new file mode 100644 (file)
index 0000000..791bd7f
--- /dev/null
@@ -0,0 +1,28 @@
--
-- Copyright 2018 ZTE Corporation.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
--     http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
--

/******************create database and user***************************/
use mysql;

-- Create the catalog database (utf8 so TOSCA/package metadata round-trips).
create database if not exists nfvocatalog CHARACTER SET utf8;


-- Create the nfvocatalog user (via implicit GRANT ... IDENTIFIED BY) and
-- grant it full access from any host and from localhost.
-- NOTE(review): GRANT ... IDENTIFIED BY creates the user only on MySQL 5.x;
-- the syntax was removed in MySQL 8 (needs CREATE USER there) — confirm the
-- targeted server version.
-- NOTE(review): credentials are hard-coded and the grant on mysql.* WITH
-- GRANT OPTION is very broad — acceptable for a dev seed script only.
GRANT ALL PRIVILEGES ON nfvocatalog.* TO 'nfvocatalog'@'%' IDENTIFIED BY 'nfvocatalog' WITH GRANT OPTION;
GRANT ALL PRIVILEGES ON mysql.* TO 'nfvocatalog'@'%' IDENTIFIED BY 'nfvocatalog' WITH GRANT OPTION;

GRANT ALL PRIVILEGES ON nfvocatalog.* TO 'nfvocatalog'@'localhost' IDENTIFIED BY 'nfvocatalog' WITH GRANT OPTION;
GRANT ALL PRIVILEGES ON mysql.* TO 'nfvocatalog'@'localhost' IDENTIFIED BY 'nfvocatalog' WITH GRANT OPTION;
FLUSH PRIVILEGES;
diff --git a/resources/resource-TestFyx-template.yml b/resources/resource-TestFyx-template.yml
new file mode 100644 (file)
index 0000000..1d6426b
--- /dev/null
@@ -0,0 +1,457 @@
+tosca_definitions_version: tosca_simple_yaml_1_1
+metadata:
+  invariantUUID: 4b8712de-254b-4dae-8f16-b3d63c9f2d49
+  UUID: faf6c8f0-b096-44e2-88e9-4527451a71ff
+  name: Test-fyx
+  description: Application Test VF
+  type: VF
+  category: Application L4+
+  subcategory: Application Server
+  resourceVendor: zte
+  resourceVendorRelease: '1.0'
+  resourceVendorModelNumber: ''
+imports:
+- nodes:
+    file: nodes.yml
+- datatypes:
+    file: data.yml
+- capabilities:
+    file: capabilities.yml
+- relationships:
+    file: relationships.yml
+- groups:
+    file: groups.yml
+- policies:
+    file: policies.yml
+- resource-Test-fyx-interface:
+    file: resource-TestFyx-template-interface.yml
+- resource-ExtCP:
+    file: resource-Extcp-template.yml
+- resource-Compute:
+    file: resource-Compute-template.yml
+- resource-BlockStorage:
+    file: resource-Blockstorage-template.yml
+- resource-ContrailVirtualNetwork:
+    file: resource-Contrailvirtualnetwork-template.yml
+- resource-ContrailPort:
+    file: resource-Contrailport-template.yml
+- resource-ObjectStorage:
+    file: resource-Objectstorage-template.yml
+topology_template:
+  inputs:
+    nf_naming:
+      type: org.openecomp.datatypes.Naming
+      default:
+        ecomp_generated_naming: true
+    nf_naming_code:
+      type: string
+      default: {
+        }
+    nf_function:
+      type: string
+      default: {
+        }
+    availability_zone_max_count:
+      type: integer
+      default: 1
+    nf_role:
+      type: string
+      default: {
+        }
+    max_instances:
+      type: integer
+      default: {
+        }
+    min_instances:
+      type: integer
+      default: {
+        }
+    nf_type:
+      type: string
+      default: {
+        }
+  node_templates:
+    VDU-B:
+      type: tosca.nodes.Compute
+      metadata:
+        invariantUUID: 96ac4c35-bdc2-4018-b022-d7df39507d7d
+        UUID: 0da94c51-2271-4ee2-ae13-cd89f46dbdff
+        customizationUUID: ea43f19d-770e-4ed4-9d5a-1f4999366fde
+        version: '1.0'
+        name: Compute
+        description: Represents a real or virtual machine or server. Information specified on the Compute node will be used to find the machine that fits the given requirements in the cloud available machines. If no sizing information are specified the cloud provider default machine will be used. It is strongly recommended to specify the required CPUs and memory at least.
+        type: VFC
+        category: Generic
+        subcategory: Infrastructure
+        resourceVendor: ATT (Tosca)
+        resourceVendorRelease: 1.0.0.wd03
+        resourceVendorModelNumber: ''
+    ContrailPort 0:
+      type: org.openecomp.resource.cp.nodes.heat.network.contrail.Port
+      metadata:
+        invariantUUID: 740aef05-3c26-4fc9-ab83-ef889801af59
+        UUID: 67110e55-8b3b-4595-9e7f-b76a8e5b78c8
+        customizationUUID: 37d74cc1-d060-4cbc-8a23-b0b81424e984
+        version: '2.0'
+        name: ContrailPort
+        description: Represents a logical entity that associates between Compute and Network normative types for contrail.
+        type: CP
+        category: Generic
+        subcategory: Network Elements
+        resourceVendor: ATT (Tosca)
+        resourceVendorRelease: 1.0.0.wd03
+        resourceVendorModelNumber: ''
+      properties:
+        mac_requirements:
+          mac_count_required:
+            is_required: false
+        static_route: false
+        shared_ip: false
+        exCP_naming:
+          ecomp_generated_naming: true
+        is_default: false
+      requirements:
+      - link:
+          capability: tosca.capabilities.network.Linkable
+          node: ContrailVirtualNetwork 0
+          relationship: tosca.relationships.network.LinksTo
+      - binding:
+          capability: tosca.capabilities.network.Bindable
+          node: VDU-B
+          relationship: tosca.relationships.network.BindsTo
+    VDU-C:
+      type: tosca.nodes.Compute
+      metadata:
+        invariantUUID: 96ac4c35-bdc2-4018-b022-d7df39507d7d
+        UUID: 0da94c51-2271-4ee2-ae13-cd89f46dbdff
+        customizationUUID: 8e76a90e-4ec3-4177-bdef-92ce55934f9d
+        version: '1.0'
+        name: Compute
+        description: Represents a real or virtual machine or server. Information specified on the Compute node will be used to find the machine that fits the given requirements in the cloud available machines. If no sizing information are specified the cloud provider default machine will be used. It is strongly recommended to specify the required CPUs and memory at least.
+        type: VFC
+        category: Generic
+        subcategory: Infrastructure
+        resourceVendor: ATT (Tosca)
+        resourceVendorRelease: 1.0.0.wd03
+        resourceVendorModelNumber: ''
+    ContrailPort 1:
+      type: org.openecomp.resource.cp.nodes.heat.network.contrail.Port
+      metadata:
+        invariantUUID: 740aef05-3c26-4fc9-ab83-ef889801af59
+        UUID: 67110e55-8b3b-4595-9e7f-b76a8e5b78c8
+        customizationUUID: 723cf043-6243-4a82-b220-d3fa548ce79f
+        version: '2.0'
+        name: ContrailPort
+        description: Represents a logical entity that associates between Compute and Network normative types for contrail.
+        type: CP
+        category: Generic
+        subcategory: Network Elements
+        resourceVendor: ATT (Tosca)
+        resourceVendorRelease: 1.0.0.wd03
+        resourceVendorModelNumber: ''
+      properties:
+        mac_requirements:
+          mac_count_required:
+            is_required: false
+        static_route: false
+        shared_ip: false
+        exCP_naming:
+          ecomp_generated_naming: true
+        is_default: false
+      requirements:
+      - link:
+          capability: tosca.capabilities.network.Linkable
+          node: ContrailVirtualNetwork 0
+          relationship: tosca.relationships.network.LinksTo
+      - binding:
+          capability: tosca.capabilities.network.Bindable
+          node: VDU-A
+          relationship: tosca.relationships.network.BindsTo
+    BlockStorage 0:
+      type: tosca.nodes.BlockStorage
+      metadata:
+        invariantUUID: 0b40dcd5-3d95-4dd4-bf9b-22b2f10755b8
+        UUID: 9ad49f03-2aa2-48e2-8fae-82d3b7f81788
+        customizationUUID: 9239f0fc-eaaa-400d-bca1-8510841fa651
+        version: '1.0'
+        name: BlockStorage
+        description: Represents a server-local block storage device (i.e., not shared) offering evenly sized blocks of data from which raw storage volumes can be created.
+        type: VFC
+        category: Generic
+        subcategory: Infrastructure
+        resourceVendor: ATT (Tosca)
+        resourceVendorRelease: 1.0.0.wd03
+        resourceVendorModelNumber: ''
+    VDU-A:
+      type: tosca.nodes.Compute
+      metadata:
+        invariantUUID: 96ac4c35-bdc2-4018-b022-d7df39507d7d
+        UUID: 0da94c51-2271-4ee2-ae13-cd89f46dbdff
+        customizationUUID: 35ccbeed-443d-4899-9042-82433aea3c01
+        version: '1.0'
+        name: Compute
+        description: Represents a real or virtual machine or server. Information specified on the Compute node will be used to find the machine that fits the given requirements in the cloud available machines. If no sizing information are specified the cloud provider default machine will be used. It is strongly recommended to specify the required CPUs and memory at least.
+        type: VFC
+        category: Generic
+        subcategory: Infrastructure
+        resourceVendor: ATT (Tosca)
+        resourceVendorRelease: 1.0.0.wd03
+        resourceVendorModelNumber: ''
+    ExtCP 0:
+      type: org.openecomp.resource.cp.extCP
+      metadata:
+        invariantUUID: 68f72152-2c57-4099-8bbc-aedb388a8f81
+        UUID: 41d5a4a1-346e-4b47-a08f-936572916657
+        customizationUUID: 94e2a43c-2c99-487f-a74b-d82edbbd1d5f
+        version: '2.0'
+        name: ExtCP
+        description: The AT&T Connection Point base type all other CP derive from
+        type: CP
+        category: Generic
+        subcategory: Network Elements
+        resourceVendor: ATT (Tosca)
+        resourceVendorRelease: 1.0.0.wd03
+        resourceVendorModelNumber: ''
+      properties:
+        mac_requirements:
+          mac_count_required:
+            is_required: false
+        exCP_naming:
+          ecomp_generated_naming: true
+      requirements:
+      - virtualBinding:
+          capability: tosca.capabilities.network.Bindable
+          node: VDU-C
+          relationship: tosca.relationships.network.BindsTo
+    ObjectStorage 0:
+      type: tosca.nodes.ObjectStorage
+      metadata:
+        invariantUUID: 4e89d2a8-ff83-47f8-9788-4aef40feaf0e
+        UUID: e7217097-ada2-42b9-81af-ccf525514796
+        customizationUUID: 42cc06c5-3bf2-480b-8c30-aac34c48e39f
+        version: '1.0'
+        name: ObjectStorage
+        description: Represents storage that provides the ability to store data as objects (or BLOBs of data) without consideration for the underlying filesystem or devices.
+        type: VFC
+        category: Generic
+        subcategory: Infrastructure
+        resourceVendor: ATT (Tosca)
+        resourceVendorRelease: 1.0.0.wd03
+        resourceVendorModelNumber: ''
+    ContrailVirtualNetwork 0:
+      type: org.openecomp.resource.vl.nodes.heat.network.contrail.VirtualNetwork
+      metadata:
+        invariantUUID: 86b02009-8634-44f1-a22f-dc685460c95d
+        UUID: 4ebcc4a7-d648-4b7b-a23c-a094ec89b7f0
+        customizationUUID: d320316b-e61e-44f4-991d-e06f5fa3f9d2
+        version: '1.0'
+        name: ContrailVirtualNetwork
+        description: Represents a network service with optional subnets and advanced configurations.
+        type: VL
+        category: Generic
+        subcategory: Network Elements
+        resourceVendor: ATT (Tosca)
+        resourceVendorRelease: 1.0.0.wd03
+        resourceVendorModelNumber: ''
+      properties:
+        dhcp_enabled: true
+        ip_version: 4
+  substitution_mappings:
+    node_type: org.openecomp.resource.vf.TestFyx
+    capabilities:
+      extcp0.feature:
+      - ExtCP 0
+      - feature
+      compute0.binding:
+      - Compute 0
+      - binding
+      contrailport1.network.outgoing.packets.rate:
+      - ContrailPort 1
+      - network.outgoing.packets.rate
+      objectstorage0.feature:
+      - ObjectStorage 0
+      - feature
+      compute1.binding:
+      - Compute 1
+      - binding
+      contrailport0.network.incoming.packets.rate:
+      - ContrailPort 0
+      - network.incoming.packets.rate
+      compute1.endpoint:
+      - Compute 1
+      - endpoint
+      contrailport1.network.incoming.packets:
+      - ContrailPort 1
+      - network.incoming.packets
+      objectstorage0.storage_endpoint:
+      - ObjectStorage 0
+      - storage_endpoint
+      compute2.binding:
+      - Compute 2
+      - binding
+      contrailport1.network.incoming.bytes.rate:
+      - ContrailPort 1
+      - network.incoming.bytes.rate
+      contrailport1.network.outpoing.packets:
+      - ContrailPort 1
+      - network.outpoing.packets
+      contrailport0.network.incoming.packets:
+      - ContrailPort 0
+      - network.incoming.packets
+      contrailport0.feature:
+      - ContrailPort 0
+      - feature
+      compute1.host:
+      - Compute 1
+      - host
+      compute0.endpoint:
+      - Compute 0
+      - endpoint
+      contrailport1.feature:
+      - ContrailPort 1
+      - feature
+      blockstorage0.attachment:
+      - BlockStorage 0
+      - attachment
+      contrailvirtualnetwork0.end_point:
+      - ContrailVirtualNetwork 0
+      - end_point
+      contrailport0.network.outgoing.packets.rate:
+      - ContrailPort 0
+      - network.outgoing.packets.rate
+      compute2.os:
+      - Compute 2
+      - os
+      compute0.os:
+      - Compute 0
+      - os
+      compute1.scalable:
+      - Compute 1
+      - scalable
+      contrailport1.network.incoming.bytes:
+      - ContrailPort 1
+      - network.incoming.bytes
+      contrailvirtualnetwork0.attachment:
+      - ContrailVirtualNetwork 0
+      - attachment
+      contrailvirtualnetwork0.link:
+      - ContrailVirtualNetwork 0
+      - link
+      extcp0.internal_connectionPoint:
+      - ExtCP 0
+      - internal_connectionPoint
+      contrailport1.network.incoming.packets.rate:
+      - ContrailPort 1
+      - network.incoming.packets.rate
+      contrailport0.network.outgoing.bytes.rate:
+      - ContrailPort 0
+      - network.outgoing.bytes.rate
+      compute2.endpoint:
+      - Compute 2
+      - endpoint
+      blockstorage0.feature:
+      - BlockStorage 0
+      - feature
+      contrailvirtualnetwork0.feature:
+      - ContrailVirtualNetwork 0
+      - feature
+      compute0.feature:
+      - Compute 0
+      - feature
+      compute0.host:
+      - Compute 0
+      - host
+      compute1.feature:
+      - Compute 1
+      - feature
+      contrailport1.network.outgoing.bytes:
+      - ContrailPort 1
+      - network.outgoing.bytes
+      compute0.scalable:
+      - Compute 0
+      - scalable
+      compute2.feature:
+      - Compute 2
+      - feature
+      compute2.scalable:
+      - Compute 2
+      - scalable
+      contrailport0.network.outgoing.bytes:
+      - ContrailPort 0
+      - network.outgoing.bytes
+      contrailport1.network.outgoing.bytes.rate:
+      - ContrailPort 1
+      - network.outgoing.bytes.rate
+      contrailport0.network.outpoing.packets:
+      - ContrailPort 0
+      - network.outpoing.packets
+      compute2.host:
+      - Compute 2
+      - host
+      contrailport0.network.incoming.bytes:
+      - ContrailPort 0
+      - network.incoming.bytes
+      compute1.os:
+      - Compute 1
+      - os
+      contrailport0.network.incoming.bytes.rate:
+      - ContrailPort 0
+      - network.incoming.bytes.rate
+    requirements:
+      compute2.local_storage:
+      - Compute 2
+      - local_storage
+      extcp0.virtualBinding:
+      - ExtCP 0
+      - virtualBinding
+      blockstorage0.dependency:
+      - BlockStorage 0
+      - dependency
+      contrailport0.dependency:
+      - ContrailPort 0
+      - dependency
+      compute2.dependency:
+      - Compute 2
+      - dependency
+      extcp0.dependency:
+      - ExtCP 0
+      - dependency
+      contrailport0.link:
+      - ContrailPort 0
+      - link
+      compute1.local_storage:
+      - Compute 1
+      - local_storage
+      contrailport0.binding:
+      - ContrailPort 0
+      - binding
+      compute0.local_storage:
+      - Compute 0
+      - local_storage
+      contrailport1.dependency:
+      - ContrailPort 1
+      - dependency
+      contrailport1.binding:
+      - ContrailPort 1
+      - binding
+      compute1.dependency:
+      - Compute 1
+      - dependency
+      compute0.dependency:
+      - Compute 0
+      - dependency
+      extcp0.external_virtualLink:
+      - ExtCP 0
+      - external_virtualLink
+      contrailport1.link:
+      - ContrailPort 1
+      - link
+      objectstorage0.dependency:
+      - ObjectStorage 0
+      - dependency
+      contrailvirtualnetwork0.dependency:
+      - ContrailVirtualNetwork 0
+      - dependency
+      extcp0.virtualLink:
+      - ExtCP 0
+      - virtualLink
diff --git a/resources/service-TestServiceFyx-template.yml b/resources/service-TestServiceFyx-template.yml
new file mode 100644 (file)
index 0000000..e4b18a1
--- /dev/null
@@ -0,0 +1,476 @@
+tosca_definitions_version: tosca_simple_yaml_1_1
+metadata:
+  invariantUUID: 54d92e89-b56b-41b1-af6b-cdeb75cb233f
+  UUID: 21fa8ccd-064f-4ecc-98e0-3196a4d12e0e
+  name: Test-Service-fyx
+  description: Service Test
+  type: Service
+  category: Mobility
+  serviceType: ''
+  serviceRole: ''
+  serviceEcompNaming: true
+  ecompGeneratedNaming: true
+  namingPolicy: ''
+imports:
+- nodes:
+    file: nodes.yml
+- datatypes:
+    file: data.yml
+- capabilities:
+    file: capabilities.yml
+- relationships:
+    file: relationships.yml
+- groups:
+    file: groups.yml
+- policies:
+    file: policies.yml
+- service-Test-Service-fyx-interface:
+    file: service-TestServiceFyx-template-interface.yml
+- resource-ExtVL:
+    file: resource-Extvl-template.yml
+- resource-Test-fyx:
+    file: resource-TestFyx-template.yml
+- resource-Test-fyx-interface:
+    file: resource-TestFyx-template-interface.yml
+- resource-ExtCP:
+    file: resource-Extcp-template.yml
+- resource-vIMS:
+    file: resource-Vims-template.yml
+- resource-vIMS-interface:
+    file: resource-Vims-template-interface.yml
+topology_template:
+  node_templates:
+    ExtVL 0:
+      type: org.openecomp.resource.vl.extVL
+      metadata:
+        invariantUUID: 25e1a13f-c8dc-4823-97b6-1ef40d06e695
+        UUID: 883a59b7-2221-4465-87d5-0cd880a5eb1a
+        customizationUUID: c62183b0-161b-441d-8e70-8cd931b01413
+        version: '1.0'
+        name: ExtVL
+        description: ECOMP generic virtual link (network) base type for all other service-level and global networks
+        type: VL
+        category: Generic
+        subcategory: Network Elements
+        resourceVendor: ATT (Tosca)
+        resourceVendorRelease: 1.0.0.wd03
+        resourceVendorModelNumber: ''
+      properties:
+        network_assignments:
+          is_external_network: false
+          ipv4_subnet_default_assignment:
+            min_subnets_count: 1
+          ecomp_generated_network_assignment: false
+          ipv6_subnet_default_assignment:
+            min_subnets_count: 1
+        exVL_naming:
+          ecomp_generated_naming: true
+        network_flows:
+          is_network_policy: false
+          is_bound_to_vpn: false
+        network_homing:
+          ecomp_selected_instance_node_target: false
+    Test-fyx 0:
+      type: org.openecomp.resource.vf.TestFyx
+      metadata:
+        invariantUUID: 4b8712de-254b-4dae-8f16-b3d63c9f2d49
+        UUID: faf6c8f0-b096-44e2-88e9-4527451a71ff
+        customizationUUID: 14254e74-4465-44af-a057-550a07785be8
+        version: '0.1'
+        name: Test-fyx
+        description: Application Test VF
+        type: VF
+        category: Application L4+
+        subcategory: Application Server
+        resourceVendor: zte
+        resourceVendorRelease: '1.0'
+        resourceVendorModelNumber: ''
+      properties:
+        nf_naming:
+          ecomp_generated_naming: true
+        availability_zone_max_count: 1
+      requirements:
+      - virtualLink:
+          capability: tosca.capabilities.network.Linkable
+          node: ExtVL 0
+          relationship: tosca.relationships.network.LinksTo
+    ExtCP 0:
+      type: org.openecomp.resource.cp.extCP
+      metadata:
+        invariantUUID: 68f72152-2c57-4099-8bbc-aedb388a8f81
+        UUID: 41d5a4a1-346e-4b47-a08f-936572916657
+        customizationUUID: f89628fa-4983-4468-a0f2-20c5810d43e4
+        version: '2.0'
+        name: ExtCP
+        description: The AT&T Connection Point base type all other CP derive from
+        type: CP
+        category: Generic
+        subcategory: Network Elements
+        resourceVendor: ATT (Tosca)
+        resourceVendorRelease: 1.0.0.wd03
+        resourceVendorModelNumber: ''
+      properties:
+        mac_requirements:
+          mac_count_required:
+            is_required: false
+        exCP_naming:
+          ecomp_generated_naming: true
+      requirements:
+      - virtualBinding:
+          capability: tosca.capabilities.network.Bindable
+          node: vIMS 0
+          relationship: tosca.relationships.network.BindsTo
+      - virtualLink:
+          capability: tosca.capabilities.network.Linkable
+          node: ExtVL 0
+          relationship: tosca.relationships.network.LinksTo
+    vIMS 0:
+      type: org.openecomp.resource.vf.Vims
+      metadata:
+        invariantUUID: 2191b35e-42aa-4407-81a2-184f4bf991be
+        UUID: a71ff273-fe43-46fa-a4bc-79d48a250c96
+        customizationUUID: f4cc1a90-ce84-43ab-aaa6-5eccad21f7d2
+        version: '0.1'
+        name: vIMS
+        description: Desc of vIMS
+        type: VF
+        category: Application L4+
+        subcategory: Web Server
+        resourceVendor: zte
+        resourceVendorRelease: v1.0
+        resourceVendorModelNumber: ''
+      properties:
+        nf_naming:
+          ecomp_generated_naming: true
+        availability_zone_max_count: 1
+  substitution_mappings:
+    node_type: org.openecomp.service.TestServiceFyx
+    capabilities:
+      vims0.port1.feature:
+      - Port 1
+      - port1.feature
+      testfyx0.contrailport0.network.incoming.packets:
+      - ContrailPort 0
+      - contrailport0.network.incoming.packets
+      vims0.compute0.feature:
+      - Compute 0
+      - compute0.feature
+      testfyx0.compute1.feature:
+      - Compute 1
+      - compute1.feature
+      testfyx0.contrailvirtualnetwork0.link:
+      - ContrailVirtualNetwork 0
+      - contrailvirtualnetwork0.link
+      testfyx0.contrailport0.network.incoming.packets.rate:
+      - ContrailPort 0
+      - contrailport0.network.incoming.packets.rate
+      testfyx0.compute2.host:
+      - Compute 2
+      - compute2.host
+      vims0.compute0.os:
+      - Compute 0
+      - compute0.os
+      vims0.compute1.scalable:
+      - Compute 1
+      - compute1.scalable
+      testfyx0.contrailport0.network.incoming.bytes.rate:
+      - ContrailPort 0
+      - contrailport0.network.incoming.bytes.rate
+      vims0.compute0.host:
+      - Compute 0
+      - compute0.host
+      vims0.compute0.endpoint:
+      - Compute 0
+      - compute0.endpoint
+      testfyx0.contrailport0.network.incoming.bytes:
+      - ContrailPort 0
+      - contrailport0.network.incoming.bytes
+      testfyx0.objectstorage0.storage_endpoint:
+      - ObjectStorage 0
+      - objectstorage0.storage_endpoint
+      testfyx0.compute0.binding:
+      - Compute 0
+      - compute0.binding
+      testfyx0.compute0.os:
+      - Compute 0
+      - compute0.os
+      testfyx0.compute0.endpoint:
+      - Compute 0
+      - compute0.endpoint
+      testfyx0.contrailport0.feature:
+      - ContrailPort 0
+      - contrailport0.feature
+      extcp0.internal_connectionPoint:
+      - ExtCP 0
+      - internal_connectionPoint
+      testfyx0.compute1.binding:
+      - Compute 1
+      - compute1.binding
+      vims0.compute1.host:
+      - Compute 1
+      - compute1.host
+      testfyx0.contrailport0.network.outgoing.packets.rate:
+      - ContrailPort 0
+      - contrailport0.network.outgoing.packets.rate
+      testfyx0.compute1.endpoint:
+      - Compute 1
+      - compute1.endpoint
+      testfyx0.contrailport1.network.outgoing.bytes:
+      - ContrailPort 1
+      - contrailport1.network.outgoing.bytes
+      vims0.compute1.os:
+      - Compute 1
+      - compute1.os
+      vims0.compute1.binding:
+      - Compute 1
+      - compute1.binding
+      testfyx0.contrailport1.network.outgoing.bytes.rate:
+      - ContrailPort 1
+      - contrailport1.network.outgoing.bytes.rate
+      vims0.vl0.virtual_linkable:
+      - VL 0
+      - vl0.virtual_linkable
+      testfyx0.extcp0.feature:
+      - ExtCP 0
+      - extcp0.feature
+      testfyx0.contrailvirtualnetwork0.feature:
+      - ContrailVirtualNetwork 0
+      - contrailvirtualnetwork0.feature
+      testfyx0.compute1.host:
+      - Compute 1
+      - compute1.host
+      extvl0.feature:
+      - ExtVL 0
+      - feature
+      extcp0.feature:
+      - ExtCP 0
+      - feature
+      vims0.compute1.feature:
+      - Compute 1
+      - compute1.feature
+      testfyx0.contrailport0.network.outpoing.packets:
+      - ContrailPort 0
+      - contrailport0.network.outpoing.packets
+      testfyx0.blockstorage0.attachment:
+      - BlockStorage 0
+      - blockstorage0.attachment
+      testfyx0.contrailport1.network.incoming.packets:
+      - ContrailPort 1
+      - contrailport1.network.incoming.packets
+      testfyx0.compute0.feature:
+      - Compute 0
+      - compute0.feature
+      testfyx0.compute2.feature:
+      - Compute 2
+      - compute2.feature
+      testfyx0.contrailport1.network.outgoing.packets.rate:
+      - ContrailPort 1
+      - contrailport1.network.outgoing.packets.rate
+      vims0.compute0.scalable:
+      - Compute 0
+      - compute0.scalable
+      testfyx0.contrailport1.network.incoming.bytes.rate:
+      - ContrailPort 1
+      - contrailport1.network.incoming.bytes.rate
+      testfyx0.compute0.scalable:
+      - Compute 0
+      - compute0.scalable
+      testfyx0.contrailvirtualnetwork0.end_point:
+      - ContrailVirtualNetwork 0
+      - contrailvirtualnetwork0.end_point
+      vims0.vl0.end_point:
+      - VL 0
+      - vl0.end_point
+      testfyx0.compute2.endpoint:
+      - Compute 2
+      - compute2.endpoint
+      vims0.extcp0.internal_connectionPoint:
+      - ExtCP 0
+      - extcp0.internal_connectionPoint
+      testfyx0.compute2.os:
+      - Compute 2
+      - compute2.os
+      testfyx0.contrailport0.network.outgoing.bytes:
+      - ContrailPort 0
+      - contrailport0.network.outgoing.bytes
+      testfyx0.compute0.host:
+      - Compute 0
+      - compute0.host
+      vims0.extcp0.feature:
+      - ExtCP 0
+      - extcp0.feature
+      testfyx0.contrailport1.network.incoming.packets.rate:
+      - ContrailPort 1
+      - contrailport1.network.incoming.packets.rate
+      vims0.compute0.binding:
+      - Compute 0
+      - compute0.binding
+      testfyx0.contrailport1.feature:
+      - ContrailPort 1
+      - contrailport1.feature
+      testfyx0.objectstorage0.feature:
+      - ObjectStorage 0
+      - objectstorage0.feature
+      testfyx0.compute1.scalable:
+      - Compute 1
+      - compute1.scalable
+      vims0.compute1.endpoint:
+      - Compute 1
+      - compute1.endpoint
+      extvl0.virtual_linkable:
+      - ExtVL 0
+      - virtual_linkable
+      testfyx0.compute2.binding:
+      - Compute 2
+      - compute2.binding
+      testfyx0.contrailvirtualnetwork0.attachment:
+      - ContrailVirtualNetwork 0
+      - contrailvirtualnetwork0.attachment
+      testfyx0.compute1.os:
+      - Compute 1
+      - compute1.os
+      vims0.vl0.feature:
+      - VL 0
+      - vl0.feature
+      testfyx0.extcp0.internal_connectionPoint:
+      - ExtCP 0
+      - extcp0.internal_connectionPoint
+      testfyx0.blockstorage0.feature:
+      - BlockStorage 0
+      - blockstorage0.feature
+      testfyx0.contrailport1.network.outpoing.packets:
+      - ContrailPort 1
+      - contrailport1.network.outpoing.packets
+      testfyx0.contrailport1.network.incoming.bytes:
+      - ContrailPort 1
+      - contrailport1.network.incoming.bytes
+      vims0.port0.feature:
+      - Port 0
+      - port0.feature
+      testfyx0.contrailport0.network.outgoing.bytes.rate:
+      - ContrailPort 0
+      - contrailport0.network.outgoing.bytes.rate
+      vims0.vl0.link:
+      - VL 0
+      - vl0.link
+      testfyx0.compute2.scalable:
+      - Compute 2
+      - compute2.scalable
+    requirements:
+      vims0.port0.dependency:
+      - Port 0
+      - port0.dependency
+      testfyx0.contrailport1.binding:
+      - ContrailPort 1
+      - contrailport1.binding
+      testfyx0.extcp0.virtualLink:
+      - ExtCP 0
+      - extcp0.virtualLink
+      vims0.extcp0.virtualLink:
+      - ExtCP 0
+      - extcp0.virtualLink
+      vims0.port1.link:
+      - Port 1
+      - port1.link
+      extvl0.dependency:
+      - ExtVL 0
+      - dependency
+      testfyx0.extcp0.dependency:
+      - ExtCP 0
+      - extcp0.dependency
+      testfyx0.compute1.dependency:
+      - Compute 1
+      - compute1.dependency
+      vims0.compute1.dependency:
+      - Compute 1
+      - compute1.dependency
+      testfyx0.compute1.local_storage:
+      - Compute 1
+      - compute1.local_storage
+      vims0.compute0.local_storage:
+      - Compute 0
+      - compute0.local_storage
+      vims0.port1.dependency:
+      - Port 1
+      - port1.dependency
+      vims0.extcp0.dependency:
+      - ExtCP 0
+      - extcp0.dependency
+      vims0.vl0.dependency:
+      - VL 0
+      - vl0.dependency
+      testfyx0.blockstorage0.dependency:
+      - BlockStorage 0
+      - blockstorage0.dependency
+      testfyx0.contrailport1.dependency:
+      - ContrailPort 1
+      - contrailport1.dependency
+      testfyx0.contrailport0.link:
+      - ContrailPort 0
+      - contrailport0.link
+      testfyx0.extcp0.external_virtualLink:
+      - ExtCP 0
+      - extcp0.external_virtualLink
+      testfyx0.contrailport0.binding:
+      - ContrailPort 0
+      - contrailport0.binding
+      testfyx0.compute2.dependency:
+      - Compute 2
+      - compute2.dependency
+      extcp0.virtualLink:
+      - ExtCP 0
+      - virtualLink
+      testfyx0.contrailport1.link:
+      - ContrailPort 1
+      - contrailport1.link
+      extcp0.virtualBinding:
+      - ExtCP 0
+      - virtualBinding
+      testfyx0.compute2.local_storage:
+      - Compute 2
+      - compute2.local_storage
+      vims0.port0.link:
+      - Port 0
+      - port0.link
+      vims0.port0.binding:
+      - Port 0
+      - port0.binding
+      testfyx0.contrailport0.dependency:
+      - ContrailPort 0
+      - contrailport0.dependency
+      extcp0.dependency:
+      - ExtCP 0
+      - dependency
+      vims0.compute1.local_storage:
+      - Compute 1
+      - compute1.local_storage
+      testfyx0.contrailvirtualnetwork0.dependency:
+      - ContrailVirtualNetwork 0
+      - contrailvirtualnetwork0.dependency
+      vims0.extcp0.virtualBinding:
+      - ExtCP 0
+      - extcp0.virtualBinding
+      testfyx0.compute0.local_storage:
+      - Compute 0
+      - compute0.local_storage
+      testfyx0.extcp0.virtualBinding:
+      - ExtCP 0
+      - extcp0.virtualBinding
+      extcp0.external_virtualLink:
+      - ExtCP 0
+      - external_virtualLink
+      testfyx0.objectstorage0.dependency:
+      - ObjectStorage 0
+      - objectstorage0.dependency
+      vims0.compute0.dependency:
+      - Compute 0
+      - compute0.dependency
+      vims0.port1.binding:
+      - Port 1
+      - port1.binding
+      vims0.extcp0.external_virtualLink:
+      - ExtCP 0
+      - extcp0.external_virtualLink
+      testfyx0.compute0.dependency:
+      - Compute 0
+      - compute0.dependency
diff --git a/run.sh b/run.sh
new file mode 100644 (file)
index 0000000..1b8eb46
--- /dev/null
+++ b/run.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+logDir="/var/log/onap/modeling/genericparser/"
+if [ ! -d "$logDir" ]; then
+       mkdir -p $logDir
+fi
+
+# nohup python manage.py runserver 0.0.0.0:8806 > /dev/null &
+# nohup uwsgi --http :8806 -t 120 --module genericparser.wsgi --master --processes 4 &
+
+
+if [ "${SSL_ENABLED}" = "true" ]; then
+    nohup uwsgi --https :8806,genericparser/pub/ssl/cert/foobar.crt,genericparser/pub/ssl/cert/foobar.key -t 120 --module genericparser.wsgi --master --processes 4 &
+else
+    nohup uwsgi --http :8806 -t 120 --module genericparser.wsgi --master --processes 4 &
+fi
+
+
+while [ ! -f $logDir/runtime_genericparser.log ]; do
+    sleep 1
+done
+
+tail -F  $logDir/runtime_genericparser.log
\ No newline at end of file
diff --git a/static/genericparser/22/nsd_content.txt b/static/genericparser/22/nsd_content.txt
new file mode 100644 (file)
index 0000000..30d74d2
--- /dev/null
@@ -0,0 +1 @@
+test
\ No newline at end of file
diff --git a/static/genericparser/22/pnfd_content.txt b/static/genericparser/22/pnfd_content.txt
new file mode 100644 (file)
index 0000000..30d74d2
--- /dev/null
@@ -0,0 +1 @@
+test
\ No newline at end of file
diff --git a/static/genericparser/222/empty.txt b/static/genericparser/222/empty.txt
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/static/genericparser/222/hss.csar b/static/genericparser/222/hss.csar
new file mode 100644 (file)
index 0000000..56a6051
--- /dev/null
@@ -0,0 +1 @@
+1
\ No newline at end of file
diff --git a/static/genericparser/222/resource_test.csar b/static/genericparser/222/resource_test.csar
new file mode 100644 (file)
index 0000000..1cf038d
Binary files /dev/null and b/static/genericparser/222/resource_test.csar differ
diff --git a/static/genericparser/empty.txt b/static/genericparser/empty.txt
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/static/genericparser/resource_test.csar b/static/genericparser/resource_test.csar
new file mode 100644 (file)
index 0000000..1cf038d
Binary files /dev/null and b/static/genericparser/resource_test.csar differ
diff --git a/stop.sh b/stop.sh
new file mode 100644 (file)
index 0000000..74a5fd6
--- /dev/null
+++ b/stop.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# ps auxww | grep "manage.py runserver 0.0.0.0:8806" | awk '{print $2}' | xargs kill -9
+ps auxww | grep 'uwsgi --http :8806 -t 120 --module genericparser.wsgi --master' | grep -v grep | awk '{print $2}' | xargs kill -9
diff --git a/tox.ini b/tox.ini
new file mode 100644 (file)
index 0000000..fcc6330
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,25 @@
+[tox]
+envlist = py27,pep8,cov
+skipsdist = true
+
+[tox:jenkins]
+downloadcache = ~/cache/pip
+
+[flake8]
+ignore = E501,E722
+exclude =  ./venv-tox,./.tox
+
+[testenv]
+deps = -r{toxinidir}/requirements.txt
+commands = coverage run --branch manage.py test genericparser
+
+[testenv:pep8]
+deps = flake8
+commands = flake8
+
+[testenv:py27]
+commands =
+  {[testenv]commands}
+
+[testenv:cov]
+commands = coverage xml --omit="*test*,*__init__.py,*site-packages*"
diff --git a/version.properties b/version.properties
new file mode 100644 (file)
index 0000000..a9aa8e7
--- /dev/null
@@ -0,0 +1,27 @@
+# Copyright 2017 ZTE Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Versioning variables
+# Note that these variables cannot be structured (e.g. : version.release or version.snapshot etc... )
+# because they are used in Jenkins, whose plug-in doesn't support
+
+major=1
+minor=3
+patch=0
+
+base_version=${major}.${minor}.${patch}
+
+# Release must be completed with git revision # in Jenkins
+release_version=${base_version}
+snapshot_version=${base_version}-SNAPSHOT